{ )} {showSendButton && } - {typingEnabled && showEmojiPanel && ( + {showEmojiPanel && ( { } /* eslint-enable @typescript-eslint/no-misused-promises */ - private fetchUsersForOpenGroup( query: string, callback: (data: Array) => void @@ -478,9 +482,9 @@ class CompositionBoxInner extends Component { } private fetchUsersForGroup(query: string, callback: (data: Array) => void) { - let overridenQuery = query; + let overriddenQuery = query; if (!query) { - overridenQuery = ''; + overriddenQuery = ''; } if (!this.props.selectedConversation) { return; @@ -491,11 +495,11 @@ class CompositionBoxInner extends Component { } if (this.props.selectedConversation.isPublic) { - this.fetchUsersForOpenGroup(overridenQuery, callback); + this.fetchUsersForOpenGroup(overriddenQuery, callback); return; } // can only be a closed group here - this.fetchUsersForClosedGroup(overridenQuery, callback); + this.fetchUsersForClosedGroup(overriddenQuery, callback); } private fetchUsersForClosedGroup( @@ -512,9 +516,9 @@ class CompositionBoxInner extends Component { } const allMembers = allPubKeys.map(pubKey => { - const conv = getConversationController().get(pubKey); + const convo = ConvoHub.use().get(pubKey); const profileName = - conv?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('anonymous'); + convo?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('anonymous'); return { id: pubKey, @@ -612,7 +616,7 @@ class CompositionBoxInner extends Component { // eslint-disable-next-line more/no-then getPreview(firstLink, abortController.signal) .then(ret => { - // we finished loading the preview, and checking the abortConrtoller, we are still not aborted. + // we finished loading the preview, and checking the abortController, we are still not aborted. // => update the staged preview if (this.linkPreviewAbortController && !this.linkPreviewAbortController.signal.aborted) { this.setState({ @@ -675,7 +679,7 @@ class CompositionBoxInner extends Component { // eslint-disable-next-line no-param-reassign attachment.caption = caption; ToastUtils.pushToastInfo('saved', window.i18n.stripped('saved')); - // close the lightbox on save + // close the light box on save this.setState({ showCaptionEditor: undefined, }); @@ -822,10 +826,7 @@ class CompositionBoxInner extends Component { return; } - if ( - !selectedConversation.isPrivate && - (selectedConversation.left || selectedConversation.isKickedFromGroup) - ) { + if (!selectedConversation.isPrivate && selectedConversation.isKickedFromGroup) { ToastUtils.pushYouLeftTheGroup(); return; } @@ -850,7 +851,6 @@ class CompositionBoxInner extends Component { : undefined; try { - // this does not call call removeAllStagedAttachmentsInConvers const { attachments, previews } = await this.getFiles(linkPreview); this.props.sendMessage({ body: messagePlaintext.trim(), diff --git a/ts/components/conversation/composition/CompositionTextArea.tsx b/ts/components/conversation/composition/CompositionTextArea.tsx index 74907e8921..7439900bec 100644 --- a/ts/components/conversation/composition/CompositionTextArea.tsx +++ b/ts/components/conversation/composition/CompositionTextArea.tsx @@ -1,17 +1,17 @@ import { RefObject, useState } from 'react'; import { Mention, MentionsInput } from 'react-mentions'; -import { getConversationController } from '../../../session/conversations'; import { useSelectedConversationKey, useSelectedIsBlocked, + useSelectedIsGroupDestroyed, useSelectedIsKickedFromGroup, - useSelectedIsLeft, useSelectedNicknameOrProfileNameOrShortenedPubkey, } from 
'../../../state/selectors/selectedConversation'; import { updateDraftForConversation } from '../SessionConversationDrafts'; import { renderEmojiQuickResultRow, searchEmojiForQuery } from './EmojiQuickResult'; import { renderUserMentionRow, styleForCompositionBoxSuggestions } from './UserMentions'; import { HTMLDirection, useHTMLDirection } from '../../../util/i18n/rtlSupport'; +import { ConvoHub } from '../../../session/conversations'; const sendMessageStyle = (dir?: HTMLDirection) => { return { @@ -56,7 +56,7 @@ export const CompositionTextArea = (props: Props) => { const selectedConversationKey = useSelectedConversationKey(); const htmlDirection = useHTMLDirection(); const isKickedFromGroup = useSelectedIsKickedFromGroup(); - const left = useSelectedIsLeft(); + const isGroupDestroyed = useSelectedIsGroupDestroyed(); const isBlocked = useSelectedIsBlocked(); const groupName = useSelectedNicknameOrProfileNameOrShortenedPubkey(); @@ -65,12 +65,12 @@ export const CompositionTextArea = (props: Props) => { } const makeMessagePlaceHolderText = () => { + if (isGroupDestroyed) { + return window.i18n('groupDeletedMemberDescription', { group_name: groupName }); + } if (isKickedFromGroup) { return window.i18n('groupRemovedYou', { group_name: groupName }); } - if (left) { - return window.i18n('groupMemberYouLeft'); - } if (isBlocked) { return window.i18n('blockBlockedDescription'); } @@ -100,7 +100,7 @@ export const CompositionTextArea = (props: Props) => { Also, check for a message length change before firing it up, to avoid catching ESC, tab, or whatever which is not typing */ if (draft && draft.length && draft.length !== lastBumpTypingMessageLength) { - const conversationModel = getConversationController().get(selectedConversationKey); + const conversationModel = ConvoHub.use().get(selectedConversationKey); if (!conversationModel) { return; } diff --git a/ts/components/conversation/header/ConversationHeaderItems.tsx b/ts/components/conversation/header/ConversationHeaderItems.tsx index d8dbb88aba..39d05e8f7f 100644 --- a/ts/components/conversation/header/ConversationHeaderItems.tsx +++ b/ts/components/conversation/header/ConversationHeaderItems.tsx @@ -55,7 +55,7 @@ export const BackButton = (props: { onGoBack: () => void; showBackButton: boolea export const CallButton = () => { const isPrivate = useSelectedIsPrivate(); const isBlocked = useSelectedIsBlocked(); - const activeAt = useSelectedIsActive(); + const isActive = useSelectedIsActive(); const isMe = useSelectedIsNoteToSelf(); const selectedConvoKey = useSelectedConversationKey(); @@ -70,7 +70,7 @@ export const CallButton = () => { isMe || !selectedConvoKey || isBlocked || - !activeAt || + !isActive || !isPrivateAndFriend // call requires us to be friends ) { return null; diff --git a/ts/components/conversation/header/ConversationHeaderTitle.tsx b/ts/components/conversation/header/ConversationHeaderTitle.tsx index 196679fe9c..4da8d91eb0 100644 --- a/ts/components/conversation/header/ConversationHeaderTitle.tsx +++ b/ts/components/conversation/header/ConversationHeaderTitle.tsx @@ -12,7 +12,7 @@ import { useSelectedIsKickedFromGroup, useSelectedIsNoteToSelf, useSelectedIsPublic, - useSelectedMembers, + useSelectedMembersCount, useSelectedNicknameOrProfileNameOrShortenedPubkey, useSelectedNotificationSetting, useSelectedSubscriberCount, @@ -61,7 +61,7 @@ export const ConversationHeaderTitle = (props: ConversationHeaderTitleProps) => const isKickedFromGroup = useSelectedIsKickedFromGroup(); const isMe = useSelectedIsNoteToSelf(); const isGroup = 
useSelectedIsGroupOrCommunity(); - const members = useSelectedMembers(); + const selectedMembersCount = useSelectedMembersCount(); const expirationMode = useSelectedConversationDisappearingMode(); const disappearingMessageSubtitle = useDisappearingMessageSettingText({ @@ -85,7 +85,7 @@ export const ConversationHeaderTitle = (props: ConversationHeaderTitleProps) => if (isPublic) { count = subscriberCount || 0; } else { - count = members.length; + count = selectedMembersCount; } } @@ -94,7 +94,7 @@ export const ConversationHeaderTitle = (props: ConversationHeaderTitleProps) => } return null; - }, [i18n, isGroup, isKickedFromGroup, isPublic, members.length, subscriberCount]); + }, [i18n, isGroup, isKickedFromGroup, isPublic, selectedMembersCount, subscriberCount]); const handleRightPanelToggle = () => { if (isRightPanelOn) { diff --git a/ts/components/conversation/media-gallery/EmptyState.tsx b/ts/components/conversation/media-gallery/EmptyState.tsx index 5f49e28bec..20bf1f6556 100644 --- a/ts/components/conversation/media-gallery/EmptyState.tsx +++ b/ts/components/conversation/media-gallery/EmptyState.tsx @@ -2,16 +2,12 @@ * @prettier */ -import { Component } from 'react'; - interface Props { label: string; } -export class EmptyState extends Component { - public render() { - const { label } = this.props; +export const EmptyState = (props: Props) => { + const { label } = props; - return
{label};
-  }
-}
+  return
{label}
; +}; diff --git a/ts/components/conversation/message/message-content/ClickToTrustSender.tsx b/ts/components/conversation/message/message-content/ClickToTrustSender.tsx index 41525875ce..a9cd1637ec 100644 --- a/ts/components/conversation/message/message-content/ClickToTrustSender.tsx +++ b/ts/components/conversation/message/message-content/ClickToTrustSender.tsx @@ -1,6 +1,6 @@ import styled from 'styled-components'; import { Data } from '../../../../data/data'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { AttachmentDownloads } from '../../../../session/utils'; import { updateConfirmModal } from '../../../../state/ducks/modalDialog'; import { useMessageAttachments } from '../../../../state/selectors'; @@ -39,7 +39,7 @@ export const ClickToTrustSender = (props: { messageId: string }) => { return; } const sender = found.getSource(); - const convo = getConversationController().get(sender); + const convo = ConvoHub.use().get(sender); window.inboxStore?.dispatch( updateConfirmModal({ title: window.i18n('attachmentsAutoDownloadModalTitle'), diff --git a/ts/components/conversation/message/message-content/MessageAvatar.tsx b/ts/components/conversation/message/message-content/MessageAvatar.tsx index 13e195a69c..9c472b1baf 100644 --- a/ts/components/conversation/message/message-content/MessageAvatar.tsx +++ b/ts/components/conversation/message/message-content/MessageAvatar.tsx @@ -4,7 +4,7 @@ import styled from 'styled-components'; import { OpenGroupData } from '../../../../data/opengroups'; import { MessageRenderingProps } from '../../../../models/messageType'; import { findCachedBlindedMatchOrLookItUp } from '../../../../session/apis/open_group_api/sogsv3/knownBlindedkeys'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { getSodiumRenderer } from '../../../../session/crypto'; import { KeyPrefixType, PubKey } from '../../../../session/types'; import { openConversationWithMessages } from '../../../../state/ducks/conversations'; @@ -64,8 +64,8 @@ export const MessageAvatar = (props: Props) => { if (isPublic && !PubKey.isBlinded(sender)) { // public chat but account id not blinded. disable showing user details if we do not have an active convo with that user. 
// an unactive convo with that user means that we never chatted with that id directyly, but only through a sogs - const convoWithSender = getConversationController().get(sender); - if (!convoWithSender || !convoWithSender.get('active_at')) { + const convoWithSender = ConvoHub.use().get(sender); + if (!convoWithSender || !convoWithSender.getActiveAt()) { // for some time, we might still get some unblinded messages, as in message sent unblinded because // * older clients still send unblinded message and those are allowed by sogs if they doesn't enforce blinding // * new clients still send unblinded message and those are allowed by sogs if it doesn't enforce blinding @@ -89,7 +89,7 @@ export const MessageAvatar = (props: Props) => { return; } - const convoOpen = getConversationController().get(selectedConvoKey); + const convoOpen = ConvoHub.use().get(selectedConvoKey); const room = OpenGroupData.getV2OpenGroupRoom(convoOpen.id); let privateConvoToOpen = sender; if (room?.serverPublicKey) { @@ -102,9 +102,7 @@ export const MessageAvatar = (props: Props) => { privateConvoToOpen = foundRealSessionId || privateConvoToOpen; } - await getConversationController() - .get(privateConvoToOpen) - .setOriginConversationID(selectedConvoKey); + await ConvoHub.use().get(privateConvoToOpen).setOriginConversationID(selectedConvoKey, true); // public and blinded key for that message, we should open the convo as is and see if the user wants // to send a sogs blinded message request. diff --git a/ts/components/conversation/message/message-content/MessageContent.tsx b/ts/components/conversation/message/message-content/MessageContent.tsx index 10906b734d..6878645e45 100644 --- a/ts/components/conversation/message/message-content/MessageContent.tsx +++ b/ts/components/conversation/message/message-content/MessageContent.tsx @@ -1,6 +1,6 @@ import classNames from 'classnames'; import { isEmpty } from 'lodash'; -import { MouseEvent, useCallback, useLayoutEffect, useState } from 'react'; +import { useCallback, useLayoutEffect, useState } from 'react'; import { InView } from 'react-intersection-observer'; import { useSelector } from 'react-redux'; import styled from 'styled-components'; @@ -37,23 +37,6 @@ type Props = { messageId: string; }; -// TODO not too sure what is this doing? It is not preventDefault() -// or stopPropagation() so I think this is never cancelling a click event? -function onClickOnMessageInnerContainer(event: MouseEvent) { - const selection = window.getSelection(); - // Text is being selected - if (selection && selection.type === 'Range') { - return; - } - - // User clicked on message body - const target = event.target as HTMLDivElement; - if (target.className === 'text-selectable' || window.contextMenuShown) { - // eslint-disable-next-line no-useless-return - return; - } -} - const StyledMessageContent = styled.div<{ msgDirection: MessageModelType }>` display: flex; align-self: ${props => (props.msgDirection === 'incoming' ? 
'flex-start' : 'flex-end')}; @@ -156,7 +139,6 @@ export const MessageContent = (props: Props) => { diff --git a/ts/components/conversation/message/message-content/MessageContentWithStatus.tsx b/ts/components/conversation/message/message-content/MessageContentWithStatus.tsx index 5c2f3fba90..16e8aba018 100644 --- a/ts/components/conversation/message/message-content/MessageContentWithStatus.tsx +++ b/ts/components/conversation/message/message-content/MessageContentWithStatus.tsx @@ -1,5 +1,5 @@ import classNames from 'classnames'; -import { MouseEvent, useCallback, useState } from 'react'; +import { SessionDataTestId, MouseEvent, useCallback, useState } from 'react'; import { useDispatch, useSelector } from 'react-redux'; import styled from 'styled-components'; import { useIsDetailMessageView } from '../../../../contexts/isDetailViewContext'; @@ -30,7 +30,7 @@ export type MessageContentWithStatusSelectorProps = { isGroup: boolean } & Pick< type Props = { messageId: string; ctxMenuID: string; - dataTestId: string; + dataTestId: SessionDataTestId; enableReactions: boolean; }; diff --git a/ts/components/conversation/message/message-content/MessageStatus.tsx b/ts/components/conversation/message/message-content/MessageStatus.tsx index c366109e77..eb7a806c93 100644 --- a/ts/components/conversation/message/message-content/MessageStatus.tsx +++ b/ts/components/conversation/message/message-content/MessageStatus.tsx @@ -1,3 +1,4 @@ +import { SessionDataTestId } from 'react'; import { useSelector } from 'react-redux'; import styled from 'styled-components'; import { useMessageExpirationPropsById } from '../../../../hooks/useParamSelector'; @@ -13,7 +14,7 @@ import { saveLogToDesktop } from '../../../../util/logging'; type Props = { messageId: string; - dataTestId?: string | undefined; + dataTestId: SessionDataTestId; }; /** diff --git a/ts/components/conversation/message/message-content/MessageText.tsx b/ts/components/conversation/message/message-content/MessageText.tsx index 29ba158de6..4eaf838577 100644 --- a/ts/components/conversation/message/message-content/MessageText.tsx +++ b/ts/components/conversation/message/message-content/MessageText.tsx @@ -29,7 +29,7 @@ export const MessageText = (props: Props) => { } const { text, isDeleted, conversationType } = selected; - const contents = isDeleted ? window.i18n('deleteMessageDeleted', { count: 1 }) : text?.trim(); + const contents = isDeleted ? 
window.i18n('deleteMessageDeletedGlobally') : text?.trim(); if (!contents) { return null; diff --git a/ts/components/conversation/message/message-item/ExpirableReadableMessage.tsx b/ts/components/conversation/message/message-item/ExpirableReadableMessage.tsx index 6047b4f4cb..ff4819f16c 100644 --- a/ts/components/conversation/message/message-item/ExpirableReadableMessage.tsx +++ b/ts/components/conversation/message/message-item/ExpirableReadableMessage.tsx @@ -7,7 +7,7 @@ import { useIsDetailMessageView } from '../../../../contexts/isDetailViewContext import { Data } from '../../../../data/data'; import { useMessageExpirationPropsById } from '../../../../hooks/useParamSelector'; import { MessageModelType } from '../../../../models/messageType'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { PropsForExpiringMessage, messagesExpired } from '../../../../state/ducks/conversations'; import { getIncrement } from '../../../../util/timer'; import { ExpireTimer } from '../../ExpireTimer'; @@ -51,7 +51,7 @@ function useIsExpired( }, ]) ); - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); convo?.updateLastMessage(); } } diff --git a/ts/components/conversation/message/message-item/GenericReadableMessage.tsx b/ts/components/conversation/message/message-item/GenericReadableMessage.tsx index 5822680da8..8e5f52bda4 100644 --- a/ts/components/conversation/message/message-item/GenericReadableMessage.tsx +++ b/ts/components/conversation/message/message-item/GenericReadableMessage.tsx @@ -6,7 +6,7 @@ import { useSelector } from 'react-redux'; import styled, { keyframes } from 'styled-components'; import { useIsDetailMessageView } from '../../../../contexts/isDetailViewContext'; import { MessageRenderingProps } from '../../../../models/messageType'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { StateType } from '../../../../state/reducer'; import { useMessageSelected } from '../../../../state/selectors'; import { @@ -115,7 +115,7 @@ export const GenericReadableMessage = (props: Props) => { useEffect(() => { if (msgProps?.convoId) { - const conversationModel = getConversationController().get(msgProps?.convoId); + const conversationModel = ConvoHub.use().get(msgProps?.convoId); if (conversationModel) { setEnableReactions(conversationModel.hasReactions()); } diff --git a/ts/components/conversation/message/message-item/GroupUpdateMessage.tsx b/ts/components/conversation/message/message-item/GroupUpdateMessage.tsx index ec8bc681af..5415650444 100644 --- a/ts/components/conversation/message/message-item/GroupUpdateMessage.tsx +++ b/ts/components/conversation/message/message-item/GroupUpdateMessage.tsx @@ -1,29 +1,59 @@ +import { PubkeyType } from 'libsession_util_nodejs'; import { isNull } from 'lodash'; import { + getGroupNameChangeStr, getJoinedGroupUpdateChangeStr, getKickedGroupUpdateStr, getLeftGroupUpdateChangeStr, + getPromotedGroupUpdateChangeStr, } from '../../../../models/groupUpdate'; +import { PreConditionFailed } from '../../../../session/utils/errors'; import { PropsForGroupUpdate, PropsForGroupUpdateType, } from '../../../../state/ducks/conversations'; -import { useSelectedNicknameOrProfileNameOrShortenedPubkey } from '../../../../state/selectors/selectedConversation'; +import { + useSelectedIsGroupV2, + 
useSelectedNicknameOrProfileNameOrShortenedPubkey, +} from '../../../../state/selectors/selectedConversation'; +import type { LocalizerComponentPropsObject } from '../../../../types/localizer'; +import { Localizer } from '../../../basic/Localizer'; import { ExpirableReadableMessage } from './ExpirableReadableMessage'; import { NotificationBubble } from './notification-bubble/NotificationBubble'; -import { Localizer } from '../../../basic/Localizer'; -import type { LocalizerComponentPropsObject } from '../../../../types/localizer'; // This component is used to display group updates in the conversation view. -const ChangeItemJoined = (added: Array): LocalizerComponentPropsObject => { +const ChangeItemPromoted = (promoted: Array): LocalizerComponentPropsObject => { + if (!promoted.length) { + throw new Error('Group update promoted is missing contacts'); + } + const isGroupV2 = useSelectedIsGroupV2(); + + if (isGroupV2) { + return getPromotedGroupUpdateChangeStr(promoted); + } + throw new PreConditionFailed('ChangeItemPromoted only applies to groupv2'); +}; + +const ChangeItemAvatar = (): LocalizerComponentPropsObject => { + const isGroupV2 = useSelectedIsGroupV2(); + if (isGroupV2) { + return { token: 'groupDisplayPictureUpdated' }; + } + throw new PreConditionFailed('ChangeItemAvatar only applies to groupv2'); +}; + +const ChangeItemJoined = ( + added: Array, + withHistory: boolean +): LocalizerComponentPropsObject => { const groupName = useSelectedNicknameOrProfileNameOrShortenedPubkey(); + const isGroupV2 = useSelectedIsGroupV2(); if (!added.length) { throw new Error('Group update added is missing details'); } - - return getJoinedGroupUpdateChangeStr(added, groupName); + return getJoinedGroupUpdateChangeStr(added, isGroupV2, withHistory, groupName); }; const ChangeItemKicked = (kicked: Array): LocalizerComponentPropsObject => { @@ -36,31 +66,29 @@ const ChangeItemKicked = (kicked: Array): LocalizerComponentPropsObject }; const ChangeItemLeft = (left: Array): LocalizerComponentPropsObject => { - const groupName = useSelectedNicknameOrProfileNameOrShortenedPubkey(); - if (!left.length) { throw new Error('Group update left is missing details'); } - return getLeftGroupUpdateChangeStr(left, groupName); + return getLeftGroupUpdateChangeStr(left); }; const ChangeItem = (change: PropsForGroupUpdateType): LocalizerComponentPropsObject => { const { type } = change; + switch (type) { case 'name': - return { token: 'groupNameNew', args: { group_name: change.newName } }; - + return getGroupNameChangeStr(change.newName); case 'add': - return ChangeItemJoined(change.added); - + return ChangeItemJoined(change.added, change.withHistory); case 'left': return ChangeItemLeft(change.left); - case 'kicked': return ChangeItemKicked(change.kicked); - - case 'general': + case 'promoted': + return ChangeItemPromoted(change.promoted); + case 'avatarChange': + return ChangeItemAvatar(); default: return { token: 'groupUpdated' }; } diff --git a/ts/components/conversation/message/message-item/ReadableMessage.tsx b/ts/components/conversation/message/message-item/ReadableMessage.tsx index 39491913f3..096be329e7 100644 --- a/ts/components/conversation/message/message-item/ReadableMessage.tsx +++ b/ts/components/conversation/message/message-item/ReadableMessage.tsx @@ -1,5 +1,6 @@ import { debounce, noop } from 'lodash'; import { + SessionDataTestId, AriaRole, MouseEvent, MouseEventHandler, @@ -13,7 +14,7 @@ import { useDispatch, useSelector } from 'react-redux'; import { useScrollToLoadedMessage } from 
'../../../../contexts/ScrollToLoadedMessage'; import { Data } from '../../../../data/data'; import { useHasUnread } from '../../../../hooks/useParamSelector'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { fetchBottomMessagesForConversation, fetchTopMessagesForConversation, @@ -39,8 +40,8 @@ export type ReadableMessageProps = { isUnread: boolean; onClick?: MouseEventHandler; onDoubleClickCapture?: MouseEventHandler; + dataTestId: SessionDataTestId; role?: AriaRole; - dataTestId: string; onContextMenu?: (e: MouseEvent) => void; isControlMessage?: boolean; }; @@ -126,7 +127,7 @@ export const ReadableMessage = (props: ReadableMessageProps) => { // make sure the app is focused, because we mark message as read here if (inView === true && isAppFocused) { dispatch(showScrollToBottomButton(false)); - getConversationController() + ConvoHub.use() .get(selectedConversationKey) ?.markConversationRead({ newestUnreadDate: receivedAt || 0, fromConfigMessage: false }); // TODOLATER this should be `sentAt || serverTimestamp` I think @@ -157,7 +158,7 @@ export const ReadableMessage = (props: ReadableMessageProps) => { // mark the whole conversation as read until this point. // this will trigger the expire timer. if (foundSentAt) { - getConversationController() + ConvoHub.use() .get(selectedConversationKey) ?.markConversationRead({ newestUnreadDate: foundSentAt, fromConfigMessage: false }); } diff --git a/ts/components/conversation/right-panel/overlay/OverlayRightPanelSettings.tsx b/ts/components/conversation/right-panel/overlay/OverlayRightPanelSettings.tsx index 1b4d5daf1e..6db70ec2a7 100644 --- a/ts/components/conversation/right-panel/overlay/OverlayRightPanelSettings.tsx +++ b/ts/components/conversation/right-panel/overlay/OverlayRightPanelSettings.tsx @@ -21,16 +21,20 @@ import { showUpdateGroupNameByConvoId, } from '../../../../interactions/conversationInteractions'; import { Constants } from '../../../../session'; +import { PubKey } from '../../../../session/types'; +import { hasClosedGroupV2QAButtons } from '../../../../shared/env_vars'; import { closeRightPanel } from '../../../../state/ducks/conversations'; +import { groupInfoActions } from '../../../../state/ducks/metaGroups'; import { resetRightOverlayMode, setRightOverlayMode } from '../../../../state/ducks/section'; import { useSelectedConversationKey, useSelectedDisplayNameInProfile, useSelectedIsActive, useSelectedIsBlocked, + useSelectedIsGroupDestroyed, useSelectedIsGroupOrCommunity, + useSelectedIsGroupV2, useSelectedIsKickedFromGroup, - useSelectedIsLeft, useSelectedIsPublic, useSelectedLastMessage, useSelectedSubscriberCount, @@ -128,15 +132,22 @@ const HeaderItem = () => { const dispatch = useDispatch(); const isBlocked = useSelectedIsBlocked(); const isKickedFromGroup = useSelectedIsKickedFromGroup(); - const left = useSelectedIsLeft(); + const isGroupDestroyed = useSelectedIsGroupDestroyed(); const isGroup = useSelectedIsGroupOrCommunity(); + const isGroupV2 = useSelectedIsGroupV2(); + const isPublic = useSelectedIsPublic(); const subscriberCount = useSelectedSubscriberCount(); + const weAreAdmin = useSelectedWeAreAdmin(); if (!selectedConvoKey) { return null; } - const showInviteContacts = isGroup && !isKickedFromGroup && !isBlocked && !left; + const showInviteLegacyGroup = + !isPublic && !isGroupV2 && isGroup && !isKickedFromGroup && !isBlocked; + const showInviteGroupV2 = + isGroupV2 && !isKickedFromGroup && !isBlocked && 
weAreAdmin && !isGroupDestroyed; + const showInviteContacts = isPublic || showInviteLegacyGroup || showInviteGroupV2; const showMemberCount = !!(subscriberCount && subscriberCount > 0); return ( @@ -201,8 +212,8 @@ export const OverlayRightPanelSettings = () => { const isActive = useSelectedIsActive(); const isBlocked = useSelectedIsBlocked(); const isKickedFromGroup = useSelectedIsKickedFromGroup(); - const left = useSelectedIsLeft(); const isGroup = useSelectedIsGroupOrCommunity(); + const isGroupV2 = useSelectedIsGroupV2(); const isPublic = useSelectedIsPublic(); const weAreAdmin = useSelectedWeAreAdmin(); const disappearingMessagesSubtitle = useDisappearingMessageSettingText({ @@ -256,7 +267,7 @@ export const OverlayRightPanelSettings = () => { return null; } - const commonNoShow = isKickedFromGroup || left || isBlocked || !isActive; + const commonNoShow = isKickedFromGroup || isBlocked || !isActive; const hasDisappearingMessages = !isPublic && !commonNoShow; const leaveGroupString = isPublic ? window.i18n('communityLeave') @@ -267,16 +278,14 @@ export const OverlayRightPanelSettings = () => { ? window.i18n('groupRemovedYou', { group_name: selectedUsername || window.i18n('groupUnknown'), }) - : left - ? window.i18n('groupMemberYouLeft') - : window.i18n('groupLeave'); + : window.i18n('groupLeave'); const showUpdateGroupNameButton = isGroup && weAreAdmin && !commonNoShow; // legacy groups non-admin cannot change groupname anymore const showAddRemoveModeratorsButton = weAreAdmin && !commonNoShow && isPublic; const showUpdateGroupMembersButton = !isPublic && isGroup && !commonNoShow; - const deleteConvoAction = async () => { - await showLeaveGroupByConvoId(selectedConvoKey, selectedUsername); + const deleteConvoAction = () => { + void showLeaveGroupByConvoId(selectedConvoKey, selectedUsername); }; return ( @@ -295,6 +304,56 @@ export const OverlayRightPanelSettings = () => { /> )} + {hasClosedGroupV2QAButtons() && isGroupV2 ? 
( + <> + { + if (!PubKey.is03Pubkey(selectedConvoKey)) { + throw new Error('triggerFakeAvatarUpdate needs a 03 pubkey'); + } + window.inboxStore?.dispatch( + groupInfoActions.triggerFakeAvatarUpdate({ groupPk: selectedConvoKey }) as any + ); + }} + dataTestId="edit-group-name" + /> + { + if (!PubKey.is03Pubkey(selectedConvoKey)) { + throw new Error('We need a 03 pubkey'); + } + window.inboxStore?.dispatch( + groupInfoActions.triggerFakeDeleteMsgBeforeNow({ + groupPk: selectedConvoKey, + messagesWithAttachmentsOnly: false, + }) as any + ); + }} + dataTestId="edit-group-name" + /> + { + if (!PubKey.is03Pubkey(selectedConvoKey)) { + throw new Error('We need a 03 pubkey'); + } + window.inboxStore?.dispatch( + groupInfoActions.triggerFakeDeleteMsgBeforeNow({ + groupPk: selectedConvoKey, + messagesWithAttachmentsOnly: true, + }) as any + ); + }} + dataTestId="edit-group-name" + /> + + ) : null} + {showAddRemoveModeratorsButton && ( <> { {isGroup && ( - void deleteConvoAction()} - color={'var(--danger-color)'} - iconType={'delete'} - /> + <> + void deleteConvoAction()} + color={'var(--danger-color)'} + iconType={'delete'} + /> + )} diff --git a/ts/components/conversation/right-panel/overlay/disappearing-messages/DisappearingModes.tsx b/ts/components/conversation/right-panel/overlay/disappearing-messages/DisappearingModes.tsx index e683284d7f..4ff48cdd4e 100644 --- a/ts/components/conversation/right-panel/overlay/disappearing-messages/DisappearingModes.tsx +++ b/ts/components/conversation/right-panel/overlay/disappearing-messages/DisappearingModes.tsx @@ -1,20 +1,20 @@ +import { SessionDataTestId } from 'react'; import { DisappearingMessageConversationModeType } from '../../../../../session/disappearing_messages/types'; import { Localizer } from '../../../../basic/Localizer'; import { PanelButtonGroup, PanelLabel } from '../../../../buttons/PanelButton'; import { PanelRadioButton } from '../../../../buttons/PanelRadioButton'; -function loadDataTestId(mode: DisappearingMessageConversationModeType) { - const dataTestId = 'disappear-%-option'; +function toDataTestId(mode: DisappearingMessageConversationModeType): SessionDataTestId { switch (mode) { case 'legacy': - return dataTestId.replace('%', 'legacy'); + return 'disappear-legacy-option'; case 'deleteAfterRead': - return dataTestId.replace('%', 'after-read'); + return 'disappear-after-read-option'; case 'deleteAfterSend': - return dataTestId.replace('%', 'after-send'); + return 'disappear-after-send-option'; case 'off': default: - return dataTestId.replace('%', 'off'); + return 'disappear-off-option'; } } @@ -69,7 +69,7 @@ export const DisappearingModes = (props: DisappearingModesProps) => { setSelected(mode); }} disabled={options[mode]} - dataTestId={loadDataTestId(mode)} + dataTestId={toDataTestId(mode)} /> ); })} diff --git a/ts/components/conversation/right-panel/overlay/disappearing-messages/TimeOptions.tsx b/ts/components/conversation/right-panel/overlay/disappearing-messages/TimeOptions.tsx index ecb8d94b5a..3ffcb1bdfe 100644 --- a/ts/components/conversation/right-panel/overlay/disappearing-messages/TimeOptions.tsx +++ b/ts/components/conversation/right-panel/overlay/disappearing-messages/TimeOptions.tsx @@ -39,7 +39,7 @@ export const TimeOptions = (props: TimerOptionsProps) => { setSelected(option.value); }} disabled={disabled} - dataTestId={`time-option-${option.name.replace(' ', '-')}`} // we want "time-option-1-minute", etc as accessibility id + dataTestId={`time-option-${option.value}`} // we want "time-option-3600", etc as 
accessibility id /> ); })} diff --git a/ts/components/dialog/BanOrUnbanUserDialog.tsx b/ts/components/dialog/BanOrUnbanUserDialog.tsx index 0411d503b8..8f0fa60328 100644 --- a/ts/components/dialog/BanOrUnbanUserDialog.tsx +++ b/ts/components/dialog/BanOrUnbanUserDialog.tsx @@ -2,13 +2,13 @@ import { ChangeEvent, useRef, useState } from 'react'; import { useDispatch } from 'react-redux'; import { useFocusMount } from '../../hooks/useFocusMount'; -import { useConversationPropsById } from '../../hooks/useParamSelector'; +import { useConversationUsername } from '../../hooks/useParamSelector'; import { ConversationModel } from '../../models/conversation'; import { sogsV3BanUser, sogsV3UnbanUser, } from '../../session/apis/open_group_api/sogsv3/sogsV3BanUnban'; -import { getConversationController } from '../../session/conversations/ConversationController'; +import { ConvoHub } from '../../session/conversations/ConversationController'; import { PubKey } from '../../session/types'; import { ToastUtils } from '../../session/utils'; import { BanType, updateBanOrUnbanUserModal } from '../../state/ducks/modalDialog'; @@ -69,20 +69,17 @@ export const BanOrUnBanUserDialog = (props: { const isBan = banType === 'ban'; const dispatch = useDispatch(); const isDarkTheme = useIsDarkTheme(); - const convo = getConversationController().get(conversationId); + const convo = ConvoHub.use().get(conversationId); const inputRef = useRef(null); useFocusMount(inputRef, true); - const wasGivenAPubkey = Boolean(pubkey?.length); const [inputBoxValue, setInputBoxValue] = useState(''); const [inProgress, setInProgress] = useState(false); - const sourceConvoProps = useConversationPropsById(pubkey); + const displayName = useConversationUsername(pubkey); const inputTextToDisplay = - wasGivenAPubkey && sourceConvoProps - ? `${sourceConvoProps.displayNameInProfile} ${PubKey.shorten(sourceConvoProps.id)}` - : undefined; + !!pubkey && displayName ? `${displayName} ${PubKey.shorten(pubkey)}` : undefined; /** * Ban or Unban a user from an open group @@ -97,7 +94,7 @@ export const BanOrUnBanUserDialog = (props: { if (isBanned) { // clear input box setInputBoxValue(''); - if (wasGivenAPubkey) { + if (pubkey) { dispatch(updateBanOrUnbanUserModal(null)); } } @@ -136,8 +133,8 @@ export const BanOrUnBanUserDialog = (props: { placeholder={i18n('accountIdEnter')} dir="auto" onChange={onPubkeyBoxChanges} - disabled={inProgress || wasGivenAPubkey} - value={wasGivenAPubkey ? inputTextToDisplay : inputBoxValue} + disabled={inProgress || !!pubkey} + value={pubkey ? 
inputTextToDisplay : inputBoxValue} /> void; }) => { const { deleteMode, setDeleteMode } = props; + + const items = [ + { + label: window.i18n('clearDeviceOnly'), + value: DEVICE_ONLY, + }, + { + label: window.i18n('clearDeviceAndNetwork'), + value: DEVICE_AND_NETWORK, + }, + ].map(m => ({ + ...m, + inputDataTestId: `input-${m.value}` as const, + labelDataTestId: `label-${m.value}` as const, + })); + return ( <> @@ -38,10 +54,7 @@ const DescriptionBeforeAskingConfirmation = (props: { setDeleteMode(value); } }} - items={[ - { label: window.i18n('clearDeviceOnly'), value: DEVICE_ONLY }, - { label: window.i18n('clearDeviceAndNetwork'), value: 'device_and_network' }, - ]} + items={items} /> ); diff --git a/ts/components/dialog/HideRecoveryPasswordDialog.tsx b/ts/components/dialog/HideRecoveryPasswordDialog.tsx index 5a34300b15..c88d582e9e 100644 --- a/ts/components/dialog/HideRecoveryPasswordDialog.tsx +++ b/ts/components/dialog/HideRecoveryPasswordDialog.tsx @@ -46,12 +46,12 @@ export function HideRecoveryPasswordDialog(props: HideRecoveryPasswordDialogProp onClick: () => { dispatch(updateHideRecoveryPasswordModal({ state: 'secondWarning' })); }, - dataTestId: 'session-confirm-ok-button', + dataTestId: 'session-confirm-ok-button' as const, } : { text: window.i18n('cancel'), onClick: onClose, - dataTestId: 'session-confirm-cancel-button', + dataTestId: 'session-confirm-cancel-button' as const, }; const rightButtonProps = @@ -59,7 +59,7 @@ export function HideRecoveryPasswordDialog(props: HideRecoveryPasswordDialogProp ? { text: window.i18n('cancel'), onClick: onClose, - dataTestId: 'session-confirm-cancel-button', + dataTestId: 'session-confirm-cancel-button' as const, } : { text: window.i18n('yes'), @@ -67,7 +67,7 @@ export function HideRecoveryPasswordDialog(props: HideRecoveryPasswordDialogProp onClick: () => { void onConfirmation(); }, - dataTestId: 'session-confirm-ok-button', + dataTestId: 'session-confirm-ok-button' as const, }; return ( diff --git a/ts/components/dialog/InviteContactsDialog.tsx b/ts/components/dialog/InviteContactsDialog.tsx index d396811de0..883a9a938c 100644 --- a/ts/components/dialog/InviteContactsDialog.tsx +++ b/ts/components/dialog/InviteContactsDialog.tsx @@ -1,21 +1,36 @@ +import { useState } from 'react'; import useKey from 'react-use/lib/useKey'; -import _ from 'lodash'; -import { useDispatch, useSelector } from 'react-redux'; +import { PubkeyType } from 'libsession_util_nodejs'; +import _, { difference, uniq } from 'lodash'; +import { useDispatch } from 'react-redux'; import { VALIDATION } from '../../session/constants'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { ToastUtils, UserUtils } from '../../session/utils'; import { updateInviteContactModal } from '../../state/ducks/modalDialog'; import { SpacerLG } from '../basic/Text'; -import { useConversationPropsById } from '../../hooks/useParamSelector'; +import { + useIsPrivate, + useIsPublic, + useSortedGroupMembers, + useZombies, +} from '../../hooks/useParamSelector'; import { useSet } from '../../hooks/useSet'; -import { initiateClosedGroupUpdate } from '../../session/group/closed-group'; +import { ClosedGroup } from '../../session/group/closed-group'; +import { PubKey } from '../../session/types'; import { SessionUtilUserGroups } from '../../session/utils/libsession/libsession_utils_user_groups'; -import { getPrivateContactsPubkeys } from '../../state/selectors/conversations'; +import { groupInfoActions } from 
'../../state/ducks/metaGroups'; +import { useContactsToInviteToGroup } from '../../state/selectors/conversations'; +import { useMemberGroupChangePending } from '../../state/selectors/groups'; +import { useSelectedIsGroupV2 } from '../../state/selectors/selectedConversation'; import { MemberListItem } from '../MemberListItem'; import { SessionWrapperModal } from '../SessionWrapperModal'; import { SessionButton, SessionButtonColor, SessionButtonType } from '../basic/SessionButton'; +import { SessionSpinner } from '../loading'; +import { SessionToggle } from '../basic/SessionToggle'; +import { GroupInviteRequiredVersionBanner } from '../NoticeBanner'; +import { isDevProd } from '../../shared/env_vars'; import { ConversationTypeEnum } from '../../models/types'; import { Localizer } from '../basic/Localizer'; @@ -24,7 +39,7 @@ type Props = { }; async function submitForOpenGroup(convoId: string, pubkeys: Array) { - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); if (!convo || !convo.isPublic()) { throw new Error('submitForOpenGroup group not found'); } @@ -39,7 +54,7 @@ async function submitForOpenGroup(convoId: string, pubkeys: Array) { }; // eslint-disable-next-line @typescript-eslint/no-misused-promises pubkeys.forEach(async pubkeyStr => { - const privateConvo = await getConversationController().getOrCreateAndWait( + const privateConvo = await ConvoHub.use().getOrCreateAndWait( pubkeyStr, ConversationTypeEnum.PRIVATE ); @@ -60,20 +75,20 @@ async function submitForOpenGroup(convoId: string, pubkeys: Array) { } const submitForClosedGroup = async (convoId: string, pubkeys: Array) => { - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); if (!convo || !convo.isGroup()) { throw new Error('submitForClosedGroup group not found'); } // closed group chats const ourPK = UserUtils.getOurPubKeyStrFromCache(); // we only care about real members. 
If a member is currently a zombie we have to be able to add him back - let existingMembers = convo.get('members') || []; + let existingMembers = convo.getGroupMembers() || []; // at least make sure it's an array if (!Array.isArray(existingMembers)) { existingMembers = []; } existingMembers = _.compact(existingMembers); - const existingZombies = convo.get('zombies') || []; + const existingZombies = convo.getGroupZombies() || []; const newMembers = pubkeys.filter(d => !existingMembers.includes(d)); if (newMembers.length > 0) { @@ -93,7 +108,7 @@ const submitForClosedGroup = async (convoId: string, pubkeys: Array) => const groupId = convo.get('id'); const groupName = convo.getNicknameOrRealUsernameOrPlaceholder(); - await initiateClosedGroupUpdate(groupId, groupName, uniqMembers); + await ClosedGroup.initiateClosedGroupUpdate(groupId, groupName, uniqMembers); } }; @@ -101,29 +116,28 @@ const InviteContactsDialogInner = (props: Props) => { const { conversationId } = props; const dispatch = useDispatch(); - const privateContactPubkeys = useSelector(getPrivateContactsPubkeys); - let validContactsForInvite = _.clone(privateContactPubkeys); + const privateContactPubkeys = useContactsToInviteToGroup() as Array; - const convoProps = useConversationPropsById(conversationId); + const isProcessingUIChange = useMemberGroupChangePending(); + + const isPrivate = useIsPrivate(conversationId); + const isPublic = useIsPublic(conversationId); + const membersFromRedux = useSortedGroupMembers(conversationId) || []; + const zombiesFromRedux = useZombies(conversationId) || []; + const isGroupV2 = useSelectedIsGroupV2(); + const [shareHistory, setShareHistory] = useState(false); const { uniqueValues: selectedContacts, addTo, removeFrom } = useSet(); - if (!convoProps) { - throw new Error('InviteContactsDialogInner not a valid convoId given'); - } - if (convoProps.isPrivate) { + if (isPrivate) { throw new Error('InviteContactsDialogInner must be a group'); } - if (!convoProps.isPublic) { - // filter our zombies and current members from the list of contact we can add - const members = convoProps.members || []; - const zombies = convoProps.zombies || []; - validContactsForInvite = validContactsForInvite.filter( - d => !members.includes(d) && !zombies.includes(d) - ); - } + const zombiesAndMembers = uniq([...membersFromRedux, ...zombiesFromRedux]); + // filter our zombies and current members from the list of contact we can add - const isPublicConvo = convoProps.isPublic; + const validContactsForInvite = isPublic + ? privateContactPubkeys + : difference(privateContactPubkeys, zombiesAndMembers); const closeDialog = () => { dispatch(updateInviteContactModal(null)); @@ -131,9 +145,21 @@ const InviteContactsDialogInner = (props: Props) => { const onClickOK = () => { if (selectedContacts.length > 0) { - if (isPublicConvo) { + if (isPublic) { void submitForOpenGroup(conversationId, selectedContacts); } else { + if (PubKey.is03Pubkey(conversationId)) { + const forcedAsPubkeys = selectedContacts as Array; + const action = groupInfoActions.currentDeviceGroupMembersChange({ + addMembersWithoutHistory: shareHistory ? [] : forcedAsPubkeys, + addMembersWithHistory: shareHistory ? 
forcedAsPubkeys : [], + removeMembers: [], + groupPk: conversationId, + alsoRemoveMessages: false, + }); + dispatch(action as any); + return; + } void submitForClosedGroup(conversationId, selectedContacts); } } @@ -157,8 +183,19 @@ const InviteContactsDialogInner = (props: Props) => { return ( + {hasContacts && isGroupV2 && } + + {/* TODO: localize those strings once out releasing those buttons for real Remove after QA */} + {isGroupV2 && isDevProd() && ( + <> + + Share History?{' '} + setShareHistory(!shareHistory)} /> + + + )}
{hasContacts ? ( validContactsForInvite.map((member: string) => ( @@ -182,12 +219,13 @@ const InviteContactsDialogInner = (props: Props) => { )}
-
+
+
        {
          buttonColor={SessionButtonColor.Danger}
          buttonType={SessionButtonType.Simple}
          onClick={closeDialog}
+         disabled={isProcessingUIChange}
        />
diff --git a/ts/components/dialog/ModeratorsAddDialog.tsx b/ts/components/dialog/ModeratorsAddDialog.tsx index a6d7c20fba..1bdf22bf6c 100644 --- a/ts/components/dialog/ModeratorsAddDialog.tsx +++ b/ts/components/dialog/ModeratorsAddDialog.tsx @@ -2,9 +2,9 @@ import { useState } from 'react'; import { useDispatch } from 'react-redux'; import { sogsV3AddAdmin } from '../../session/apis/open_group_api/sogsv3/sogsV3AddRemoveMods'; -import { getConversationController } from '../../session/conversations'; import { PubKey } from '../../session/types'; import { ToastUtils } from '../../session/utils'; +import { ConvoHub } from '../../session/conversations'; import { updateAddModeratorsModal } from '../../state/ducks/modalDialog'; import { useIsDarkTheme } from '../../state/selectors/theme'; import { SessionHeaderSearchInput } from '../SessionHeaderSearchInput'; @@ -22,7 +22,7 @@ export const AddModeratorsDialog = (props: Props) => { const dispatch = useDispatch(); const isDarkTheme = useIsDarkTheme(); - const convo = getConversationController().get(conversationId); + const convo = ConvoHub.use().get(conversationId); const [inputBoxValue, setInputBoxValue] = useState(''); const [addingInProgress, setAddingInProgress] = useState(false); @@ -51,7 +51,7 @@ export const AddModeratorsDialog = (props: Props) => { ToastUtils.pushFailedToAddAsModerator(); } else { const userDisplayName = - getConversationController().get(pubkey.key)?.getNicknameOrRealUsernameOrPlaceholder() || + ConvoHub.use().get(pubkey.key)?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('unknown'); window?.log?.info(`${pubkey.key} added as moderator...`); ToastUtils.pushUserAddedToModerators(userDisplayName); diff --git a/ts/components/dialog/ModeratorsRemoveDialog.tsx b/ts/components/dialog/ModeratorsRemoveDialog.tsx index a1befbace9..a7ebc96bfd 100644 --- a/ts/components/dialog/ModeratorsRemoveDialog.tsx +++ b/ts/components/dialog/ModeratorsRemoveDialog.tsx @@ -2,12 +2,12 @@ import { compact } from 'lodash'; import { useState } from 'react'; import { useDispatch } from 'react-redux'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { PubKey } from '../../session/types'; import { ToastUtils } from '../../session/utils'; import { Flex } from '../basic/Flex'; -import { useConversationPropsById } from '../../hooks/useParamSelector'; +import { useGroupAdmins, useIsPublic, useWeAreAdmin } from '../../hooks/useParamSelector'; import { sogsV3RemoveAdmins } from '../../session/apis/open_group_api/sogsv3/sogsV3AddRemoveMods'; import { updateRemoveModeratorsModal } from '../../state/ducks/modalDialog'; import { MemberListItem } from '../MemberListItem'; @@ -29,11 +29,10 @@ async function removeMods(convoId: string, modsToRemove: Array) { const modsToRemovePubkey = compact(modsToRemove.map(m => PubKey.from(m))); const modsToRemoveNames = modsToRemovePubkey.map( m => - getConversationController().get(m.key)?.getNicknameOrRealUsernameOrPlaceholder() || - window.i18n('unknown') + ConvoHub.use().get(m.key)?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('unknown') ); try { - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); const roomInfos = convo.toOpenGroupV2(); @@ -64,6 +63,10 @@ export const RemoveModeratorsDialog = (props: Props) => { dispatch(updateRemoveModeratorsModal(null)); }; + const weAreAdmin = useWeAreAdmin(conversationId); + const isPublic = useIsPublic(conversationId); + const 
groupAdmins = useGroupAdmins(conversationId); + const removeModsCall = async () => { if (modsToRemove.length) { setRemovingInProgress(true); @@ -75,12 +78,11 @@ export const RemoveModeratorsDialog = (props: Props) => { } }; - const convoProps = useConversationPropsById(conversationId); - if (!convoProps || !convoProps.isPublic || !convoProps.weAreAdmin) { + if (!isPublic || !weAreAdmin) { throw new Error('RemoveModeratorsDialog: convoProps invalid'); } - const existingMods = convoProps.groupAdmins || []; + const existingMods = groupAdmins || []; const hasMods = existingMods.length !== 0; return ( diff --git a/ts/components/dialog/OnionStatusPathDialog.tsx b/ts/components/dialog/OnionStatusPathDialog.tsx index 2221f24245..5e3cd1e4fe 100644 --- a/ts/components/dialog/OnionStatusPathDialog.tsx +++ b/ts/components/dialog/OnionStatusPathDialog.tsx @@ -1,5 +1,5 @@ import { ipcRenderer, shell } from 'electron'; -import { useState } from 'react'; +import { useState, SessionDataTestId } from 'react'; import { useDispatch } from 'react-redux'; import useHover from 'react-use/lib/useHover'; @@ -28,7 +28,7 @@ export type StatusLightType = { glowStartDelay: number; glowDuration: number; color?: string; - dataTestId?: string; + dataTestId?: SessionDataTestId; }; const StyledCountry = styled.div` @@ -165,7 +165,7 @@ const OnionPathModalInner = () => { export type OnionNodeStatusLightType = { glowStartDelay: number; glowDuration: number; - dataTestId?: string; + dataTestId?: SessionDataTestId; }; /** diff --git a/ts/components/dialog/OpenUrlModal.tsx b/ts/components/dialog/OpenUrlModal.tsx index de729a32d6..73ba6b663a 100644 --- a/ts/components/dialog/OpenUrlModal.tsx +++ b/ts/components/dialog/OpenUrlModal.tsx @@ -57,7 +57,7 @@ export function OpenUrlModal(props: OpenUrlModalState) { buttonColor={SessionButtonColor.Danger} buttonType={SessionButtonType.Simple} onClick={onClickOpen} - dataTestId="session-confirm-ok-button" + dataTestId="open-url-confirm-button" /> { } const { convoId, serverId } = msgProps; - const roomInfos = getConversationController().get(convoId).toOpenGroupV2(); + const roomInfos = ConvoHub.use().get(convoId).toOpenGroupV2(); const handleClose = () => { dispatch(updateReactClearAllModal(null)); diff --git a/ts/components/dialog/SessionNicknameDialog.tsx b/ts/components/dialog/SessionNicknameDialog.tsx index 1e3ac0f751..0ffa9daf5c 100644 --- a/ts/components/dialog/SessionNicknameDialog.tsx +++ b/ts/components/dialog/SessionNicknameDialog.tsx @@ -3,7 +3,7 @@ import { useState } from 'react'; import { useDispatch } from 'react-redux'; import styled from 'styled-components'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { changeNickNameModal } from '../../state/ducks/modalDialog'; import { SessionWrapperModal } from '../SessionWrapperModal'; @@ -53,7 +53,7 @@ export const SessionNicknameDialog = (props: Props) => { if (!conversationId) { throw new Error('Cant save without conversation id'); } - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); await conversation.setNickname(nickname, true); onClickClose(); }; diff --git a/ts/components/dialog/SessionSetPasswordDialog.tsx b/ts/components/dialog/SessionSetPasswordDialog.tsx index 67b5490c7b..68cfef2348 100644 --- a/ts/components/dialog/SessionSetPasswordDialog.tsx +++ b/ts/components/dialog/SessionSetPasswordDialog.tsx @@ -218,7 +218,6 @@ export class 
SessionSetPasswordDialog extends Component { ToastUtils.pushToastSuccess( 'setPasswordSuccessToast', - window.i18n.stripped('passwordSet'), window.i18n.stripped('passwordSetDescription') ); diff --git a/ts/components/dialog/StyledRootDialog.tsx b/ts/components/dialog/StyledRootDialog.tsx new file mode 100644 index 0000000000..d98a95fae8 --- /dev/null +++ b/ts/components/dialog/StyledRootDialog.tsx @@ -0,0 +1,3 @@ +import styled from 'styled-components'; + +export const StyledRootDialog = styled.div``; diff --git a/ts/components/dialog/UpdateGroupMembersDialog.tsx b/ts/components/dialog/UpdateGroupMembersDialog.tsx index 7e2bc5e2b0..63081243dc 100644 --- a/ts/components/dialog/UpdateGroupMembersDialog.tsx +++ b/ts/components/dialog/UpdateGroupMembersDialog.tsx @@ -1,8 +1,10 @@ -import _ from 'lodash'; +import _, { difference } from 'lodash'; +import { useMemo, useState } from 'react'; import { useDispatch } from 'react-redux'; import useKey from 'react-use/lib/useKey'; import styled from 'styled-components'; +import { PubkeyType } from 'libsession_util_nodejs'; import { ToastUtils, UserUtils } from '../../session/utils'; import { updateGroupMembersModal } from '../../state/ducks/modalDialog'; @@ -11,17 +13,33 @@ import { SessionWrapperModal } from '../SessionWrapperModal'; import { SessionButton, SessionButtonColor, SessionButtonType } from '../basic/SessionButton'; import { SpacerLG } from '../basic/Text'; -import { useConversationPropsById, useWeAreAdmin } from '../../hooks/useParamSelector'; +import { + useGroupAdmins, + useIsPrivate, + useIsPublic, + useSortedGroupMembers, + useWeAreAdmin, +} from '../../hooks/useParamSelector'; import { useSet } from '../../hooks/useSet'; -import { getConversationController } from '../../session/conversations'; -import { initiateClosedGroupUpdate } from '../../session/group/closed-group'; +import { ConvoHub } from '../../session/conversations'; +import { ClosedGroup } from '../../session/group/closed-group'; +import { PubKey } from '../../session/types'; +import { hasClosedGroupV2QAButtons } from '../../shared/env_vars'; +import { groupInfoActions } from '../../state/ducks/metaGroups'; +import { + useMemberGroupChangePending, + useStateOf03GroupMembers, +} from '../../state/selectors/groups'; +import { useSelectedIsGroupV2 } from '../../state/selectors/selectedConversation'; +import { SessionSpinner } from '../loading'; +import { SessionToggle } from '../basic/SessionToggle'; type Props = { conversationId: string; }; -const StyledClassicMemberList = styled.div` +const StyledMemberList = styled.div` max-height: 240px; `; @@ -29,7 +47,7 @@ const StyledClassicMemberList = styled.div` * Admins are always put first in the list of group members. * Also, admins have a little crown on their avatar. */ -const ClassicMemberList = (props: { +const MemberList = (props: { convoId: string; selectedMembers: Array; onSelect: (m: string) => void; @@ -37,17 +55,22 @@ const ClassicMemberList = (props: { }) => { const { onSelect, convoId, onUnselect, selectedMembers } = props; const weAreAdmin = useWeAreAdmin(convoId); - const convoProps = useConversationPropsById(convoId); - if (!convoProps) { - throw new Error('MemberList needs convoProps'); - } - let currentMembers = convoProps.members || []; - const { groupAdmins } = convoProps; - currentMembers = [...currentMembers].sort(m => (groupAdmins?.includes(m) ? 
-1 : 0)); + const isV2Group = useSelectedIsGroupV2(); + + const groupAdmins = useGroupAdmins(convoId); + const groupMembers = useSortedGroupMembers(convoId); + const groupMembers03Group = useStateOf03GroupMembers(convoId); + + const sortedMembersNon03 = useMemo( + () => [...groupMembers].sort(m => (groupAdmins?.includes(m) ? -1 : 0)), + [groupMembers, groupAdmins] + ); + + const sortedMembers = isV2Group ? groupMembers03Group.map(m => m.pubkeyHex) : sortedMembersNon03; return ( <> - {currentMembers.map(member => { + {sortedMembers.map(member => { const isSelected = (weAreAdmin && selectedMembers.includes(member)) || false; const isAdmin = groupAdmins?.includes(member); @@ -59,7 +82,10 @@ const ClassicMemberList = (props: { onSelect={onSelect} onUnselect={onUnselect} isAdmin={isAdmin} + hideRadioButton={isAdmin} // we want to hide the toggle for admins are they are not selectable disableBg={true} + displayGroupStatus={isV2Group && weAreAdmin} + groupPk={convoId} /> ); })} @@ -68,11 +94,11 @@ const ClassicMemberList = (props: { }; async function onSubmit(convoId: string, membersAfterUpdate: Array) { - const convoFound = getConversationController().get(convoId); + const convoFound = ConvoHub.use().get(convoId); if (!convoFound || !convoFound.isGroup()) { throw new Error('Invalid convo for updateGroupMembersDialog'); } - if (!convoFound.isAdmin(UserUtils.getOurPubKeyStrFromCache())) { + if (!convoFound.weAreAdminUnblinded()) { window.log.warn('Skipping update of members, we are not the admin'); return; } @@ -87,8 +113,8 @@ async function onSubmit(convoId: string, membersAfterUpdate: Array) { // We consider that the admin ALWAYS wants to remove zombies (actually they should be removed // automatically by him when the LEFT message is received) - const existingMembers = convoFound.get('members') || []; - const existingZombies = convoFound.get('zombies') || []; + const existingMembers = convoFound.getGroupMembers() || []; + const existingZombies = convoFound.getGroupZombies() || []; const allExistingMembersWithZombies = _.uniq(existingMembers.concat(existingZombies)); @@ -115,40 +141,54 @@ async function onSubmit(convoId: string, membersAfterUpdate: Array) { memberAfterUpdate => !_.includes(membersToRemove, memberAfterUpdate) ); - void initiateClosedGroupUpdate( + void ClosedGroup.initiateClosedGroupUpdate( convoId, - convoFound.get('displayNameInProfile') || 'Unknown', + convoFound.getRealSessionUsername() || 'Unknown', filteredMembers ); } export const UpdateGroupMembersDialog = (props: Props) => { const { conversationId } = props; - const convoProps = useConversationPropsById(conversationId); - const existingMembers = convoProps?.members || []; + const isPrivate = useIsPrivate(conversationId); + const isPublic = useIsPublic(conversationId); + const weAreAdmin = useWeAreAdmin(conversationId); + const existingMembers = useSortedGroupMembers(conversationId) || []; + const groupAdmins = useGroupAdmins(conversationId); + const isProcessingUIChange = useMemberGroupChangePending(); + const [alsoRemoveMessages, setAlsoRemoveMessages] = useState(false); - const { - addTo, - removeFrom, - uniqueValues: membersToKeepWithUpdate, - } = useSet(existingMembers); + const { addTo, removeFrom, uniqueValues: membersToRemove } = useSet([]); const dispatch = useDispatch(); - if (!convoProps || convoProps.isPrivate || convoProps.isPublic) { + if (isPrivate || isPublic) { throw new Error('UpdateGroupMembersDialog invalid convoProps'); } - const weAreAdmin = convoProps.weAreAdmin || false; - const closeDialog = () => 
{ dispatch(updateGroupMembersModal(null)); }; const onClickOK = async () => { - // const members = getWouldBeMembers(this.state.contactList).map(d => d.id); - // do not include zombies here, they are removed by force - await onSubmit(conversationId, membersToKeepWithUpdate); + if (PubKey.is03Pubkey(conversationId)) { + const groupv2Action = groupInfoActions.currentDeviceGroupMembersChange({ + groupPk: conversationId, + addMembersWithHistory: [], + addMembersWithoutHistory: [], + removeMembers: membersToRemove as Array, + alsoRemoveMessages, + }); + dispatch(groupv2Action as any); + + return; // keeping the dialog open until the async thunk is done + } + + await onSubmit( + conversationId, + difference(existingMembers, membersToRemove) as Array + ); + closeDialog(); }; @@ -156,25 +196,25 @@ export const UpdateGroupMembersDialog = (props: Props) => { return event.key === 'Esc' || event.key === 'Escape'; }, closeDialog); - const onAdd = (member: string) => { + const onSelect = (member: string) => { if (!weAreAdmin) { - window?.log?.warn('Only group admin can add members!'); + window?.log?.warn('Only group admin can select!'); + return; + } + + if (groupAdmins?.includes(member)) { + ToastUtils.pushCannotRemoveGroupAdmin(); + window?.log?.warn(`User ${member} cannot be selected as they are an admin.`); + return; } addTo(member); }; - const onRemove = (member: string) => { + const onUnselect = (member: string) => { if (!weAreAdmin) { - window?.log?.warn('Only group admin can remove members!'); - return; - } - if (convoProps.groupAdmins?.includes(member)) { - ToastUtils.pushCannotRemoveCreatorFromGroup(); - window?.log?.warn( - `User ${member} cannot be removed as they are the creator of the closed group.` - ); + window?.log?.warn('Only group admin can unselect members!'); return; } @@ -182,33 +222,51 @@ export const UpdateGroupMembersDialog = (props: Props) => { }; const showNoMembersMessage = existingMembers.length === 0; - const okText = window.i18n('okay'); - const cancelText = window.i18n('cancel'); - const titleText = window.i18n('groupMembers'); return ( - - - + {hasClosedGroupV2QAButtons() && weAreAdmin && PubKey.is03Pubkey(conversationId) ? ( + <> + Also remove messages: + { + setAlsoRemoveMessages(!alsoRemoveMessages); + }} + /> + + ) : null} + + - + {showNoMembersMessage &&

{window.i18n('groupMembersNone')}

} + +
{weAreAdmin && ( - + )}
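A minimal sketch (not part of the patch) of the removal flow the reworked dialog relies on for legacy groups: the UI only tracks the members ticked for removal via useSet([]), and the list handed to onSubmit is derived with lodash's difference(), as in onClickOK above. The pubkeys below are placeholders.

import { difference } from 'lodash';

// members currently in the group, and the subset the admin ticked for removal
const existingMembers = ['05aaa', '05bbb', '05ccc'];
const membersToRemove = ['05bbb'];

// difference() keeps every existing member that was not selected for removal,
// which is what onClickOK passes to onSubmit for non-03 groups
const membersAfterUpdate = difference(existingMembers, membersToRemove); // ['05aaa', '05ccc']
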
diff --git a/ts/components/dialog/UpdateGroupNameDialog.tsx b/ts/components/dialog/UpdateGroupNameDialog.tsx index 01bf187e8f..8f3e23fd46 100644 --- a/ts/components/dialog/UpdateGroupNameDialog.tsx +++ b/ts/components/dialog/UpdateGroupNameDialog.tsx @@ -1,21 +1,26 @@ /* eslint-disable @typescript-eslint/no-misused-promises */ -import autoBind from 'auto-bind'; +import { useState } from 'react'; import { motion } from 'framer-motion'; -import { Component } from 'react'; +import { useDispatch } from 'react-redux'; +import useKey from 'react-use/lib/useKey'; import styled from 'styled-components'; -import { ConversationModel } from '../../models/conversation'; -import { getConversationController } from '../../session/conversations'; -import { initiateClosedGroupUpdate } from '../../session/group/closed-group'; +import { useIsClosedGroup, useIsPublic } from '../../hooks/useParamSelector'; +import { ConvoHub } from '../../session/conversations'; +import { ClosedGroup } from '../../session/group/closed-group'; import { initiateOpenGroupUpdate } from '../../session/group/open-group'; +import { PubKey } from '../../session/types'; +import LIBSESSION_CONSTANTS from '../../session/utils/libsession/libsession_constants'; +import { groupInfoActions } from '../../state/ducks/metaGroups'; import { updateGroupNameModal } from '../../state/ducks/modalDialog'; +import { useGroupNameChangeFromUIPending } from '../../state/selectors/groups'; import { THEME_GLOBALS } from '../../themes/globals'; import { pickFileForAvatar } from '../../types/attachments/VisualAttachment'; import { SessionWrapperModal } from '../SessionWrapperModal'; import { Avatar, AvatarSize } from '../avatar/Avatar'; import { SessionButton, SessionButtonColor, SessionButtonType } from '../basic/SessionButton'; import { SpacerMD } from '../basic/Text'; -import LIBSESSION_CONSTANTS from '../../session/utils/libsession/libsession_constants'; +import { SessionSpinner } from '../loading'; const StyledErrorMessage = styled(motion.p)` text-align: center; @@ -24,214 +29,193 @@ const StyledErrorMessage = styled(motion.p)` user-select: none; `; -type Props = { +function GroupAvatar({ + isPublic, + conversationId, + fireInputEvent, + newAvatarObjecturl, + oldAvatarPath, +}: { + isPublic: boolean; conversationId: string; -}; - -interface State { - groupName: string | undefined; - errorDisplayed: boolean; - errorMessage: string; - oldAvatarPath: string | null; newAvatarObjecturl: string | null; -} + oldAvatarPath: string | null; + fireInputEvent: () => Promise; +}) { + if (!isPublic) { + return null; + } -export class UpdateGroupNameDialog extends Component { - private readonly convo: ConversationModel; + return ( +
+
+ +
+
+
+ ); +} - constructor(props: Props) { - super(props); +export function UpdateGroupNameDialog(props: { conversationId: string }) { + const dispatch = useDispatch(); + const { conversationId } = props; + const [errorMsg, setErrorMsg] = useState(''); + const [errorDisplayed, setErrorDisplayed] = useState(false); + const [newAvatarObjecturl, setNewAvatarObjecturl] = useState(null); + const isCommunity = useIsPublic(conversationId); + const isClosedGroup = useIsClosedGroup(conversationId); + const convo = ConvoHub.use().get(conversationId); + const isNameChangePending = useGroupNameChangeFromUIPending(); + + if (!convo) { + throw new Error('UpdateGroupNameDialog corresponding convo not found'); + } - autoBind(this); - this.convo = getConversationController().get(props.conversationId); + const oldAvatarPath = convo?.getAvatarPath() || null; + const originalGroupName = convo?.getRealSessionUsername(); + const [newGroupName, setNewGroupName] = useState(originalGroupName); - this.state = { - groupName: this.convo.getRealSessionUsername(), - errorDisplayed: false, - errorMessage: 'placeholder', - oldAvatarPath: this.convo.getAvatarPath(), - newAvatarObjecturl: null, - }; + function closeDialog() { + dispatch(updateGroupNameModal(null)); } - public componentDidMount() { - window.addEventListener('keyup', this.onKeyUp); + function onShowError(msg: string) { + if (errorMsg === msg) { + return; + } + setErrorMsg(msg); + setErrorDisplayed(true); + + setTimeout(() => { + setErrorDisplayed(false); + }, 3000); } - public componentWillUnmount() { - window.removeEventListener('keyup', this.onKeyUp); + async function fireInputEvent() { + const scaledObjectUrl = await pickFileForAvatar(); + if (scaledObjectUrl) { + setNewAvatarObjecturl(scaledObjectUrl); + } } - public onClickOK() { - const { groupName, newAvatarObjecturl, oldAvatarPath } = this.state; - const trimmedGroupName = groupName?.trim(); + function onClickOK() { + if (isNameChangePending) { + return; + } + const trimmedGroupName = newGroupName?.trim(); if (!trimmedGroupName) { - this.onShowError(window.i18n('groupNameEnterPlease')); + onShowError(window.i18n('groupNameEnterPlease')); return; } if (trimmedGroupName.length > LIBSESSION_CONSTANTS.BASE_GROUP_MAX_NAME_LENGTH) { - this.onShowError(window.i18n('groupNameEnterShorter')); + onShowError(window.i18n('groupNameEnterShorter')); return; } + onShowError(''); - if ( - trimmedGroupName !== this.convo.getRealSessionUsername() || - newAvatarObjecturl !== oldAvatarPath - ) { - if (this.convo.isPublic()) { - void initiateOpenGroupUpdate(this.convo.id, trimmedGroupName, { + if (trimmedGroupName !== originalGroupName || newAvatarObjecturl !== oldAvatarPath) { + if (isCommunity) { + void initiateOpenGroupUpdate(conversationId, trimmedGroupName, { objectUrl: newAvatarObjecturl, }); + closeDialog(); } else { - const members = this.convo.get('members') || []; - - void initiateClosedGroupUpdate(this.convo.id, trimmedGroupName, members); + if (PubKey.is03Pubkey(conversationId)) { + const updateNameAction = groupInfoActions.currentDeviceGroupNameChange({ + groupPk: conversationId, + newName: trimmedGroupName, + }); + dispatch(updateNameAction as any); + return; // keeping the dialog open until the async thunk is done (via isNameChangePending) + } + void ClosedGroup.initiateClosedGroupUpdate(conversationId, trimmedGroupName, null); + closeDialog(); } } - - this.closeDialog(); } - public render() { - const okText = window.i18n('okay'); - const cancelText = window.i18n('cancel'); - - const errorMsg = 
this.state.errorMessage; - const isAdmin = !this.convo.isPublic(); - - return ( - this.closeDialog()} - additionalClassName="update-group-dialog" - > - {this.state.errorDisplayed ? ( - <> - - - {errorMsg} - - - - ) : null} - - {this.renderAvatar()} - - - {isAdmin ? ( - - ) : null} - -
- - -
-
- ); - } + useKey('Escape', closeDialog); + useKey('Esc', closeDialog); + useKey('Enter', onClickOK); - private onShowError(msg: string) { - if (this.state.errorDisplayed) { - return; - } - - this.setState({ - errorDisplayed: true, - errorMessage: msg, - }); - - setTimeout(() => { - this.setState({ - errorDisplayed: false, - }); - }, 3000); - } - - private onKeyUp(event: any) { - switch (event.key) { - case 'Enter': - this.onClickOK(); - break; - case 'Esc': - case 'Escape': - this.closeDialog(); - break; - default: - } - } - - private closeDialog() { - window.removeEventListener('keyup', this.onKeyUp); - - window.inboxStore?.dispatch(updateGroupNameModal(null)); + if (!isClosedGroup && !isCommunity) { + throw new Error('groupNameUpdate dialog only works for communities and closed groups'); } - private onGroupNameChanged(event: any) { - const groupName = event.target.value; - this.setState(state => { - return { - ...state, - groupName, - }; - }); - } - - private renderAvatar() { - const isPublic = this.convo.isPublic(); - const pubkey = this.convo.id; - - const { newAvatarObjecturl, oldAvatarPath } = this.state; - - if (!isPublic) { - return undefined; - } - - return ( -
-
- -
-
+ const okText = window.i18n('okay'); + const cancelText = window.i18n('cancel'); + + const isAdmin = !isCommunity; + // return null; + + return ( + closeDialog()} + additionalClassName="update-group-dialog" + > + {errorMsg ? ( + <> + + + {errorMsg} + + + + ) : null} + + + + + {isAdmin ? ( + setNewGroupName(e.target.value)} + tabIndex={0} + required={true} + aria-required={true} + autoFocus={true} + maxLength={LIBSESSION_CONSTANTS.BASE_GROUP_MAX_NAME_LENGTH} + data-testid="group-name-input" + /> + ) : null} + + + +
+ +
- ); - } - - private async fireInputEvent() { - const scaledObjectUrl = await pickFileForAvatar(); - if (scaledObjectUrl) { - this.setState({ newAvatarObjecturl: scaledObjectUrl }); - } - } +
+ ); } diff --git a/ts/components/dialog/UserDetailsDialog.tsx b/ts/components/dialog/UserDetailsDialog.tsx index 7db5b0ee2c..ab7dd2ab08 100644 --- a/ts/components/dialog/UserDetailsDialog.tsx +++ b/ts/components/dialog/UserDetailsDialog.tsx @@ -1,7 +1,7 @@ import { useState } from 'react'; import useKey from 'react-use/lib/useKey'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { openConversationWithMessages } from '../../state/ducks/conversations'; import { updateUserDetailsModal, UserDetailsModalState } from '../../state/ducks/modalDialog'; import { Avatar, AvatarSize } from '../avatar/Avatar'; @@ -26,9 +26,9 @@ export const UserDetailsDialog = (props: UserDetailsModalState) => { if (!props) { return; } - const convo = getConversationController().get(props.conversationId); + const convo = ConvoHub.use().get(props.conversationId); - const conversation = await getConversationController().getOrCreateAndWait( + const conversation = await ConvoHub.use().getOrCreateAndWait( convo.id, ConversationTypeEnum.PRIVATE ); diff --git a/ts/components/dialog/blockOrUnblock/BlockOrUnblockDialog.tsx b/ts/components/dialog/blockOrUnblock/BlockOrUnblockDialog.tsx index fea7051e75..f24ea0969c 100644 --- a/ts/components/dialog/blockOrUnblock/BlockOrUnblockDialog.tsx +++ b/ts/components/dialog/blockOrUnblock/BlockOrUnblockDialog.tsx @@ -14,6 +14,7 @@ import { SessionButton, SessionButtonColor, SessionButtonType } from '../../basi import { StyledModalDescriptionContainer } from '../shared/ModalDescriptionContainer'; import { BlockOrUnblockModalState } from './BlockOrUnblockModalState'; import type { LocalizerComponentPropsObject } from '../../../types/localizer'; +import { UserSync } from '../../../session/utils/job_runners/jobs/UserSyncJob'; type ModalState = NonNullable; @@ -77,6 +78,7 @@ export const BlockOrUnblockDialog = ({ pubkeys, action, onConfirmed }: NonNullab } closeModal(); onConfirmed?.(); + await UserSync.queueNewJobIfNeeded(); }, [action, onConfirmed, pubkeys]); if (isEmpty(pubkeys)) { @@ -104,7 +106,7 @@ export const BlockOrUnblockDialog = ({ pubkeys, action, onConfirmed }: NonNullab buttonColor={SessionButtonColor.White} onClick={closeModal} text={window.i18n('cancel')} - dataTestId="session-cancel-ok-button" + dataTestId="session-confirm-cancel-button" />
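A hedged sketch of the accessor pattern this patch migrates call sites to (ConvoHub.use() replacing getConversationController()). The class below is illustrative only and far smaller than the real ConvoHub in ts/session/conversations; it only shows the lazily created singleton behind a static use() accessor.

// illustrative stand-in, not the real ConvoHub
class ConvoHubSketch {
  private static instance: ConvoHubSketch | null = null;
  private readonly conversations = new Map<string, { id: string }>();

  // use() lazily creates the hub and then always returns the same shared instance
  public static use(): ConvoHubSketch {
    if (!ConvoHubSketch.instance) {
      ConvoHubSketch.instance = new ConvoHubSketch();
    }
    return ConvoHubSketch.instance;
  }

  public get(id: string): { id: string } | undefined {
    return this.conversations.get(id);
  }
}

// call sites then read as ConvoHub.use().get(pubkey), mirroring the hunks above and below
const maybeConvo = ConvoHubSketch.use().get('05aaa');
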
diff --git a/ts/components/icon/SessionIcon.tsx b/ts/components/icon/SessionIcon.tsx index bdb96da659..c116294ccb 100644 --- a/ts/components/icon/SessionIcon.tsx +++ b/ts/components/icon/SessionIcon.tsx @@ -1,6 +1,6 @@ +import { memo, SessionDataTestId } from 'react'; import styled, { css, CSSProperties, keyframes } from 'styled-components'; -import { memo } from 'react'; import { icons, SessionIconSize, SessionIconType } from '.'; import { ClipRule, FillRule } from './Icons'; @@ -19,10 +19,10 @@ export type SessionIconProps = { glowStartDelay?: number; noScale?: boolean; backgroundColor?: string; + dataTestId?: SessionDataTestId; style?: CSSProperties; - dataTestId?: string; unreadCount?: number; - /** for some usecases, we want to fix the width of the icon and have the height be calculated from the ratio of the icon */ + /** for some use cases, we want to fix the width of the icon and have the height be calculated from the ratio of the icon */ sizeIsWidth?: boolean; }; @@ -145,7 +145,7 @@ const SessionSvg = ( viewBox: string; path: string | Array; style?: CSSProperties; - dataTestId?: string; + dataTestId?: SessionDataTestId; } ) => { const colorSvg = props.iconColor ? props.iconColor : 'var(--button-icon-stroke-color)'; diff --git a/ts/components/icon/SessionIconButton.tsx b/ts/components/icon/SessionIconButton.tsx index 1853a0f93a..247110f589 100644 --- a/ts/components/icon/SessionIconButton.tsx +++ b/ts/components/icon/SessionIconButton.tsx @@ -1,6 +1,6 @@ import classNames from 'classnames'; import _ from 'lodash'; -import { KeyboardEvent, MouseEvent, ReactNode, forwardRef, memo } from 'react'; +import { KeyboardEvent, MouseEvent, SessionDataTestId, ReactNode, forwardRef, memo } from 'react'; import styled from 'styled-components'; import { SessionIcon, SessionIconProps } from './SessionIcon'; @@ -9,8 +9,9 @@ export type SessionIconButtonProps = SessionIconProps & { isSelected?: boolean; isHidden?: boolean; margin?: string; + dataTestId?: SessionDataTestId; + dataTestIdIcon?: SessionDataTestId; padding?: string; - dataTestIdIcon?: string; id?: string; title?: string; ariaLabel?: string; diff --git a/ts/components/inputs/SessionInput.tsx b/ts/components/inputs/SessionInput.tsx index b7ef7c1dde..59c2c469db 100644 --- a/ts/components/inputs/SessionInput.tsx +++ b/ts/components/inputs/SessionInput.tsx @@ -1,4 +1,12 @@ -import { ChangeEvent, ReactNode, RefObject, useEffect, useRef, useState } from 'react'; +import { + ChangeEvent, + ReactNode, + RefObject, + SessionDataTestId, + useEffect, + useRef, + useState, +} from 'react'; import { motion } from 'framer-motion'; import { isEmpty, isEqual } from 'lodash'; @@ -185,13 +193,13 @@ const ErrorItem = (props: { id: string; error: string }) => { ); }; -type ShowHideButtonStrings = { hide: string; show: string }; +type ShowHideButtonStrings = { hide: T; show: T }; type ShowHideButtonProps = { forceShow: boolean; toggleForceShow: () => void; error: boolean; - ariaLabels?: ShowHideButtonStrings; - dataTestIds?: ShowHideButtonStrings; + ariaLabels?: ShowHideButtonStrings; + dataTestIds?: ShowHideButtonStrings; }; const ShowHideButton = (props: ShowHideButtonProps) => { @@ -255,11 +263,11 @@ type Props = { autoFocus?: boolean; disableOnBlurEvent?: boolean; inputRef?: RefObject; - inputDataTestId?: string; + inputDataTestId?: SessionDataTestId; id?: string; enableShowHideButton?: boolean; - showHideButtonAriaLabels?: ShowHideButtonStrings; - showHideButtonDataTestIds?: ShowHideButtonStrings; + showHideButtonAriaLabels?: ShowHideButtonStrings; + 
showHideButtonDataTestIds?: ShowHideButtonStrings; ctaButton?: ReactNode; monospaced?: boolean; textSize?: TextSizes; diff --git a/ts/components/leftpane/ActionsPanel.tsx b/ts/components/leftpane/ActionsPanel.tsx index 4a67bc81bf..c5c96ca032 100644 --- a/ts/components/leftpane/ActionsPanel.tsx +++ b/ts/components/leftpane/ActionsPanel.tsx @@ -8,20 +8,18 @@ import useTimeoutFn from 'react-use/lib/useTimeoutFn'; import useThrottleFn from 'react-use/lib/useThrottleFn'; import { Data } from '../../data/data'; -import { getConversationController } from '../../session/conversations'; -import { getMessageQueue } from '../../session/sending'; -import { syncConfigurationIfNeeded } from '../../session/utils/sync/syncUtils'; +import { ConvoHub } from '../../session/conversations'; import { clearSearch } from '../../state/ducks/search'; import { resetLeftOverlayMode, SectionType, showLeftPaneSection } from '../../state/ducks/section'; import { - getGlobalUnreadMessageCount, getOurPrimaryConversation, + useGlobalUnreadMessageCount, } from '../../state/selectors/conversations'; import { getFocusedSection } from '../../state/selectors/section'; import { getOurNumber } from '../../state/selectors/user'; -import { cleanUpOldDecryptedMedias } from '../../session/crypto/DecryptedAttachmentsManager'; +import { DecryptedAttachmentsManager } from '../../session/crypto/DecryptedAttachmentsManager'; import { DURATION } from '../../session/constants'; @@ -38,27 +36,27 @@ import { SessionIconButton } from '../icon/SessionIconButton'; import { LeftPaneSectionContainer } from './LeftPaneSectionContainer'; import { SettingsKey } from '../../data/settings-key'; +import { SnodePool } from '../../session/apis/snode_api/snodePool'; +import { UserSync } from '../../session/utils/job_runners/jobs/UserSyncJob'; +import { forceSyncConfigurationNowIfNeeded } from '../../session/utils/sync/syncUtils'; import { useFetchLatestReleaseFromFileServer } from '../../hooks/useFetchLatestReleaseFromFileServer'; import { useHotkey } from '../../hooks/useHotkey'; -import { - forceRefreshRandomSnodePool, - getFreshSwarmFor, -} from '../../session/apis/snode_api/snodePool'; -import { ConfigurationSync } from '../../session/utils/job_runners/jobs/ConfigurationSyncJob'; -import { getIsModalVisble } from '../../state/selectors/modal'; import { useIsDarkTheme } from '../../state/selectors/theme'; import { switchThemeTo } from '../../themes/switchTheme'; -import { ReleasedFeatures } from '../../util/releaseFeature'; import { getOppositeTheme } from '../../util/theme'; import { SessionNotificationCount } from '../icon/SessionNotificationCount'; +import { getIsModalVisible } from '../../state/selectors/modal'; + +import { ReleasedFeatures } from '../../util/releaseFeature'; +import { MessageQueue } from '../../session/sending'; const Section = (props: { type: SectionType }) => { const ourNumber = useSelector(getOurNumber); - const globalUnreadMessageCount = useSelector(getGlobalUnreadMessageCount); + const globalUnreadMessageCount = useGlobalUnreadMessageCount(); const dispatch = useDispatch(); const { type } = props; - const isModalVisible = useSelector(getIsModalVisble); + const isModalVisible = useSelector(getIsModalVisible); const isDarkTheme = useIsDarkTheme(); const focusedSection = useSelector(getFocusedSection); const isSelected = focusedSection === props.type; @@ -105,6 +103,7 @@ const Section = (props: { type: SectionType }) => { onAvatarClick={handleClick} pubkey={ourNumber} dataTestId="leftpane-primary-avatar" + 
imageDataTestId={`img-leftpane-primary-avatar`} /> ); } @@ -162,12 +161,12 @@ const cleanUpMediasInterval = DURATION.MINUTES * 60; // Do this only if we created a new account id, or if we already received the initial configuration message const triggerSyncIfNeeded = async () => { const us = UserUtils.getOurPubKeyStrFromCache(); - await getConversationController().get(us).setDidApproveMe(true, true); - await getConversationController().get(us).setIsApproved(true, true); + await ConvoHub.use().get(us).setDidApproveMe(true, true); + await ConvoHub.use().get(us).setIsApproved(true, true); const didWeHandleAConfigurationMessageAlready = (await Data.getItemById(SettingsKey.hasSyncedInitialConfigurationItem))?.value || false; if (didWeHandleAConfigurationMessageAlready) { - await syncConfigurationIfNeeded(); + await forceSyncConfigurationNowIfNeeded(); } }; @@ -196,7 +195,8 @@ const doAppStartUp = async () => { void triggerSyncIfNeeded(); void getSwarmPollingInstance().start(); void loadDefaultRooms(); - void getFreshSwarmFor(UserUtils.getOurPubKeyStrFromCache()); // refresh our swarm on start to speed up the first message fetching event + void SnodePool.getFreshSwarmFor(UserUtils.getOurPubKeyStrFromCache()); // refresh our swarm on start to speed up the first message fetching event + void Data.cleanupOrphanedAttachments(); // TODOLATER make this a job of the JobRunner debounce(triggerAvatarReUploadIfNeeded, 200); @@ -209,15 +209,30 @@ const doAppStartUp = async () => { global.setTimeout(() => { // init the messageQueue. In the constructor, we add all not send messages // this call does nothing except calling the constructor, which will continue sending message in the pipeline - void getMessageQueue().processAllPending(); + void MessageQueue.use().processAllPending(); }, 3000); global.setTimeout(() => { // Schedule a confSyncJob in some time to let anything incoming from the network be applied and see if there is a push needed - void ConfigurationSync.queueNewJobIfNeeded(); + void UserSync.queueNewJobIfNeeded(); }, 20000); }; +function useUpdateBadgeCount() { + const globalUnreadMessageCount = useGlobalUnreadMessageCount(); + + // Reuse the unreadToShow from the global state to update the badge count + useThrottleFn( + (unreadCount: number) => { + if (globalUnreadMessageCount !== undefined) { + ipcRenderer.send('update-badge-count', unreadCount); + } + }, + 2000, + [globalUnreadMessageCount] + ); +} + /** * ActionsPanel is the far left banner (not the left pane). * The panel with buttons to switch between the message/contact/settings/theme views @@ -227,7 +242,7 @@ export const ActionsPanel = () => { const ourPrimaryConversation = useSelector(getOurPrimaryConversation); // this maxi useEffect is called only once: when the component is mounted. 
- // For the action panel, it means this is called only one per app start/with a user loggedin + // For the action panel, it means this is called only one per app start/with a user logged in useEffect(() => { void doAppStartUp(); }, []); @@ -240,28 +255,20 @@ export const ActionsPanel = () => { return () => clearTimeout(timeout); }, []); - const globalUnreadMessageCount = useSelector(getGlobalUnreadMessageCount); + useUpdateBadgeCount(); - // Reuse the unreadToShow from the global state to update the badge count - useThrottleFn( - (unreadCount: number) => { - if (globalUnreadMessageCount !== undefined) { - ipcRenderer.send('update-badge-count', unreadCount); - } - }, - 2000, - [globalUnreadMessageCount] + useInterval( + DecryptedAttachmentsManager.cleanUpOldDecryptedMedias, + startCleanUpMedia ? cleanUpMediasInterval : null ); - useInterval(cleanUpOldDecryptedMedias, startCleanUpMedia ? cleanUpMediasInterval : null); - useFetchLatestReleaseFromFileServer(); useInterval(() => { if (!ourPrimaryConversation) { return; } - void syncConfigurationIfNeeded(); + void forceSyncConfigurationNowIfNeeded(); }, DURATION.DAYS * 2); useInterval(() => { @@ -270,7 +277,7 @@ export const ActionsPanel = () => { } // trigger an updates from the snodes every hour - void forceRefreshRandomSnodePool(); + void SnodePool.forceRefreshRandomSnodePool(); }, DURATION.HOURS * 1); useTimeoutFn(() => { @@ -278,7 +285,7 @@ export const ActionsPanel = () => { return; } // trigger an updates from the snodes after 5 minutes, once - void forceRefreshRandomSnodePool(); + void SnodePool.forceRefreshRandomSnodePool(); }, DURATION.MINUTES * 5); useInterval(() => { diff --git a/ts/components/leftpane/LeftPaneMessageSection.tsx b/ts/components/leftpane/LeftPaneMessageSection.tsx index 71cd42f971..249dcfb227 100644 --- a/ts/components/leftpane/LeftPaneMessageSection.tsx +++ b/ts/components/leftpane/LeftPaneMessageSection.tsx @@ -14,7 +14,7 @@ import { assertUnreachable } from '../../types/sqlSharedTypes'; import { SessionSearchInput } from '../SessionSearchInput'; import { StyledLeftPaneList } from './LeftPaneList'; import { ConversationListItem } from './conversation-list-item/ConversationListItem'; -import { OverlayClosedGroup } from './overlay/OverlayClosedGroup'; +import { OverlayLegacyClosedGroup, OverlayClosedGroupV2 } from './overlay/OverlayClosedGroup'; import { OverlayCommunity } from './overlay/OverlayCommunity'; import { OverlayInvite } from './overlay/OverlayInvite'; import { OverlayMessage } from './overlay/OverlayMessage'; @@ -46,7 +46,11 @@ const ClosableOverlay = () => { case 'open-group': return ; case 'closed-group': - return ; + return window.sessionFeatureFlags.useClosedGroupV2 ? 
( + + ) : ( + + ); case 'message': return ; case 'message-requests': diff --git a/ts/components/leftpane/LeftPaneSettingSection.tsx b/ts/components/leftpane/LeftPaneSettingSection.tsx index 5371cc9832..2bd4a509c9 100644 --- a/ts/components/leftpane/LeftPaneSettingSection.tsx +++ b/ts/components/leftpane/LeftPaneSettingSection.tsx @@ -1,3 +1,4 @@ +import { SessionDataTestId } from 'react'; import { useDispatch, useSelector } from 'react-redux'; import styled from 'styled-components'; @@ -45,6 +46,7 @@ const StyledIconContainer = styled.div` type Categories = { id: SessionSettingCategory; title: string; + dataTestId: SessionDataTestId; icon: { type: SessionIconType; size: number; @@ -58,59 +60,56 @@ const getCategories = (): Array => { { id: 'privacy' as const, title: window.i18n('sessionPrivacy'), - icon: { type: 'padlock', ...forcedSize }, + icon: { type: 'padlock' as const, ...forcedSize }, }, { id: 'notifications' as const, title: window.i18n('sessionNotifications'), - icon: { type: 'speaker', ...forcedSize }, + icon: { type: 'speaker' as const, ...forcedSize }, }, { id: 'conversations' as const, title: window.i18n('sessionConversations'), - icon: { type: 'chatBubble', ...forcedSize }, + icon: { type: 'chatBubble' as const, ...forcedSize }, }, { id: 'messageRequests' as const, title: window.i18n('sessionMessageRequests'), - icon: { type: 'messageRequest', ...forcedSize }, + icon: { type: 'messageRequest' as const, ...forcedSize }, }, { id: 'appearance' as const, title: window.i18n('sessionAppearance'), - icon: { type: 'paintbrush', ...forcedSize }, + icon: { type: 'paintbrush' as const, ...forcedSize }, }, { - id: 'permissions', + id: 'permissions' as const, title: window.i18n('sessionPermissions'), - icon: { type: 'checkCircle', ...forcedSize }, + icon: { type: 'checkCircle' as const, ...forcedSize }, }, { id: 'help' as const, title: window.i18n('sessionHelp'), - icon: { type: 'question', ...forcedSize }, + icon: { type: 'question' as const, ...forcedSize }, }, { id: 'recoveryPassword' as const, title: window.i18n('sessionRecoveryPassword'), - icon: { type: 'recoveryPasswordFill', ...forcedSize }, + icon: { type: 'recoveryPasswordFill' as const, ...forcedSize }, }, { id: 'clearData' as const, title: window.i18n('sessionClearData'), - icon: { type: 'delete', ...forcedSize, color: 'var(--danger-color)' }, + icon: { type: 'delete' as const, ...forcedSize, color: 'var(--danger-color)' }, }, - ]; + ].map(m => ({ ...m, dataTestId: `${m.id}-settings-menu-item` as const })); }; -const LeftPaneSettingsCategoryRow = (props: { item: Categories }) => { - const { item } = props; - const { id, title, icon } = item; +const LeftPaneSettingsCategoryRow = ({ item }: { item: Categories }) => { + const { id, title, icon, dataTestId } = item; const dispatch = useDispatch(); const focusedSettingsSection = useSelector(getFocusedSettingsSection); - const dataTestId = `${title.toLowerCase().replace(' ', '-')}-settings-menu-item`; - const isClearData = id === 'clearData'; return ( diff --git a/ts/components/leftpane/conversation-list-item/HeaderItem.tsx b/ts/components/leftpane/conversation-list-item/HeaderItem.tsx index 85878befcc..03ec311dc9 100644 --- a/ts/components/leftpane/conversation-list-item/HeaderItem.tsx +++ b/ts/components/leftpane/conversation-list-item/HeaderItem.tsx @@ -7,11 +7,11 @@ import { useConvoIdFromContext } from '../../../contexts/ConvoIdContext'; import { Data } from '../../../data/data'; import { useActiveAt, - useConversationPropsById, useHasUnread, useIsForcedUnreadWithoutUnreadMsg, 
useIsPinned, useMentionedUs, + useNotificationSetting, useUnreadCount, } from '../../../hooks/useParamSelector'; import { Constants } from '../../../session'; @@ -28,7 +28,7 @@ import { UserItem } from './UserItem'; const NotificationSettingIcon = () => { const isMessagesSection = useSelector(getIsMessageSection); const convoId = useConvoIdFromContext(); - const convoSetting = useConversationPropsById(convoId)?.currentNotificationSetting; + const convoSetting = useNotificationSetting(convoId); if (!isMessagesSection) { return null; diff --git a/ts/components/leftpane/conversation-list-item/InteractionItem.tsx b/ts/components/leftpane/conversation-list-item/InteractionItem.tsx index 16ae061284..fa977225a8 100644 --- a/ts/components/leftpane/conversation-list-item/InteractionItem.tsx +++ b/ts/components/leftpane/conversation-list-item/InteractionItem.tsx @@ -3,7 +3,8 @@ import { useEffect, useState } from 'react'; import styled from 'styled-components'; import { useIsPrivate, useIsPublic } from '../../../hooks/useParamSelector'; -import { getConversationController } from '../../../session/conversations'; + +import { ConvoHub } from '../../../session/conversations'; import { assertUnreachable } from '../../../types/sqlSharedTypes'; import { MessageBody } from '../../conversation/message/message-content/MessageBody'; import { @@ -34,7 +35,7 @@ export const InteractionItem = (props: InteractionItemProps) => { // NOTE we want to reset the interaction state when the last message changes useEffect(() => { if (conversationId) { - const convo = getConversationController().get(conversationId); + const convo = ConvoHub.use().get(conversationId); if ( convo && @@ -59,9 +60,7 @@ export const InteractionItem = (props: InteractionItemProps) => { let text = storedLastMessageText || ''; let errorText = ''; - const name = getConversationController() - .get(conversationId) - ?.getNicknameOrRealUsernameOrPlaceholder(); + const name = ConvoHub.use().get(conversationId)?.getNicknameOrRealUsernameOrPlaceholder(); switch (interactionType) { case ConversationInteractionType.Hide: diff --git a/ts/components/leftpane/conversation-list-item/MessageItem.tsx b/ts/components/leftpane/conversation-list-item/MessageItem.tsx index 6350b58709..78d240cdfb 100644 --- a/ts/components/leftpane/conversation-list-item/MessageItem.tsx +++ b/ts/components/leftpane/conversation-list-item/MessageItem.tsx @@ -48,6 +48,7 @@ export const MessageItem = () => { if (isEmpty(text)) { return null; } + const withoutHtmlTags = text.replaceAll(/(<([^>]+)>)/gi, ''); return (
@@ -60,7 +61,12 @@ export const MessageItem = () => { {isConvoTyping ? ( ) : ( - + )}
{!isSearching && lastMessage && lastMessage.status && !isMessageRequest ? ( diff --git a/ts/components/leftpane/conversation-list-item/UserItem.tsx b/ts/components/leftpane/conversation-list-item/UserItem.tsx index b72b22c694..4ece2df0ad 100644 --- a/ts/components/leftpane/conversation-list-item/UserItem.tsx +++ b/ts/components/leftpane/conversation-list-item/UserItem.tsx @@ -1,3 +1,4 @@ +import { isEmpty } from 'lodash'; import { useConvoIdFromContext } from '../../../contexts/ConvoIdContext'; import { useConversationRealName, @@ -16,8 +17,8 @@ export const UserItem = () => { const isSearchResultsMode = useIsSearching(); const shortenedPubkey = PubKey.shorten(conversationId); - const isMe = useIsMe(conversationId); const username = useConversationUsername(conversationId); + const isMe = useIsMe(conversationId); const realName = useConversationRealName(conversationId); const hasNickname = useHasNickname(conversationId); @@ -29,7 +30,7 @@ export const UserItem = () => { : username; let shouldShowPubkey = false; - if ((!username || username.length === 0) && (!displayName || displayName.length === 0)) { + if (isEmpty(username) && isEmpty(displayName)) { shouldShowPubkey = true; } diff --git a/ts/components/leftpane/overlay/OverlayClosedGroup.tsx b/ts/components/leftpane/overlay/OverlayClosedGroup.tsx index 8cb488fd52..fd7e66cd10 100644 --- a/ts/components/leftpane/overlay/OverlayClosedGroup.tsx +++ b/ts/components/leftpane/overlay/OverlayClosedGroup.tsx @@ -4,30 +4,37 @@ import { useDispatch, useSelector } from 'react-redux'; import useKey from 'react-use/lib/useKey'; import styled from 'styled-components'; -import { isEmpty } from 'lodash'; +import { concat, isEmpty } from 'lodash'; +import useUpdate from 'react-use/lib/useUpdate'; +import { MemberListItem } from '../../MemberListItem'; import { SessionButton } from '../../basic/SessionButton'; -import { SessionSpinner } from '../../loading'; import { useSet } from '../../../hooks/useSet'; import { VALIDATION } from '../../../session/constants'; import { createClosedGroup } from '../../../session/conversations/createClosedGroup'; import { ToastUtils } from '../../../session/utils'; import LIBSESSION_CONSTANTS from '../../../session/utils/libsession/libsession_constants'; +import { isDevProd } from '../../../shared/env_vars'; +import { groupInfoActions } from '../../../state/ducks/metaGroups'; import { clearSearch } from '../../../state/ducks/search'; import { resetLeftOverlayMode } from '../../../state/ducks/section'; -import { getPrivateContactsPubkeys } from '../../../state/selectors/conversations'; +import { useContactsToInviteToGroup } from '../../../state/selectors/conversations'; +import { useIsCreatingGroupFromUIPending } from '../../../state/selectors/groups'; import { getSearchResultsContactOnly, getSearchTerm, useIsSearching, } from '../../../state/selectors/search'; -import { MemberListItem } from '../../MemberListItem'; +import { useOurPkStr } from '../../../state/selectors/user'; +import { GroupInviteRequiredVersionBanner } from '../../NoticeBanner'; import { SessionSearchInput } from '../../SessionSearchInput'; import { Flex } from '../../basic/Flex'; +import { Localizer } from '../../basic/Localizer'; +import { SessionToggle } from '../../basic/SessionToggle'; import { SpacerLG, SpacerMD } from '../../basic/Text'; import { SessionInput } from '../../inputs'; +import { SessionSpinner } from '../../loading'; import { StyledLeftPaneOverlay } from './OverlayMessage'; -import { Localizer } from '../../basic/Localizer'; const 
StyledMemberListNoContacts = styled.div`
   text-align: center;
@@ -37,6 +44,8 @@ const StyledMemberListNoContacts = styled.div`

 const StyledNoResults = styled.div`
   width: 100%;
+  min-height: 40px;
+  max-height: 400px;
   padding: var(--margins-xl) var(--margins-sm);
   text-align: center;
 `;
@@ -95,14 +104,149 @@ async function createClosedGroupWithErrorHandling(
     return false;
   }

-  await createClosedGroup(groupName, groupMemberIds, window.sessionFeatureFlags.useClosedGroupV3);
+  await createClosedGroup(groupName, groupMemberIds);

   return true;
 }

-export const OverlayClosedGroup = () => {
+// duplicated from the legacy one below because this one is a lot more tightly linked with the redux async thunks logic
+export const OverlayClosedGroupV2 = () => {
+  const dispatch = useDispatch();
+  const us = useOurPkStr();
+  const privateContactsPubkeys = useContactsToInviteToGroup();
+  const isCreatingGroup = useIsCreatingGroupFromUIPending();
+  const [groupName, setGroupName] = useState('');
+  const forceUpdate = useUpdate();
+  const {
+    uniqueValues: members,
+    addTo: addToSelected,
+    removeFrom: removeFromSelected,
+  } = useSet([]);
+  const isSearch = useIsSearching();
+  const searchResultContactsOnly = useSelector(getSearchResultsContactOnly);
+
+  function closeOverlay() {
+    dispatch(resetLeftOverlayMode());
+  }
+
+  async function onEnterPressed() {
+    if (isCreatingGroup) {
+      window?.log?.warn('Closed group creation already in progress');
+      return;
+    }
+    // Validate groupName and groupMembers length
+    if (groupName.length === 0) {
+      ToastUtils.pushToastError('invalidGroupName', window.i18n('groupNameEnterPlease'));
+      return;
+    }
+    if (groupName.length > LIBSESSION_CONSTANTS.BASE_GROUP_MAX_NAME_LENGTH) {
+      ToastUtils.pushToastError('invalidGroupName', window.i18n('groupNameEnterShorter'));
+      return;
+    }
+
+    // >= because we add ourselves as a member AFTER this, so a group of 10 is already invalid as it will be 11 with ourselves
+    // the same applies to the members count < 1 check below
+
+    if (members.length < 1) {
+      ToastUtils.pushToastError('pickClosedGroupMember', window.i18n('groupCreateErrorNoMembers'));
+      return;
+    }
+    if (members.length >= VALIDATION.CLOSED_GROUP_SIZE_LIMIT) {
+      ToastUtils.pushToastError('closedGroupMaxSize', window.i18n('groupAddMemberMaximum'));
+      return;
+    }
+    // trigger the add through redux.
+    dispatch(
+      groupInfoActions.initNewGroupInWrapper({
+        members: concat(members, [us]),
+        groupName,
+        us,
+      }) as any
+    );
+  }
+
+  useKey('Escape', closeOverlay);
+
+  const noContactsForClosedGroup = privateContactsPubkeys.length === 0;
+
+  const contactsToRender = isSearch ? searchResultContactsOnly : privateContactsPubkeys;
+
+  const disableCreateButton = !members.length && !groupName.length;
+
+  return (
+
+ +
+ + {/* TODO: localize those strings once out releasing those buttons for real Remove after QA */} + {isDevProd() && ( + <> + + Invite as admin?{' '} + { + window.sessionFeatureFlags.useGroupV2InviteAsAdmin = + !window.sessionFeatureFlags.useGroupV2InviteAsAdmin; + forceUpdate(); + }} + /> + + + )} + + + {!noContactsForClosedGroup && window.sessionFeatureFlags.useClosedGroupV2 && ( + + )} + + + {noContactsForClosedGroup ? ( + + ) : ( + contactsToRender.map((memberPubkey: string) => ( + m === memberPubkey)} + key={memberPubkey} + onSelect={addToSelected} + onUnselect={removeFromSelected} + disableBg={true} + /> + )) + )} + + + +
+ ); +}; + +export const OverlayLegacyClosedGroup = () => { const dispatch = useDispatch(); - const privateContactsPubkeys = useSelector(getPrivateContactsPubkeys); + const privateContactsPubkeys = useContactsToInviteToGroup(); const [groupName, setGroupName] = useState(''); const [groupNameError, setGroupNameError] = useState(undefined); const [loading, setLoading] = useState(false); diff --git a/ts/components/leftpane/overlay/OverlayMessage.tsx b/ts/components/leftpane/overlay/OverlayMessage.tsx index e5e45f0a85..ec5b199405 100644 --- a/ts/components/leftpane/overlay/OverlayMessage.tsx +++ b/ts/components/leftpane/overlay/OverlayMessage.tsx @@ -5,7 +5,9 @@ import styled from 'styled-components'; import { motion } from 'framer-motion'; import { isEmpty } from 'lodash'; import { useDispatch } from 'react-redux'; -import { getConversationController } from '../../../session/conversations'; + +import { ConvoHub } from '../../../session/conversations'; + import { PubKey } from '../../../session/types'; import { openConversationWithMessages } from '../../../state/ducks/conversations'; import { resetLeftOverlayMode } from '../../../state/ducks/section'; @@ -73,12 +75,12 @@ export const OverlayMessage = () => { const disableNextButton = !pubkeyOrOns || loading; async function openConvoOnceResolved(resolvedSessionID: string) { - const convo = await getConversationController().getOrCreateAndWait( + const convo = await ConvoHub.use().getOrCreateAndWait( resolvedSessionID, ConversationTypeEnum.PRIVATE ); - // we now want to show a conversation we just started on the leftpane, even if we did not send a message to it yet + // we now want to show a conversation we just started on the left pane, even if we did not send a message to it yet if (!convo.isActive() || convo.isHidden()) { // bump the timestamp only if we were not active before if (!convo.isActive()) { @@ -102,23 +104,23 @@ export const OverlayMessage = () => { return; } - const pubkeyorOnsTrimmed = pubkeyOrOns.trim(); - const validationError = PubKey.validateWithErrorNoBlinding(pubkeyorOnsTrimmed); + const pubkeyOrOnsTrimmed = pubkeyOrOns.trim(); + const validationError = PubKey.validateWithErrorNoBlinding(pubkeyOrOnsTrimmed); if (!validationError) { - await openConvoOnceResolved(pubkeyorOnsTrimmed); + await openConvoOnceResolved(pubkeyOrOnsTrimmed); return; } - const isPubkey = PubKey.validate(pubkeyorOnsTrimmed); - const isGroupPubkey = PubKey.isClosedGroupV3(pubkeyorOnsTrimmed); + const isPubkey = PubKey.validate(pubkeyOrOnsTrimmed); + const isGroupPubkey = PubKey.is03Pubkey(pubkeyOrOnsTrimmed); if ((isPubkey && validationError) || isGroupPubkey) { setPubkeyOrOnsError(validationError); return; } // this might be an ONS, validate the regex first - const mightBeOnsName = new RegExp(ONSResolve.onsNameRegex, 'g').test(pubkeyorOnsTrimmed); + const mightBeOnsName = new RegExp(ONSResolve.onsNameRegex, 'g').test(pubkeyOrOnsTrimmed); if (!mightBeOnsName) { setPubkeyOrOnsError(window.i18n('onsErrorNotRecognized')); return; @@ -126,7 +128,7 @@ export const OverlayMessage = () => { setLoading(true); try { - const resolvedSessionID = await ONSResolve.getSessionIDForOnsName(pubkeyorOnsTrimmed); + const resolvedSessionID = await ONSResolve.getSessionIDForOnsName(pubkeyOrOnsTrimmed); const idValidationError = PubKey.validateWithErrorNoBlinding(resolvedSessionID); if (idValidationError) { diff --git a/ts/components/leftpane/overlay/OverlayMessageRequest.tsx b/ts/components/leftpane/overlay/OverlayMessageRequest.tsx index d78f47ddb2..aaaf8275ca 100644 --- 
a/ts/components/leftpane/overlay/OverlayMessageRequest.tsx +++ b/ts/components/leftpane/overlay/OverlayMessageRequest.tsx @@ -10,6 +10,7 @@ import { useSelectedConversationKey } from '../../../state/selectors/selectedCon import { SessionButton, SessionButtonColor } from '../../basic/SessionButton'; import { SpacerLG } from '../../basic/Text'; import { ConversationListItem } from '../conversation-list-item/ConversationListItem'; +import { ed25519Str } from '../../../session/utils/String'; import { Localizer } from '../../basic/Localizer'; const MessageRequestListPlaceholder = styled.div` @@ -64,6 +65,9 @@ export const OverlayMessageRequest = () => { title: window.i18n('clearAll'), i18nMessage: { token: 'messageRequestsClearAllExplanation' }, onClose, + okTheme: SessionButtonColor.Danger, + closeTheme: SessionButtonColor.Primary, + okText: window.i18n('clear'), onClickOk: async () => { window?.log?.info('Blocking all message requests'); if (!hasRequests) { @@ -73,13 +77,20 @@ export const OverlayMessageRequest = () => { for (let index = 0; index < messageRequests.length; index++) { const convoId = messageRequests[index]; - // eslint-disable-next-line no-await-in-loop - await declineConversationWithoutConfirm({ - blockContact: false, - conversationId: convoId, - currentlySelectedConvo, - syncToDevices: false, - }); + try { + // eslint-disable-next-line no-await-in-loop + await declineConversationWithoutConfirm({ + alsoBlock: false, + conversationId: convoId, + currentlySelectedConvo, + syncToDevices: false, + conversationIdOrigin: null, // block is false, no need for conversationIdOrigin + }); + } catch (e) { + window.log.warn( + `failed to decline msg request ${ed25519Str(convoId)} with error: ${e.message}` + ); + } } await forceSyncConfigurationNowIfNeeded(); @@ -87,9 +98,6 @@ export const OverlayMessageRequest = () => { onClickClose: () => { window.inboxStore?.dispatch(updateConfirmModal(null)); }, - okTheme: SessionButtonColor.Danger, - closeTheme: SessionButtonColor.Primary, - okText: window.i18n('clear'), }) ); } diff --git a/ts/components/leftpane/overlay/choose-action/ActionRow.tsx b/ts/components/leftpane/overlay/choose-action/ActionRow.tsx index ca89861535..b4dcb6abf5 100644 --- a/ts/components/leftpane/overlay/choose-action/ActionRow.tsx +++ b/ts/components/leftpane/overlay/choose-action/ActionRow.tsx @@ -1,4 +1,5 @@ import styled from 'styled-components'; +import { SessionDataTestId } from 'react'; import { Flex } from '../../../basic/Flex'; import { SessionIcon, SessionIconSize, SessionIconType } from '../../../icon'; @@ -52,7 +53,7 @@ type ActionRowProps = { iconType: SessionIconType; iconSize?: number | SessionIconSize; onClick: () => void; - dataTestId: string; + dataTestId: SessionDataTestId; }; export function ActionRow(props: ActionRowProps) { diff --git a/ts/components/loading/spinner/Spinner.tsx b/ts/components/loading/spinner/Spinner.tsx index f83b7158a5..f119782ee4 100644 --- a/ts/components/loading/spinner/Spinner.tsx +++ b/ts/components/loading/spinner/Spinner.tsx @@ -1,9 +1,10 @@ +import { SessionDataTestId } from 'react'; import styled from 'styled-components'; type Props = { size: 'small' | 'normal'; direction?: string; - dataTestId?: string; + dataTestId?: SessionDataTestId; }; // Module: Spinner diff --git a/ts/components/menu/ConversationListItemContextMenu.tsx b/ts/components/menu/ConversationListItemContextMenu.tsx index 06ea3742ed..6d078f8a18 100644 --- a/ts/components/menu/ConversationListItemContextMenu.tsx +++ 
b/ts/components/menu/ConversationListItemContextMenu.tsx @@ -3,9 +3,12 @@ import { Menu } from 'react-contexify'; import { useSelector } from 'react-redux'; import { useConvoIdFromContext } from '../../contexts/ConvoIdContext'; import { useIsPinned, useIsPrivate, useIsPrivateAndFriend } from '../../hooks/useParamSelector'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; +import { + getIsMessageRequestOverlayShown, + getIsMessageSection, +} from '../../state/selectors/section'; import { useIsSearching } from '../../state/selectors/search'; -import { getIsMessageSection } from '../../state/selectors/section'; import { SessionContextMenuContainer } from '../SessionContextMenuContainer'; import { AcceptMsgRequestMenuItem, @@ -85,9 +88,10 @@ export const PinConversationMenuItem = (): JSX.Element | null => { const isPrivateAndFriend = useIsPrivateAndFriend(conversationId); const isPrivate = useIsPrivate(conversationId); const isPinned = useIsPinned(conversationId); + const isMessageRequest = useSelector(getIsMessageRequestOverlayShown); - if (isMessagesSection && (!isPrivate || (isPrivate && isPrivateAndFriend))) { - const conversation = getConversationController().get(conversationId); + if (isMessagesSection && !isMessageRequest && (!isPrivate || (isPrivate && isPrivateAndFriend))) { + const conversation = ConvoHub.use().get(conversationId); const togglePinConversation = () => { void conversation?.togglePinned(); diff --git a/ts/components/menu/Menu.tsx b/ts/components/menu/Menu.tsx index 27fd585476..7da4e79b89 100644 --- a/ts/components/menu/Menu.tsx +++ b/ts/components/menu/Menu.tsx @@ -8,9 +8,9 @@ import { useIsActive, useIsBlinded, useIsBlocked, + useIsGroupV2, useIsIncomingRequest, useIsKickedFromGroup, - useIsLeft, useIsMe, useIsPrivate, useIsPrivateAndFriend, @@ -21,43 +21,48 @@ import { useWeAreAdmin, } from '../../hooks/useParamSelector'; import { - approveConvoAndSendResponse, blockConvoById, clearNickNameByConvoId, declineConversationWithConfirm, deleteAllMessagesByConvoIdWithConfirmation, + handleAcceptConversationRequest, markAllReadByConvoId, setNotificationForConvoId, showAddModeratorsByConvoId, showBanUserByConvoId, showInviteContactByConvoId, showLeaveGroupByConvoId, - showLeavePrivateConversationbyConvoId, + showLeavePrivateConversationByConvoId, showRemoveModeratorsByConvoId, showUnbanUserByConvoId, showUpdateGroupNameByConvoId, unblockConvoById, } from '../../interactions/conversationInteractions'; -import { - ConversationInteractionStatus, - ConversationInteractionType, -} from '../../interactions/types'; import { ConversationNotificationSetting, ConversationNotificationSettingType, } from '../../models/conversationAttributes'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { PubKey } from '../../session/types'; import { changeNickNameModal, updateConfirmModal, updateUserDetailsModal, } from '../../state/ducks/modalDialog'; -import { getIsMessageSection } from '../../state/selectors/section'; +import { useConversationIdOrigin } from '../../state/selectors/conversations'; +import { + getIsMessageRequestOverlayShown, + getIsMessageSection, +} from '../../state/selectors/section'; import { useSelectedConversationKey } from '../../state/selectors/selectedConversation'; import type { LocalizerToken } from '../../types/localizer'; import { SessionButtonColor } from '../basic/SessionButton'; import { 
ItemWithDataTestId } from './items/MenuItemWithDataTestId'; +import { + ConversationInteractionStatus, + ConversationInteractionType, +} from '../../interactions/types'; +import { useLibGroupDestroyed } from '../../state/selectors/userGroups'; /** Menu items standardized */ @@ -84,9 +89,14 @@ export const MarkConversationUnreadMenuItem = (): JSX.Element | null => { const isMessagesSection = useSelector(getIsMessageSection); const isPrivate = useIsPrivate(conversationId); const isPrivateAndFriend = useIsPrivateAndFriend(conversationId); + const isMessageRequestShown = useSelector(getIsMessageRequestOverlayShown); - if (isMessagesSection && (!isPrivate || (isPrivate && isPrivateAndFriend))) { - const conversation = getConversationController().get(conversationId); + if ( + isMessagesSection && + !isMessageRequestShown && + (!isPrivate || (isPrivate && isPrivateAndFriend)) + ) { + const conversation = ConvoHub.use().get(conversationId); const markUnread = () => { void conversation?.markAsUnread(true); @@ -129,9 +139,10 @@ export const DeletePrivateContactMenuItem = () => { onClickClose, okTheme: SessionButtonColor.Danger, onClickOk: async () => { - await getConversationController().delete1o1(convoId, { + await ConvoHub.use().delete1o1(convoId, { fromSyncMessage: false, justHidePrivate: false, + keepMessages: false, }); }, }) @@ -146,13 +157,12 @@ export const DeletePrivateContactMenuItem = () => { export const LeaveGroupOrCommunityMenuItem = () => { const convoId = useConvoIdFromContext(); const username = useConversationUsername(convoId) || convoId; - const isLeft = useIsLeft(convoId); - const isKickedFromGroup = useIsKickedFromGroup(convoId); const isPrivate = useIsPrivate(convoId); const isPublic = useIsPublic(convoId); const lastMessage = useLastMessage(convoId); + const isMessageRequestShown = useSelector(getIsMessageRequestOverlayShown); - if (!isKickedFromGroup && !isLeft && !isPrivate) { + if (!isPrivate && !isMessageRequestShown) { return ( { @@ -203,11 +213,11 @@ export const ShowUserDetailsMenuItem = () => { export const UpdateGroupNameMenuItem = () => { const convoId = useConvoIdFromContext(); - const left = useIsLeft(convoId); const isKickedFromGroup = useIsKickedFromGroup(convoId); + const isDestroyed = useLibGroupDestroyed(convoId); const weAreAdmin = useWeAreAdmin(convoId); - if (!isKickedFromGroup && !left && weAreAdmin) { + if (!isKickedFromGroup && weAreAdmin && !isDestroyed) { return ( { @@ -224,6 +234,7 @@ export const UpdateGroupNameMenuItem = () => { export const RemoveModeratorsMenuItem = (): JSX.Element | null => { const convoId = useConvoIdFromContext(); const isPublic = useIsPublic(convoId); + const isKickedFromGroup = useIsKickedFromGroup(convoId); const weAreAdmin = useWeAreAdmin(convoId); @@ -380,8 +391,9 @@ export const ChangeNicknameMenuItem = () => { */ export const DeleteMessagesMenuItem = () => { const convoId = useConvoIdFromContext(); + const isMessageRequestShown = useSelector(getIsMessageRequestOverlayShown); - if (!convoId) { + if (!convoId || isMessageRequestShown) { return null; } return ( @@ -414,7 +426,7 @@ export const DeletePrivateConversationMenuItem = () => { return ( { - showLeavePrivateConversationbyConvoId(convoId); + showLeavePrivateConversationByConvoId(convoId); }} > {isMe ? 
window.i18n('noteToSelfHide') : window.i18n('conversationsDelete')} @@ -425,17 +437,16 @@ export const DeletePrivateConversationMenuItem = () => { export const AcceptMsgRequestMenuItem = () => { const convoId = useConvoIdFromContext(); const isRequest = useIsIncomingRequest(convoId); - const convo = getConversationController().get(convoId); const isPrivate = useIsPrivate(convoId); - if (isRequest && isPrivate) { + if (isRequest && (isPrivate || PubKey.is03Pubkey(convoId))) { return ( { - await convo.setDidApproveMe(true); - await convo.addOutgoingApprovalMessage(Date.now()); - await approveConvoAndSendResponse(convoId); + await handleAcceptConversationRequest({ + convoId, + }); }} > {window.i18n('accept')} @@ -450,20 +461,21 @@ export const DeclineMsgRequestMenuItem = () => { const isRequest = useIsIncomingRequest(convoId); const isPrivate = useIsPrivate(convoId); const selected = useSelectedConversationKey(); - - if (isPrivate && isRequest) { + const isGroupV2 = useIsGroupV2(convoId); + if ((isPrivate || isGroupV2) && isRequest) { return ( { declineConversationWithConfirm({ conversationId: convoId, syncToDevices: true, - blockContact: false, + alsoBlock: false, currentlySelectedConvo: selected || undefined, + conversationIdOrigin: null, }); }} > - {window.i18n('decline')} + {window.i18n('delete')} ); } @@ -475,16 +487,20 @@ export const DeclineAndBlockMsgRequestMenuItem = () => { const isRequest = useIsIncomingRequest(convoId); const selected = useSelectedConversationKey(); const isPrivate = useIsPrivate(convoId); + const isGroupV2 = useIsGroupV2(convoId); + const convoOrigin = useConversationIdOrigin(convoId); - if (isRequest && isPrivate) { + if (isRequest && (isPrivate || (isGroupV2 && convoOrigin))) { + // to block the author of a groupv2 invite we need the convoOrigin set return ( { declineConversationWithConfirm({ conversationId: convoId, syncToDevices: true, - blockContact: true, + alsoBlock: true, currentlySelectedConvo: selected || undefined, + conversationIdOrigin: convoOrigin ?? 
null, }); }} > @@ -503,15 +519,18 @@ export const NotificationForConvoMenuItem = (): JSX.Element | null => { const currentNotificationSetting = useNotificationSetting(convoId); const isBlocked = useIsBlocked(convoId); const isActive = useIsActive(convoId); - const isLeft = useIsLeft(convoId); const isKickedFromGroup = useIsKickedFromGroup(convoId); + const isGroupDestroyed = useLibGroupDestroyed(convoId); + const isFriend = useIsPrivateAndFriend(convoId); const isPrivate = useIsPrivate(convoId); + const isMessageRequestShown = useSelector(getIsMessageRequestOverlayShown); if ( !convoId || - isLeft || + isMessageRequestShown || isKickedFromGroup || + isGroupDestroyed || isBlocked || !isActive || (isPrivate && !isFriend) diff --git a/ts/components/registration/RegistrationStages.tsx b/ts/components/registration/RegistrationStages.tsx index e2096ed721..e139a3a9b7 100644 --- a/ts/components/registration/RegistrationStages.tsx +++ b/ts/components/registration/RegistrationStages.tsx @@ -2,7 +2,7 @@ import { shell } from 'electron'; import { AnimatePresence } from 'framer-motion'; import styled from 'styled-components'; import { Data } from '../../data/data'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { AccountCreation, AccountRestoration, @@ -24,8 +24,8 @@ export async function resetRegistration() { await Data.removeAll(); Storage.reset(); await Storage.fetch(); - getConversationController().reset(); - await getConversationController().load(); + ConvoHub.use().reset(); + await ConvoHub.use().load(); } const StyledRegistrationContainer = styled(Flex)` diff --git a/ts/components/settings/SessionNotificationGroupSettings.tsx b/ts/components/settings/SessionNotificationGroupSettings.tsx index 07ece6eae9..e74d3c3b66 100644 --- a/ts/components/settings/SessionNotificationGroupSettings.tsx +++ b/ts/components/settings/SessionNotificationGroupSettings.tsx @@ -6,16 +6,11 @@ import { SettingsKey } from '../../data/settings-key'; import { isAudioNotificationSupported } from '../../types/Settings'; import { Notifications } from '../../util/notifications'; import { SessionButton } from '../basic/SessionButton'; -import { SessionRadioGroup } from '../basic/SessionRadioGroup'; +import { SessionRadioGroup, SessionRadioItems } from '../basic/SessionRadioGroup'; import { SpacerLG } from '../basic/Text'; import { SessionSettingsItemWrapper, SessionToggleWithDescription } from './SessionSettingListItem'; -enum NOTIFICATION { - MESSAGE = 'message', - NAME = 'name', - COUNT = 'count', - OFF = 'off', -} +const NotificationType = { message: 'message', name: 'name', count: 'count', off: 'off' } as const; const StyledButtonContainer = styled.div` display: flex; @@ -28,28 +23,32 @@ export const SessionNotificationGroupSettings = () => { const forceUpdate = useUpdate(); const initialNotificationEnabled = - window.getSettingValue(SettingsKey.settingsNotification) || NOTIFICATION.MESSAGE; + window.getSettingValue(SettingsKey.settingsNotification) || NotificationType.message; const initialAudioNotificationEnabled = window.getSettingValue(SettingsKey.settingsAudioNotification) || false; const notificationsAreEnabled = - initialNotificationEnabled && initialNotificationEnabled !== NOTIFICATION.OFF; + initialNotificationEnabled && initialNotificationEnabled !== NotificationType.off; - const items = [ + const options = [ { label: window.i18n('notificationsContentShowNameAndContent'), - value: NOTIFICATION.MESSAGE, - }, - { - label: 
window.i18n('notificationsContentShowNameOnly'), - value: NOTIFICATION.NAME, + value: NotificationType.message, }, + { label: window.i18n('notificationsContentShowNameOnly'), value: NotificationType.name }, { label: window.i18n('notificationsContentShowNoNameOrContent'), - value: NOTIFICATION.COUNT, + value: NotificationType.count, }, - ]; + ] as const; + + const items: SessionRadioItems = options.map(m => ({ + label: m.label, + value: m.value, + inputDataTestId: `input-${m.value}`, + labelDataTestId: `label-${m.value}`, + })); const onClickPreview = () => { if (!notificationsAreEnabled) { @@ -71,7 +70,7 @@ export const SessionNotificationGroupSettings = () => { onClickToggle={async () => { await window.setSettingValue( SettingsKey.settingsNotification, - notificationsAreEnabled ? NOTIFICATION.OFF : NOTIFICATION.MESSAGE + notificationsAreEnabled ? 'off' : 'message' ); forceUpdate(); }} diff --git a/ts/components/settings/SessionSettingListItem.tsx b/ts/components/settings/SessionSettingListItem.tsx index 6f6b5e5d24..5aa4765051 100644 --- a/ts/components/settings/SessionSettingListItem.tsx +++ b/ts/components/settings/SessionSettingListItem.tsx @@ -22,7 +22,7 @@ type ButtonSettingsProps = { buttonType?: SessionButtonType; buttonShape?: SessionButtonShape; buttonText: string; - dataTestId?: string; + dataTestId?: React.SessionDataTestId; onClick: () => void; }; @@ -160,7 +160,7 @@ export const SessionToggleWithDescription = (props: { onClickToggle: () => void; confirmationDialogParams?: SessionConfirmDialogProps; childrenDescription?: ReactNode; // if set, those elements will be appended next to description field (only used for typing message settings as of now) - dataTestId?: string; + dataTestId?: React.SessionDataTestId; }) => { const { title, diff --git a/ts/components/settings/SessionSettings.tsx b/ts/components/settings/SessionSettings.tsx index 86f913b2a0..fee434e4d0 100644 --- a/ts/components/settings/SessionSettings.tsx +++ b/ts/components/settings/SessionSettings.tsx @@ -73,7 +73,7 @@ const SessionInfo = () => { { void shell.openExternal( - `https://github.com/oxen-io/session-desktop/releases/tag/v${window.versionInfo.version}` + `https://github.com/session-foundation/session-desktop/releases/tag/v${window.versionInfo.version}` ); }} > diff --git a/ts/components/settings/section/CategoryConversations.tsx b/ts/components/settings/section/CategoryConversations.tsx index 6560fe5879..f59d2cd38e 100644 --- a/ts/components/settings/section/CategoryConversations.tsx +++ b/ts/components/settings/section/CategoryConversations.tsx @@ -6,7 +6,7 @@ import { ToastUtils } from '../../../session/utils'; import { toggleAudioAutoplay } from '../../../state/ducks/userConfig'; import { useHasEnterSendEnabled } from '../../../state/selectors/settings'; import { getAudioAutoplay } from '../../../state/selectors/userConfig'; -import { SessionRadioGroup } from '../../basic/SessionRadioGroup'; +import { SessionRadioGroup, SessionRadioItems } from '../../basic/SessionRadioGroup'; import { BlockedContactsList } from '../BlockedList'; import { SessionSettingsItemWrapper, @@ -87,14 +87,18 @@ const EnterKeyFunctionSetting = () => { const initialSetting = useHasEnterSendEnabled(); const selectedWithSettingTrue = 'enterForNewLine'; - const items = [ + const items: SessionRadioItems = [ { label: window.i18n('conversationsEnterSends'), value: 'enterForSend', + inputDataTestId: 'input-enterForSend', + labelDataTestId: 'label-enterForSend', }, { label: window.i18n('conversationsEnterNewLine'), value: 
selectedWithSettingTrue, + inputDataTestId: `input-${selectedWithSettingTrue}`, + labelDataTestId: `label-${selectedWithSettingTrue}`, }, ]; diff --git a/ts/components/settings/section/CategoryPrivacy.tsx b/ts/components/settings/section/CategoryPrivacy.tsx index 75a357ed8c..aacef1775a 100644 --- a/ts/components/settings/section/CategoryPrivacy.tsx +++ b/ts/components/settings/section/CategoryPrivacy.tsx @@ -8,7 +8,7 @@ import { SpacerLG } from '../../basic/Text'; import { TypingBubble } from '../../conversation/TypingBubble'; import { UserUtils } from '../../../session/utils'; -import { ConfigurationSync } from '../../../session/utils/job_runners/jobs/ConfigurationSyncJob'; +import { UserSync } from '../../../session/utils/job_runners/jobs/UserSyncJob'; import { SessionUtilUserProfile } from '../../../session/utils/libsession/libsession_utils_user_profile'; import { useHasBlindedMsgRequestsEnabled, @@ -99,7 +99,7 @@ export const SettingsCategoryPrivacy = (props: { await SessionUtilUserProfile.insertUserProfileIntoWrapper( UserUtils.getOurPubKeyStrFromCache() ); - await ConfigurationSync.queueNewJobIfNeeded(); + await UserSync.queueNewJobIfNeeded(); forceUpdate(); }} title={window.i18n('messageRequestsCommunities')} diff --git a/ts/components/settings/section/CategoryRecoveryPassword.tsx b/ts/components/settings/section/CategoryRecoveryPassword.tsx index aed50bf327..fe21506150 100644 --- a/ts/components/settings/section/CategoryRecoveryPassword.tsx +++ b/ts/components/settings/section/CategoryRecoveryPassword.tsx @@ -10,7 +10,7 @@ import { updateLightBoxOptions, } from '../../../state/ducks/modalDialog'; import { showSettingsSection } from '../../../state/ducks/section'; -import { getIsModalVisble } from '../../../state/selectors/modal'; +import { getIsModalVisible } from '../../../state/selectors/modal'; import { useHideRecoveryPasswordEnabled } from '../../../state/selectors/settings'; import { useIsDarkTheme } from '../../../state/selectors/theme'; import { THEME_GLOBALS } from '../../../themes/globals'; @@ -74,7 +74,7 @@ export const SettingsCategoryRecoveryPassword = () => { const [isQRVisible, setIsQRVisible] = useState(false); const hideRecoveryPassword = useHideRecoveryPasswordEnabled(); - const isModalVisible = useSelector(getIsModalVisble); + const isModalVisible = useSelector(getIsModalVisible); const isDarkTheme = useIsDarkTheme(); const { dataURL, iconSize, iconColor, backgroundColor, loading } = useIconToImageURL(qrLogoProps); diff --git a/ts/data/configDump/configDump.ts b/ts/data/configDump/configDump.ts index 336cf401ab..ae3f43b20f 100644 --- a/ts/data/configDump/configDump.ts +++ b/ts/data/configDump/configDump.ts @@ -1,11 +1,12 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; import { AsyncObjectWrapper, ConfigDumpDataNode, ConfigDumpRow } from '../../types/sqlSharedTypes'; // eslint-disable-next-line import/no-unresolved, import/extensions -import { ConfigWrapperObjectTypes } from '../../webworker/workers/browser/libsession_worker_functions'; +import { ConfigWrapperObjectTypesMeta } from '../../webworker/workers/browser/libsession_worker_functions'; import { channels } from '../channels'; import { cleanData } from '../dataUtils'; export const ConfigDumpData: AsyncObjectWrapper = { - getByVariantAndPubkey: (variant: ConfigWrapperObjectTypes, pubkey: string) => { + getByVariantAndPubkey: (variant: ConfigWrapperObjectTypesMeta, pubkey: string) => { return channels.getByVariantAndPubkey(variant, pubkey); }, saveConfigDump: (dump: ConfigDumpRow) => { @@ -17,4 
+18,10 @@ export const ConfigDumpData: AsyncObjectWrapper = { getAllDumpsWithoutData: () => { return channels.getAllDumpsWithoutData(); }, + getAllDumpsWithoutDataFor: (pk: string) => { + return channels.getAllDumpsWithoutDataFor(pk); + }, + deleteDumpFor: (pk: GroupPubkeyType) => { + return channels.deleteDumpFor(pk); + }, }; diff --git a/ts/data/data.ts b/ts/data/data.ts index db8a00f6e4..7ee9441429 100644 --- a/ts/data/data.ts +++ b/ts/data/data.ts @@ -1,5 +1,6 @@ // eslint:disable: no-require-imports no-var-requires one-variable-per-declaration no-void-expression function-name +import { GroupPubkeyType } from 'libsession_util_nodejs'; import _, { isEmpty } from 'lodash'; import { ConversationModel } from '../models/conversation'; import { ConversationAttributes } from '../models/conversationAttributes'; @@ -25,6 +26,11 @@ import { channels } from './channels'; import * as dataInit from './dataInit'; import { cleanData } from './dataUtils'; import { SNODE_POOL_ITEM_ID } from './settings-key'; +import { + FindAllMessageFromSendersInConversationTypeArgs, + FindAllMessageHashesInConversationMatchingAuthorTypeArgs, + FindAllMessageHashesInConversationTypeArgs, +} from './sharedDataTypes'; import { GuardNode, Snode } from './types'; const ERASE_SQL_KEY = 'erase-sql-key'; @@ -209,12 +215,12 @@ async function cleanLastHashes(): Promise { await channels.cleanLastHashes(); } -async function saveSeenMessageHashes( - data: Array<{ - expiresAt: number; - hash: string; - }> -): Promise { +export type SeenMessageHashes = { + expiresAt: number; + hash: string; +}; + +async function saveSeenMessageHashes(data: Array): Promise { await channels.saveSeenMessageHashes(cleanData(data)); } @@ -266,6 +272,22 @@ async function removeMessagesByIds(ids: Array): Promise { await channels.removeMessagesByIds(ids); } +async function removeAllMessagesInConversationSentBefore(args: { + deleteBeforeSeconds: number; + conversationId: GroupPubkeyType; +}): Promise> { + return channels.removeAllMessagesInConversationSentBefore(args); +} + +async function getAllMessagesWithAttachmentsInConversationSentBefore(args: { + deleteAttachBeforeSeconds: number; + conversationId: GroupPubkeyType; +}): Promise> { + const msgAttrs = await channels.getAllMessagesWithAttachmentsInConversationSentBefore(args); + + return msgAttrs.map((msg: any) => new MessageModel(msg)); +} + async function getMessageIdsFromServerIds( serverIds: Array | Array, conversationId: string @@ -551,6 +573,42 @@ async function removeAllMessagesInConversation(conversationId: string): Promise< ); } +async function findAllMessageFromSendersInConversation( + args: FindAllMessageFromSendersInConversationTypeArgs +): Promise> { + const msgAttrs = await channels.findAllMessageFromSendersInConversation(args); + + if (!msgAttrs || isEmpty(msgAttrs)) { + return []; + } + + return msgAttrs.map((msg: any) => new MessageModel(msg)); +} + +async function findAllMessageHashesInConversation( + args: FindAllMessageHashesInConversationTypeArgs +): Promise> { + const msgAttrs = await channels.findAllMessageHashesInConversation(args); + + if (!msgAttrs || isEmpty(msgAttrs)) { + return []; + } + + return msgAttrs.map((msg: any) => new MessageModel(msg)); +} + +async function findAllMessageHashesInConversationMatchingAuthor( + args: FindAllMessageHashesInConversationMatchingAuthorTypeArgs +): Promise> { + const msgAttrs = await channels.findAllMessageHashesInConversationMatchingAuthor(args); + + if (!msgAttrs || isEmpty(msgAttrs)) { + return []; + } + + return msgAttrs.map((msg: any) 
=> new MessageModel(msg)); +} + async function getMessagesBySentAt(sentAt: number): Promise { const messages = await channels.getMessagesBySentAt(sentAt); return new MessageCollection(messages); @@ -806,6 +864,8 @@ export const Data = { saveMessages, removeMessage, removeMessagesByIds, + removeAllMessagesInConversationSentBefore, + getAllMessagesWithAttachmentsInConversationSentBefore, cleanUpExpirationTimerUpdateHistory, getMessageIdsFromServerIds, getMessageById, @@ -830,6 +890,9 @@ export const Data = { getLastHashBySnode, getSeenMessagesByHashList, removeAllMessagesInConversation, + findAllMessageFromSendersInConversation, + findAllMessageHashesInConversation, + findAllMessageHashesInConversationMatchingAuthor, getMessagesBySentAt, getExpiredMessages, getOutgoingWithoutExpiresAt, diff --git a/ts/data/dataInit.ts b/ts/data/dataInit.ts index 3577c9f548..44de6f867c 100644 --- a/ts/data/dataInit.ts +++ b/ts/data/dataInit.ts @@ -42,6 +42,8 @@ const channelsToMake = new Set([ 'saveMessages', 'removeMessage', 'removeMessagesByIds', + 'getAllMessagesWithAttachmentsInConversationSentBefore', + 'removeAllMessagesInConversationSentBefore', 'cleanUpExpirationTimerUpdateHistory', 'getUnreadByConversation', 'getUnreadDisappearingByConversation', @@ -49,6 +51,9 @@ const channelsToMake = new Set([ 'getUnreadCountByConversation', 'getMessageCountByType', 'removeAllMessagesInConversation', + 'findAllMessageFromSendersInConversation', + 'findAllMessageHashesInConversation', + 'findAllMessageHashesInConversationMatchingAuthor', 'getMessageCount', 'filterAlreadyFetchedOpengroupMessage', 'getMessagesBySenderAndSentAt', diff --git a/ts/data/sharedDataTypes.ts b/ts/data/sharedDataTypes.ts new file mode 100644 index 0000000000..42083d210d --- /dev/null +++ b/ts/data/sharedDataTypes.ts @@ -0,0 +1,17 @@ +import { PubkeyType, WithGroupPubkey } from 'libsession_util_nodejs'; + +export type FindAllMessageFromSendersInConversationTypeArgs = WithGroupPubkey & { + toRemove: Array; + signatureTimestamp: number; +}; + +export type FindAllMessageHashesInConversationTypeArgs = WithGroupPubkey & { + messageHashes: Array; + signatureTimestamp: number; +}; + +export type FindAllMessageHashesInConversationMatchingAuthorTypeArgs = WithGroupPubkey & { + messageHashes: Array; + author: PubkeyType; + signatureTimestamp: number; +}; diff --git a/ts/hooks/useEncryptedFileFetch.ts b/ts/hooks/useEncryptedFileFetch.ts index 19af3af172..23eb9f9357 100644 --- a/ts/hooks/useEncryptedFileFetch.ts +++ b/ts/hooks/useEncryptedFileFetch.ts @@ -1,9 +1,6 @@ import { useCallback, useEffect, useState } from 'react'; -import { - getAlreadyDecryptedMediaUrl, - getDecryptedMediaUrl, -} from '../session/crypto/DecryptedAttachmentsManager'; +import { DecryptedAttachmentsManager } from '../session/crypto/DecryptedAttachmentsManager'; import { perfEnd, perfStart } from '../session/utils/Performance'; export const useEncryptedFileFetch = ( @@ -17,7 +14,7 @@ export const useEncryptedFileFetch = ( const [urlToLoad, setUrlToLoad] = useState(undefined); const [loading, setLoading] = useState(true); - const alreadyDecrypted = url ? getAlreadyDecryptedMediaUrl(url) : ''; + const alreadyDecrypted = url ? 
DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl(url) : ''; const fetchUrl = useCallback( async (mediaUrl: string | undefined) => { @@ -33,7 +30,11 @@ export const useEncryptedFileFetch = ( try { perfStart(`getDecryptedMediaUrl-${mediaUrl}-${timestamp}`); - const decryptedUrl = await getDecryptedMediaUrl(mediaUrl, contentType, isAvatar); + const decryptedUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( + mediaUrl, + contentType, + isAvatar + ); perfEnd( `getDecryptedMediaUrl-${mediaUrl}-${timestamp}`, `getDecryptedMediaUrl-${mediaUrl}-${timestamp}` diff --git a/ts/hooks/useParamSelector.ts b/ts/hooks/useParamSelector.ts index b6db344f61..3ce69a42f9 100644 --- a/ts/hooks/useParamSelector.ts +++ b/ts/hooks/useParamSelector.ts @@ -1,4 +1,5 @@ import { createSelector } from '@reduxjs/toolkit'; +import { PubkeyType } from 'libsession_util_nodejs'; import { compact, isEmpty, isFinite, isNumber, pick } from 'lodash'; import { useMemo } from 'react'; import { useSelector } from 'react-redux'; @@ -6,6 +7,7 @@ import { hasValidIncomingRequestValues, hasValidOutgoingRequestValues, } from '../models/conversation'; +import { ConversationTypeEnum } from '../models/types'; import { isUsAnySogsFromCache } from '../session/apis/open_group_api/sogsv3/knownBlindedkeys'; import { TimerOptions, TimerOptionsArray } from '../session/disappearing_messages/timerOptions'; import { PubKey } from '../session/types'; @@ -16,7 +18,14 @@ import { getMessagePropsByMessageId, getMessageReactsProps, } from '../state/selectors/conversations'; +import { useLibGroupAdmins, useLibGroupMembers, useLibGroupName } from '../state/selectors/groups'; import { isPrivateAndFriend } from '../state/selectors/selectedConversation'; +import { useOurPkStr } from '../state/selectors/user'; +import { + useLibGroupDestroyed, + useLibGroupInvitePending, + useLibGroupKicked, +} from '../state/selectors/userGroups'; export function useAvatarPath(convoId: string | undefined) { const convoProps = useConversationPropsById(convoId); @@ -33,7 +42,17 @@ export function useOurAvatarPath() { */ export function useConversationUsername(convoId?: string) { const convoProps = useConversationPropsById(convoId); + const groupName = useLibGroupName(convoId); + if (convoId && PubKey.is03Pubkey(convoId) && groupName) { + // when getting a new 03 group from the user group wrapper, + // we set the displayNameInProfile with the name from the wrapper. + // So let's keep falling back to convoProps?.displayNameInProfile if groupName is not set yet (it comes later through the groupInfos namespace) + return groupName; + } + if (convoId && (PubKey.is03Pubkey(convoId) || PubKey.is05Pubkey(convoId))) { + return convoProps?.nickname || convoProps?.displayNameInProfile || PubKey.shorten(convoId); + } return convoProps?.nickname || convoProps?.displayNameInProfile || convoId; } @@ -60,18 +79,51 @@ export function useConversationRealName(convoId?: string) { return convoProps?.isPrivate ? 
convoProps?.displayNameInProfile : undefined; } +function usernameForQuoteOrFullPk(pubkey: string, state: StateType) { + if (pubkey === UserUtils.getOurPubKeyStrFromCache() || pubkey.toLowerCase() === 'you') { + return window.i18n('you'); + } + // use the name from the cached libsession wrappers if available + if (PubKey.is03Pubkey(pubkey)) { + const info = state.groups.infos[pubkey]; + if (info && info.name) { + return info.name; + } + } + const convo = state.conversations.conversationLookup[pubkey]; + + const nameGot = convo?.nickname || convo?.displayNameInProfile; + return nameGot?.length ? nameGot : null; +} + +export function usernameForQuoteOrFullPkOutsideRedux(pubkey: string) { + if (window?.inboxStore?.getState()) { + return usernameForQuoteOrFullPk(pubkey, window.inboxStore.getState()) || PubKey.shorten(pubkey); + } + return PubKey.shorten(pubkey); +} + /** * Returns either the nickname, the profileName, in '"' or the full pubkeys given */ export function useConversationsUsernameWithQuoteOrFullPubkey(pubkeys: Array) { return useSelector((state: StateType) => { return pubkeys.map(pubkey => { - if (pubkey === UserUtils.getOurPubKeyStrFromCache() || pubkey.toLowerCase() === 'you') { - return window.i18n('you'); - } - const convo = state.conversations.conversationLookup[pubkey]; - const nameGot = convo?.displayNameInProfile; - return nameGot?.length ? `"${nameGot}"` : pubkey; + const nameGot = usernameForQuoteOrFullPk(pubkey, state); + return nameGot?.length ? nameGot : pubkey; + }); + }); +} + +/** + * Returns either the nickname, the profileName, a shortened pubkey, or "you" for our own pubkey + */ +export function useConversationsUsernameWithQuoteOrShortPk(pubkeys: Array) { + return useSelector((state: StateType) => { + return pubkeys.map(pubkey => { + const nameGot = usernameForQuoteOrFullPk(pubkey, state); + + return nameGot?.length ? 
nameGot : PubKey.shorten(pubkey); }); }); } @@ -136,6 +188,11 @@ export function useNotificationSetting(convoId?: string) { return convoProps?.currentNotificationSetting || 'all'; } +export function useIsGroupV2(convoId?: string) { + const convoProps = useConversationPropsById(convoId); + return convoId && convoProps?.type === ConversationTypeEnum.GROUPV2 && PubKey.is03Pubkey(convoId); +} + export function useIsPublic(convoId?: string) { const convoProps = useConversationPropsById(convoId); return Boolean(convoProps && convoProps.isPublic); } @@ -155,19 +212,39 @@ export function useIsActive(convoId?: string) { return !!useActiveAt(convoId); } -export function useIsLeft(convoId?: string) { +export function useIsKickedFromGroup(convoId?: string) { const convoProps = useConversationPropsById(convoId); - return Boolean(convoProps && convoProps.left); + const libIsKicked = useLibGroupKicked(convoId); + if (convoId && PubKey.is03Pubkey(convoId)) { + return libIsKicked; + } + return Boolean(convoProps && (convoProps.isKickedFromGroup || libIsKicked)); // not ideal, but until we trust what we get from libsession for all cases, we have to trust what we have in the DB } -export function useIsKickedFromGroup(convoId?: string) { - const convoProps = useConversationPropsById(convoId); - return Boolean(convoProps && convoProps.isKickedFromGroup); +export function useIsGroupDestroyed(convoId?: string) { + const libIsDestroyed = useLibGroupDestroyed(convoId); + if (convoId && PubKey.is03Pubkey(convoId)) { + return libIsDestroyed; + } + return false; } export function useWeAreAdmin(convoId?: string) { + const groupAdmins = useGroupAdmins(convoId); + const us = useOurPkStr(); + return Boolean(groupAdmins.includes(us)); +} + +export function useGroupAdmins(convoId?: string) { const convoProps = useConversationPropsById(convoId); - return Boolean(convoProps && convoProps.weAreAdmin); + + const libMembers = useLibGroupAdmins(convoId); + + if (convoId && PubKey.is03Pubkey(convoId)) { + return compact(libMembers?.slice()?.sort()) || []; + } + + return convoProps?.groupAdmins || []; } export function useExpireTimer(convoId?: string) { @@ -192,18 +269,21 @@ export function useIsApproved(convoId?: string) { export function useIsIncomingRequest(convoId?: string) { const convoProps = useConversationPropsById(convoId); + const invitePending = useLibGroupInvitePending(convoId) || false; if (!convoProps) { return false; } return Boolean( convoProps && hasValidIncomingRequestValues({ + id: convoProps.id, isMe: convoProps.isMe || false, isApproved: convoProps.isApproved || false, isPrivate: convoProps.isPrivate || false, isBlocked: convoProps.isBlocked || false, didApproveMe: convoProps.didApproveMe || false, activeAt: convoProps.activeAt || 0, + invitePending, }) ); } @@ -227,7 +307,13 @@ export function useIsOutgoingRequest(convoId?: string) { ); } -export function useConversationPropsById(convoId?: string) { +/** + * Note: NOT to be exported: + * This selector is too generic and needs to be broken down into individual field selectors. + * Make sure when writing a selector that you fetch the data from libsession if needed.
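+ * Selectors above such as useGroupAdmins() or useIsKickedFromGroup() already merge in the libsession wrapper state for 03 groups.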
+ * (check useSortedGroupMembers() as an example) + */ +function useConversationPropsById(convoId?: string) { return useSelector((state: StateType) => { if (!convoId) { return null; @@ -240,6 +326,19 @@ export function useConversationPropsById(convoId?: string) { }); } +export function useZombies(convoId?: string) { + return useSelector((state: StateType) => { + if (!convoId) { + return null; + } + const convo = state.conversations.conversationLookup[convoId]; + if (!convo) { + return null; + } + return convo.zombies; + }); +} + export function useMessageReactsPropsById(messageId?: string) { return useSelector((state: StateType) => { if (!messageId) { @@ -382,17 +481,28 @@ export function useQuoteAuthorName(authorId?: string): { return { authorName, isMe }; } +function useMembers(convoId: string | undefined) { + const props = useConversationPropsById(convoId); + return props?.members || undefined; +} + /** * Get the list of members of a closed group or [] * @param convoId the closed group id to extract members from */ -export function useSortedGroupMembers(convoId: string | undefined): Array { - const convoProps = useConversationPropsById(convoId); - if (!convoProps || convoProps.isPrivate || convoProps.isPublic) { +export function useSortedGroupMembers(convoId: string | undefined): Array { + const members = useMembers(convoId); + const isPublic = useIsPublic(convoId); + const isPrivate = useIsPrivate(convoId); + const libMembers = useLibGroupMembers(convoId); + if (isPrivate || isPublic) { return []; } + if (convoId && PubKey.is03Pubkey(convoId)) { + return compact(libMembers?.slice()?.sort()) || []; + } // we need to clone the array before being able to call sort() it - return compact(convoProps.members?.slice()?.sort()) || []; + return (compact(members?.slice()?.sort()) || []) as Array; } export function useDisappearingMessageSettingText({ diff --git a/ts/interactions/conversationInteractions.ts b/ts/interactions/conversationInteractions.ts index 5f02652920..365ce475bb 100644 --- a/ts/interactions/conversationInteractions.ts +++ b/ts/interactions/conversationInteractions.ts @@ -1,9 +1,10 @@ -import { isNil } from 'lodash'; +import { isEmpty, isNil, uniq } from 'lodash'; +import { PubkeyType, WithGroupPubkey } from 'libsession_util_nodejs'; import { ConversationNotificationSettingType, READ_MESSAGE_STATE, } from '../models/conversationAttributes'; -import { CallManager, SyncUtils, ToastUtils, UserUtils } from '../session/utils'; +import { CallManager, PromiseUtils, SyncUtils, ToastUtils, UserUtils } from '../session/utils'; import { SessionButtonColor } from '../components/basic/SessionButton'; import { getCallMediaPermissionsSettings } from '../components/settings/SessionSettings'; @@ -12,14 +13,16 @@ import { SettingsKey } from '../data/settings-key'; import { ConversationTypeEnum } from '../models/types'; import { uploadFileToFsWithOnionV4 } from '../session/apis/file_server_api/FileServerApi'; import { OpenGroupUtils } from '../session/apis/open_group_api/utils'; -import { GetNetworkTime } from '../session/apis/snode_api/getNetworkTime'; -import { getConversationController } from '../session/conversations'; +import { getSwarmPollingInstance } from '../session/apis/snode_api'; +import { ConvoHub } from '../session/conversations'; import { getSodiumRenderer } from '../session/crypto'; -import { getDecryptedMediaUrl } from '../session/crypto/DecryptedAttachmentsManager'; +import { DecryptedAttachmentsManager } from '../session/crypto/DecryptedAttachmentsManager'; import { 
DisappearingMessageConversationModeType } from '../session/disappearing_messages/types'; +import { PubKey } from '../session/types'; import { perfEnd, perfStart } from '../session/utils/Performance'; -import { fromHexToArray, toHex } from '../session/utils/String'; -import { ConfigurationSync } from '../session/utils/job_runners/jobs/ConfigurationSyncJob'; +import { sleepFor } from '../session/utils/Promise'; +import { ed25519Str, fromHexToArray, toHex } from '../session/utils/String'; +import { UserSync } from '../session/utils/job_runners/jobs/UserSyncJob'; import { SessionUtilContact } from '../session/utils/libsession/libsession_utils_contacts'; import { forceSyncConfigurationNowIfNeeded } from '../session/utils/sync/syncUtils'; import { @@ -48,6 +51,14 @@ import { Storage, setLastProfileUpdateTimestamp } from '../util/storage'; import { UserGroupsWrapperActions } from '../webworker/workers/browser/libsession_worker_interface'; import { ConversationInteractionStatus, ConversationInteractionType } from './types'; import { BlockedNumberController } from '../util'; +import type { LocalizerComponentProps, LocalizerToken } from '../types/localizer'; +import { sendInviteResponseToGroup } from '../session/sending/group/GroupInviteResponse'; +import { NetworkTime } from '../util/NetworkTime'; +import { ClosedGroup } from '../session'; +import { GroupUpdateMessageFactory } from '../session/messages/message_factory/group/groupUpdateMessageFactory'; +import { GroupPromote } from '../session/utils/job_runners/jobs/GroupPromoteJob'; +import { MessageSender } from '../session/sending'; +import { StoreGroupRequestFactory } from '../session/apis/snode_api/factories/StoreGroupRequestFactory'; export async function copyPublicKeyByConvoId(convoId: string) { if (OpenGroupUtils.isOpenGroupV2(convoId)) { @@ -85,48 +96,118 @@ export async function unblockConvoById(conversationId: string) { ); } -/** - * marks the conversation's approval fields, sends messageRequestResponse, syncs to linked devices - */ -export const approveConvoAndSendResponse = async (conversationId: string) => { - const convoToApprove = getConversationController().get(conversationId); +export const handleAcceptConversationRequest = async ({ convoId }: { convoId: string }) => { + const convo = ConvoHub.use().get(convoId); + if (!convo || (!convo.isPrivate() && !convo.isClosedGroupV2())) { + return null; + } + const previousIsApproved = convo.isApproved(); + const previousDidApprovedMe = convo.didApproveMe(); + // Note: we don't mark as approvedMe = true, as we do not know if they did send us a message yet. + await convo.setIsApproved(true, false); + await convo.commit(); + void forceSyncConfigurationNowIfNeeded(); + + if (convo.isPrivate()) { + // we only need the approval message (and sending a reply) when we are accepting a message request. i.e. someone sent us a message already and we didn't accept it yet. 
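+ // previousIsApproved and previousDidApprovedMe were captured before the setIsApproved() call above, so they still reflect the state of the request at the moment it was accepted.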
+ if (!previousIsApproved && previousDidApprovedMe) { + await convo.addOutgoingApprovalMessage(Date.now()); + await convo.sendMessageRequestResponse(); + } - if (!convoToApprove) { - window?.log?.info('Conversation is already approved.'); - return; + return null; } + if (PubKey.is03Pubkey(convoId)) { + const found = await UserGroupsWrapperActions.getGroup(convoId); + if (!found) { + window.log.warn('cannot approve a non existing group in user group'); + return null; + } + // this updates the wrapper and refresh the redux slice + await UserGroupsWrapperActions.setGroup({ ...found, invitePending: false }); - await convoToApprove.setIsApproved(true, false); + // nothing else to do (and especially not wait for first poll) when the convo was already approved + if (previousIsApproved) { + return null; + } + const pollAndSendResponsePromise = new Promise(resolve => { + getSwarmPollingInstance().addGroupId(convoId, async () => { + // we need to do a first poll to fetch the keys etc before we can send our invite response + // this is pretty hacky, but also an admin seeing a message from that user in the group will mark it as not pending anymore + await sleepFor(2000); + if (!previousIsApproved) { + await sendInviteResponseToGroup({ groupPk: convoId }); + } + + window.log.info( + `handleAcceptConversationRequest: first poll for group ${ed25519Str(convoId)} happened, we should have encryption keys now` + ); + return resolve(true); + }); + }); - await convoToApprove.commit(); - await convoToApprove.sendMessageRequestResponse(); + // try at most 10s for the keys, and everything to come before continuing processing. + // Note: this is important as otherwise the polling just hangs when sending a message to a group (as the cb in addGroupId() is never called back) + const timeout = 10000; + try { + await PromiseUtils.timeout(pollAndSendResponsePromise, timeout); + } catch (e) { + window.log.warn( + `handleAcceptConversationRequest: waited ${timeout}ms for first poll of group ${ed25519Str(convoId)} to happen, but timed out with: ${e.message}` + ); + } + } + return null; }; export async function declineConversationWithoutConfirm({ - blockContact, + alsoBlock, conversationId, currentlySelectedConvo, syncToDevices, + conversationIdOrigin, }: { conversationId: string; currentlySelectedConvo: string | undefined; syncToDevices: boolean; - blockContact: boolean; // if set to false, the contact will just be set to not approved + alsoBlock: boolean; + conversationIdOrigin: string | null; }) { - const conversationToDecline = getConversationController().get(conversationId); + const conversationToDecline = ConvoHub.use().get(conversationId); - if (!conversationToDecline || !conversationToDecline.isPrivate()) { + if ( + !conversationToDecline || + (!conversationToDecline.isPrivate() && !conversationToDecline.isClosedGroupV2()) + ) { window?.log?.info('No conversation to decline.'); return; } + window.log.debug( + `declineConversationWithoutConfirm of ${ed25519Str(conversationId)}, alsoBlock:${alsoBlock}, conversationIdOrigin:${conversationIdOrigin ? 
ed25519Str(conversationIdOrigin) : ''}` + ); // Note: do not set the active_at undefined as this would make that conversation not synced with the libsession wrapper await conversationToDecline.setIsApproved(false, false); await conversationToDecline.setDidApproveMe(false, false); + + if (conversationToDecline.isClosedGroupV2()) { + // this can only be done for groupv2 convos + await conversationToDecline.setOriginConversationID('', false); + } // this will update the value in the wrapper if needed but not remove the entry if we want it gone. The remove is done below with removeContactFromWrapper await conversationToDecline.commit(); - if (blockContact) { - await BlockedNumberController.block(conversationId); + if (alsoBlock) { + if (PubKey.is03Pubkey(conversationId)) { + // Note: if we do want to block this convo, we actually want to block the person who invited us, not the 03 pubkey itself. + // Also, we don't want to show the block/unblock modal in this case + // (we are on the WithoutConfirm function) + if (conversationIdOrigin && !PubKey.is03Pubkey(conversationIdOrigin)) { + // restoring from seed we can be missing the conversationIdOrigin, so we wouldn't be able to block the person who invited us + await BlockedNumberController.block(conversationIdOrigin); + } + } else { + await BlockedNumberController.block(conversationId); + } } // when removing a message request, without blocking it, we actually have no need to store the conversation in the wrapper. So just remove the entry @@ -137,6 +218,18 @@ export async function declineConversationWithoutConfirm({ await SessionUtilContact.removeContactFromWrapper(conversationToDecline.id); } + if (PubKey.is03Pubkey(conversationId)) { + await UserGroupsWrapperActions.eraseGroup(conversationId); + // when deleting a 03 group message request, we also need to remove the conversation altogether + await ConvoHub.use().deleteGroup(conversationId, { + deleteAllMessagesOnSwarm: false, + deletionType: 'doNotKeep', + forceDestroyForAllMembers: false, + fromSyncMessage: false, + sendLeaveMessage: false, + }); + } + if (syncToDevices) { await forceSyncConfigurationNowIfNeeded(); } @@ -148,31 +241,48 @@ export async function declineConversationWithoutConfirm({ export const declineConversationWithConfirm = ({ conversationId, syncToDevices, - blockContact, + alsoBlock, currentlySelectedConvo, + conversationIdOrigin, }: { conversationId: string; currentlySelectedConvo: string | undefined; syncToDevices: boolean; - blockContact: boolean; // if set to false, the contact will just be set to not approved + alsoBlock: boolean; + conversationIdOrigin: string | null; }) => { - const convoName = - getConversationController().get(conversationId)?.getNicknameOrRealUsernameOrPlaceholder() || - window.i18n('unknown'); + const isGroupV2 = PubKey.is03Pubkey(conversationId); + // restoring from seed we might not have the sender of that invite, so we need to take care of not having one (and not block) + const originNameToBlock = + alsoBlock && !!conversationIdOrigin + ? ConvoHub.use().get(conversationIdOrigin)?.getContactProfileNameOrShortenedPubKey() + : null; + + const convoName = ConvoHub.use().get(conversationId)?.getNicknameOrRealUsernameOrPlaceholder(); + + const i18nMessage: LocalizerComponentProps = isGroupV2 + ? alsoBlock && originNameToBlock + ? 
{ token: 'blockDescription', args: { name: originNameToBlock } } // groupv2, and blocking by sender name + : { token: 'groupInviteDelete' } // groupv2, and no info about the sender, falling back to delete only + : alsoBlock + ? { token: 'blockDescription', args: { name: convoName } } + : { token: 'messageRequestsDelete' }; + window?.inboxStore?.dispatch( updateConfirmModal({ - okText: blockContact ? window.i18n('block') : window.i18n('delete'), + okText: alsoBlock ? window.i18n('block') : window.i18n('delete'), cancelText: window.i18n('cancel'), - title: blockContact ? window.i18n('block') : window.i18n('delete'), - i18nMessage: blockContact - ? { token: 'blockDescription', args: { name: convoName } } - : { token: 'messageRequestsDelete' }, + title: alsoBlock ? window.i18n('block') : window.i18n('delete'), + i18nMessage, + okTheme: SessionButtonColor.Danger, + closeTheme: SessionButtonColor.Primary, onClickOk: async () => { await declineConversationWithoutConfirm({ conversationId, currentlySelectedConvo, - blockContact, + alsoBlock, syncToDevices, + conversationIdOrigin, }); }, onClickCancel: () => { @@ -181,40 +291,38 @@ export const declineConversationWithConfirm = ({ onClickClose: () => { window?.inboxStore?.dispatch(updateConfirmModal(null)); }, - okTheme: SessionButtonColor.Danger, - closeTheme: SessionButtonColor.Primary, }) ); }; export async function showUpdateGroupNameByConvoId(conversationId: string) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); if (conversation.isClosedGroup()) { // make sure all the members' convo exists so we can add or remove them await Promise.all( conversation - .get('members') - .map(m => getConversationController().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE)) + .getGroupMembers() + .map(m => ConvoHub.use().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE)) ); } window.inboxStore?.dispatch(updateGroupNameModal({ conversationId })); } export async function showUpdateGroupMembersByConvoId(conversationId: string) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); if (conversation.isClosedGroup()) { // make sure all the members' convo exists so we can add or remove them await Promise.all( conversation - .get('members') - .map(m => getConversationController().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE)) + .getGroupMembers() + .map(m => ConvoHub.use().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE)) ); } window.inboxStore?.dispatch(updateGroupMembersModal({ conversationId })); } -export function showLeavePrivateConversationbyConvoId(conversationId: string) { - const conversation = getConversationController().get(conversationId); +export function showLeavePrivateConversationByConvoId(conversationId: string) { + const conversation = ConvoHub.use().get(conversationId); const isMe = conversation.isMe(); if (!conversation.isPrivate()) { @@ -233,14 +341,14 @@ export function showLeavePrivateConversationbyConvoId(conversationId: string) { status: ConversationInteractionStatus.Start, }); onClickClose(); - await getConversationController().delete1o1(conversationId, { + await ConvoHub.use().delete1o1(conversationId, { fromSyncMessage: false, justHidePrivate: true, keepMessages: isMe, }); await clearConversationInteractionState({ conversationId }); } catch (err) { - window.log.warn(`showLeavePrivateConversationbyConvoId error: ${err}`); + 
window.log.warn(`showLeavePrivateConversationByConvoId error: ${err}`); await saveConversationInteractionErrorAsMessage({ conversationId, interactionType: isMe @@ -270,19 +378,24 @@ export function showLeavePrivateConversationbyConvoId(conversationId: string) { ); } -async function leaveGroupOrCommunityByConvoId( - conversationId: string, - isPublic: boolean, - forceDeleteLocal: boolean, - onClickClose?: () => void -) { +async function leaveGroupOrCommunityByConvoId({ + conversationId, + sendLeaveMessage, + isPublic, + onClickClose, +}: { + conversationId: string; + isPublic: boolean; + sendLeaveMessage: boolean; + onClickClose?: () => void; +}) { try { if (onClickClose) { onClickClose(); } if (isPublic) { - await getConversationController().deleteCommunity(conversationId, { + await ConvoHub.use().deleteCommunity(conversationId, { fromSyncMessage: false, }); return; @@ -294,11 +407,21 @@ async function leaveGroupOrCommunityByConvoId( type: ConversationInteractionType.Leave, status: ConversationInteractionStatus.Start, }); - await getConversationController().deleteClosedGroup(conversationId, { - fromSyncMessage: false, - sendLeaveMessage: true, - forceDeleteLocal, - }); + + if (PubKey.is05Pubkey(conversationId)) { + await ConvoHub.use().deleteLegacyGroup(conversationId, { + fromSyncMessage: false, + sendLeaveMessage, + }); + } else if (PubKey.is03Pubkey(conversationId)) { + await ConvoHub.use().deleteGroup(conversationId, { + fromSyncMessage: false, + sendLeaveMessage, + deleteAllMessagesOnSwarm: false, + deletionType: 'doNotKeep', + forceDestroyForAllMembers: false, + }); + } await clearConversationInteractionState({ conversationId }); } catch (err) { window.log.warn(`showLeaveGroupByConvoId error: ${err}`); @@ -310,7 +433,7 @@ async function leaveGroupOrCommunityByConvoId( } export async function showLeaveGroupByConvoId(conversationId: string, name: string | undefined) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); if (!conversation.isGroup()) { throw new Error('showLeaveGroupDialog() called with a non group convo.'); @@ -318,9 +441,12 @@ export async function showLeaveGroupByConvoId(conversationId: string, name: stri const isClosedGroup = conversation.isClosedGroup() || false; const isPublic = conversation.isPublic() || false; - const admins = conversation.get('groupAdmins') || []; + const admins = conversation.getGroupAdmins(); const isAdmin = admins.includes(UserUtils.getOurPubKeyStrFromCache()); - const showOnlyGroupAdminWarning = isClosedGroup && isAdmin && admins.length === 1; + const showOnlyGroupAdminWarning = isClosedGroup && isAdmin; + const weAreLastAdmin = + (PubKey.is05Pubkey(conversationId) && isAdmin && admins.length === 1) || + (PubKey.is03Pubkey(conversationId) && isAdmin && admins.length === 1); const lastMessageInteractionType = conversation.get('lastMessageInteractionType'); const lastMessageInteractionStatus = conversation.get('lastMessageInteractionStatus'); @@ -329,7 +455,7 @@ export async function showLeaveGroupByConvoId(conversationId: string, name: stri lastMessageInteractionType === ConversationInteractionType.Leave && lastMessageInteractionStatus === ConversationInteractionStatus.Error ) { - await leaveGroupOrCommunityByConvoId(conversationId, isPublic, true); + await leaveGroupOrCommunityByConvoId({ conversationId, isPublic, sendLeaveMessage: false }); return; } @@ -340,7 +466,12 @@ export async function showLeaveGroupByConvoId(conversationId: string, name: stri }; const 
onClickOk = async () => { - await leaveGroupOrCommunityByConvoId(conversationId, isPublic, false, onClickClose); + await leaveGroupOrCommunityByConvoId({ + conversationId, + isPublic, + sendLeaveMessage: !weAreLastAdmin, // we don't need to send a leave message when we are the last admin: the group is removed. + onClickClose, + }); }; if (showOnlyGroupAdminWarning) { @@ -350,7 +481,7 @@ export async function showLeaveGroupByConvoId(conversationId: string, name: stri title: window.i18n('groupLeave'), i18nMessage: { token: 'groupDeleteDescription', - args: { group_name: name ?? '' }, + args: { group_name: name || window.i18n('unknown') }, }, onClickOk, okText: window.i18n('leave'), @@ -359,7 +490,7 @@ export async function showLeaveGroupByConvoId(conversationId: string, name: stri conversationId, }) ); - // TODO Only to be used after the closed group rebuild + // TODO this is post release chunk3 stuff: Only to be used after the closed group rebuild chunk3 // const onClickOkLastAdmin = () => { // /* TODO */ // }; @@ -422,7 +553,7 @@ export function showUnbanUserByConvoId(conversationId: string, pubkey?: string) } export async function markAllReadByConvoId(conversationId: string) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); perfStart(`markAllReadByConvoId-${conversationId}`); await conversation?.markAllAsRead(); @@ -434,9 +565,9 @@ export async function setNotificationForConvoId( conversationId: string, selected: ConversationNotificationSettingType ) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); - const existingSettings = conversation.get('triggerNotificationsFor'); + const existingSettings = conversation.getNotificationsFor(); if (existingSettings !== selected) { conversation.set({ triggerNotificationsFor: selected }); await conversation.commit(); @@ -444,7 +575,7 @@ export async function setNotificationForConvoId( } export async function clearNickNameByConvoId(conversationId: string) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); await conversation.setNickname(null, true); } @@ -453,7 +584,7 @@ export function showChangeNickNameByConvoId(conversationId: string) { } export async function deleteAllMessagesByConvoIdNoConfirmation(conversationId: string) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); await Data.removeAllMessagesInConversation(conversationId); // destroy message keeps the active timestamp set so the @@ -494,7 +625,7 @@ export async function setDisappearingMessagesByConvoId( expirationMode: DisappearingMessageConversationModeType, seconds?: number ) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); const canSetDisappearing = !conversation.isOutgoingRequest() && !conversation.isIncomingRequest(); @@ -522,12 +653,12 @@ export async function setDisappearingMessagesByConvoId( } /** - * This function can be used for reupload our avatar to the fileserver or upload a new avatar. + * This function can be used for reupload our avatar to the file server or upload a new avatar. 
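+ * Returns the updated avatarPointer and profileKey on success, or null when there is nothing to upload or the upload fails.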
* * If this is a reupload, the old profileKey is used, otherwise a new one is generated */ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { - const ourConvo = getConversationController().get(UserUtils.getOurPubKeyStrFromCache()); + const ourConvo = ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache()); if (!ourConvo) { window.log.warn('ourConvo not found... This is not a valid case'); return null; @@ -542,8 +673,7 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { } else { // this is a reupload. no need to generate a new profileKey const ourConvoProfileKey = - getConversationController().get(UserUtils.getOurPubKeyStrFromCache())?.get('profileKey') || - null; + ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache())?.getProfileKey() || null; profileKey = ourConvoProfileKey ? fromHexToArray(ourConvoProfileKey) : null; if (!profileKey) { @@ -557,7 +687,11 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { return null; } - const decryptedAvatarUrl = await getDecryptedMediaUrl(currentAttachmentPath, IMAGE_JPEG, true); + const decryptedAvatarUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( + currentAttachmentPath, + IMAGE_JPEG, + true + ); if (!decryptedAvatarUrl) { window.log.warn('Could not decrypt avatar stored locally..'); @@ -577,7 +711,7 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { const avatarPointer = await uploadFileToFsWithOnionV4(encryptedData); if (!avatarPointer) { - window.log.warn('failed to upload avatar to fileserver'); + window.log.warn('failed to upload avatar to file server'); return null; } const { fileUrl, fileId } = avatarPointer; @@ -592,7 +726,7 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { }); // Replace our temporary image with the attachment pointer from the server: ourConvo.set('avatarInProfile', undefined); - const displayName = ourConvo.get('displayNameInProfile'); + const displayName = ourConvo.getRealSessionUsername(); // write the profileKey even if it did not change ourConvo.set({ profileKey: toHex(profileKey) }); @@ -608,7 +742,7 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { if (newAvatarDecrypted) { await setLastProfileUpdateTimestamp(Date.now()); - await ConfigurationSync.queueNewJobIfNeeded(); + await UserSync.queueNewJobIfNeeded(); const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); if (!userConfigLibsession) { @@ -620,8 +754,8 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { ); } return { - avatarPointer: ourConvo.get('avatarPointer'), - profileKey: ourConvo.get('profileKey'), + avatarPointer: ourConvo.getAvatarPointer(), + profileKey: ourConvo.getProfileKey(), }; } @@ -629,7 +763,7 @@ export async function uploadOurAvatar(newAvatarDecrypted?: ArrayBuffer) { * This function can be used for clearing our avatar. */ export async function clearOurAvatar(commit: boolean = true) { - const ourConvo = getConversationController().get(UserUtils.getOurPubKeyStrFromCache()); + const ourConvo = ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache()); if (!ourConvo) { window.log.warn('ourConvo not found... 
This is not a valid case'); return; @@ -662,9 +796,7 @@ export async function replyToMessage(messageId: string) { window.log.warn('Failed to find message to reply to'); return false; } - const conversationModel = getConversationController().getOrThrow( - quotedMessageModel.get('conversationId') - ); + const conversationModel = ConvoHub.use().getOrThrow(quotedMessageModel.get('conversationId')); const quotedMessageProps = await conversationModel.makeQuote(quotedMessageModel); @@ -730,7 +862,7 @@ function isURL(str: string) { } export async function callRecipient(pubkey: string, canCall: boolean) { - const convo = getConversationController().get(pubkey); + const convo = ConvoHub.use().get(pubkey); if (!canCall) { ToastUtils.pushUnableToCall(); @@ -749,8 +881,8 @@ export async function callRecipient(pubkey: string, canCall: boolean) { /** * Updates the interaction state for a conversation. Remember to run clearConversationInteractionState() when the interaction is complete and we don't want to show it in the UI anymore. - * @param conversationId id of the converation we want to interact with - * @param type the type of conversation interaciton we are doing + * @param conversationId id of the conversation we want to interact with + * @param type the type of conversation interaction we are doing * @param status the status of that interaction */ export async function updateConversationInteractionState({ @@ -762,7 +894,7 @@ export async function updateConversationInteractionState({ type: ConversationInteractionType; status: ConversationInteractionStatus; }) { - const convo = getConversationController().get(conversationId); + const convo = ConvoHub.use().get(conversationId); if ( convo && (type !== convo.get('lastMessageInteractionType') || @@ -787,7 +919,7 @@ export async function clearConversationInteractionState({ }: { conversationId: string; }) { - const convo = getConversationController().get(conversationId); + const convo = ConvoHub.use().get(conversationId); if ( convo && (convo.get('lastMessageInteractionType') || convo.get('lastMessageInteractionStatus')) @@ -807,7 +939,7 @@ async function saveConversationInteractionErrorAsMessage({ conversationId: string; interactionType: ConversationInteractionType; }) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); if (!conversation) { return; } @@ -827,7 +959,7 @@ async function saveConversationInteractionErrorAsMessage({ // Add an error message to the database so we can view it in the message history await conversation?.addSingleIncomingMessage({ - source: GetNetworkTime.getNowWithNetworkOffset().toString(), + source: NetworkTime.now().toString(), sent_at: Date.now(), interactionNotification: { interactionType, @@ -839,3 +971,73 @@ async function saveConversationInteractionErrorAsMessage({ conversation.updateLastMessage(); } + +export async function promoteUsersInGroup({ + groupPk, + toPromote, +}: { toPromote: Array } & WithGroupPubkey) { + if (!toPromote.length) { + window.log.debug('promoteUsersInGroup: no users to promote'); + return; + } + + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + window.log.debug('promoteUsersInGroup: group convo not found'); + return; + } + + const groupInWrapper = await UserGroupsWrapperActions.getGroup(groupPk); + if (!groupInWrapper || !groupInWrapper.secretKey || isEmpty(groupInWrapper.secretKey)) { + window.log.debug('promoteUsersInGroup: groupInWrapper not found or no secretkey'); + return; + } + + // push one group 
change message where members are promoted in the group + const membersHex = uniq(toPromote); + const sentAt = NetworkTime.now(); + const us = UserUtils.getOurPubKeyStrFromCache(); + const msgModel = await ClosedGroup.addUpdateMessage({ + diff: { type: 'promoted', promoted: membersHex }, + expireUpdate: null, + sender: us, + sentAt, + convo, + markAlreadySent: false, // the store below will mark the message as sent with dbMsgIdentifier + }); + const groupMemberChange = await GroupUpdateMessageFactory.getPromotedControlMessage({ + adminSecretKey: groupInWrapper.secretKey, + convo, + groupPk, + promoted: membersHex, + createAtNetworkTimestamp: sentAt, + dbMsgIdentifier: msgModel.id, + }); + + if (!groupMemberChange) { + window.log.warn('promoteUsersInGroup: failed to build group change'); + throw new Error('promoteUsersInGroup: failed to build group change'); + } + + const storeRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [groupMemberChange], + groupInWrapper + ); + + const result = await MessageSender.sendEncryptedDataToSnode({ + destination: groupPk, + method: 'batch', + sortedSubRequests: storeRequests, + }); + + if (result?.[0].code !== 200) { + window.log.warn('promoteUsersInGroup: failed to store change'); + throw new Error('promoteUsersInGroup: failed to store change'); + } + + for (let index = 0; index < membersHex.length; index++) { + const member = membersHex[index]; + // eslint-disable-next-line no-await-in-loop + await GroupPromote.addJob({ groupPk, member }); + } +} diff --git a/ts/interactions/conversations/unsendingInteractions.ts b/ts/interactions/conversations/unsendingInteractions.ts index d34d3747ca..06995fdd1b 100644 --- a/ts/interactions/conversations/unsendingInteractions.ts +++ b/ts/interactions/conversations/unsendingInteractions.ts @@ -1,14 +1,16 @@ -import { compact } from 'lodash'; +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { compact, isEmpty } from 'lodash'; import { SessionButtonColor } from '../../components/basic/SessionButton'; import { Data } from '../../data/data'; import { ConversationModel } from '../../models/conversation'; import { MessageModel } from '../../models/message'; -import { getMessageQueue } from '../../session'; import { deleteSogsMessageByServerIds } from '../../session/apis/open_group_api/sogsv3/sogsV3DeleteMessages'; import { SnodeAPI } from '../../session/apis/snode_api/SNodeAPI'; import { SnodeNamespaces } from '../../session/apis/snode_api/namespaces'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; +import { getSodiumRenderer } from '../../session/crypto'; import { UnsendMessage } from '../../session/messages/outgoing/controlMessage/UnsendMessage'; +import { GroupUpdateDeleteMemberContentMessage } from '../../session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage'; import { PubKey } from '../../session/types'; import { ToastUtils, UserUtils } from '../../session/utils'; import { closeRightPanel, resetSelectedMessageIds } from '../../state/ducks/conversations'; @@ -16,62 +18,129 @@ import { updateConfirmModal } from '../../state/ducks/modalDialog'; import { resetRightOverlayMode } from '../../state/ducks/section'; import { ed25519Str } from '../../session/utils/String'; -/** - * Deletes messages for everyone in a 1-1 or everyone in a closed group conversation.
- */ -async function unsendMessagesForEveryone( +import { UserGroupsWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; +import { NetworkTime } from '../../util/NetworkTime'; +import { MessageQueue } from '../../session/sending'; + +async function unsendMessagesForEveryone1o1AndLegacy( conversation: ConversationModel, + destination: PubkeyType, msgsToDelete: Array ) { - window?.log?.info('Deleting messages for all users in this conversation'); - const destinationId = conversation.id; - if (!destinationId) { - return; - } - if (conversation.isOpenGroupV2()) { - throw new Error( - 'Cannot unsend a message for an opengroup v2. This has to be a deleteMessage api call' - ); + const unsendMsgObjects = getUnsendMessagesObjects1o1OrLegacyGroups(msgsToDelete); + + if (conversation.isClosedGroupV2()) { + throw new Error('unsendMessagesForEveryone1o1AndLegacy not compatible with group v2'); } - const unsendMsgObjects = getUnsendMessagesObjects(msgsToDelete); if (conversation.isPrivate()) { // sending to recipient all the messages separately for now await Promise.all( unsendMsgObjects.map(unsendObject => - getMessageQueue() - .sendToPubKey(new PubKey(destinationId), unsendObject, SnodeNamespaces.UserMessages) + MessageQueue.use() + .sendToPubKey(new PubKey(destination), unsendObject, SnodeNamespaces.Default) .catch(window?.log?.error) ) ); await Promise.all( unsendMsgObjects.map(unsendObject => - getMessageQueue() - .sendSyncMessage({ namespace: SnodeNamespaces.UserMessages, message: unsendObject }) + MessageQueue.use() + .sendSyncMessage({ namespace: SnodeNamespaces.Default, message: unsendObject }) .catch(window?.log?.error) ) ); - } else if (conversation.isClosedGroup()) { + return; + } + if (conversation.isClosedGroup()) { // sending to recipient all the messages separately for now await Promise.all( unsendMsgObjects.map(unsendObject => { - return getMessageQueue() + return MessageQueue.use() .sendToGroup({ message: unsendObject, - namespace: SnodeNamespaces.ClosedGroupMessage, - groupPubKey: new PubKey(destinationId), + namespace: SnodeNamespaces.LegacyClosedGroup, + groupPubKey: new PubKey(destination), }) .catch(window?.log?.error); }) ); } +} + +export async function unsendMessagesForEveryoneGroupV2({ + allMessagesFrom, + groupPk, + msgsToDelete, +}: { + groupPk: GroupPubkeyType; + msgsToDelete: Array; + allMessagesFrom: Array; +}) { + const messageHashesToUnsend = getMessageHashes(msgsToDelete); + const group = await UserGroupsWrapperActions.getGroup(groupPk); + + if (!messageHashesToUnsend.length && !allMessagesFrom.length) { + window.log.info('unsendMessagesForEveryoneGroupV2: no hashes nor author to remove'); + return; + } + + await MessageQueue.use().sendToGroupV2NonDurably({ + message: new GroupUpdateDeleteMemberContentMessage({ + createAtNetworkTimestamp: NetworkTime.now(), + expirationType: 'unknown', // GroupUpdateDeleteMemberContentMessage is not displayed so not expiring. + expireTimer: 0, + groupPk, + memberSessionIds: allMessagesFrom, + messageHashes: messageHashesToUnsend, + sodium: await getSodiumRenderer(), + secretKey: group?.secretKey || undefined, + }), + }); +} + +/** + * Deletes messages for everyone in a 1-1 or everyone in a closed group conversation. 
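+ * For 1-1 and legacy group conversations this goes through the UnsendMessage flow, while 03 groups get a GroupUpdateDeleteMemberContentMessage listing the deleted message hashes.
+ */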
+ */ +async function unsendMessagesForEveryone( + conversation: ConversationModel, + msgsToDelete: Array +) { + window?.log?.info('Deleting messages for all users in this conversation'); + const destinationId = conversation.id as string; + if (!destinationId) { + return; + } + if (conversation.isOpenGroupV2()) { + throw new Error( + 'Cannot unsend a message for an opengroup v2. This has to be a deleteMessage api call' + ); + } + + if ( + conversation.isPrivate() || + (conversation.isClosedGroup() && !conversation.isClosedGroupV2()) + ) { + if (!PubKey.is05Pubkey(conversation.id)) { + throw new Error('unsendMessagesForEveryone1o1AndLegacy requires a 05 key'); + } + await unsendMessagesForEveryone1o1AndLegacy(conversation, conversation.id, msgsToDelete); + } else if (conversation.isClosedGroupV2()) { + if (!PubKey.is03Pubkey(destinationId)) { + throw new Error('invalid conversation id (03) for unsendMessageForEveryone'); + } + await unsendMessagesForEveryoneGroupV2({ + groupPk: destinationId, + msgsToDelete, + allMessagesFrom: [], // currently we cannot remove all the messages from a specific pubkey but we do already handle them on the receiving side + }); + } await deleteMessagesFromSwarmAndCompletelyLocally(conversation, msgsToDelete); window.inboxStore?.dispatch(resetSelectedMessageIds()); ToastUtils.pushDeleted(msgsToDelete.length); } -function getUnsendMessagesObjects(messages: Array) { +function getUnsendMessagesObjects1o1OrLegacyGroups(messages: Array) { // #region building request return compact( messages.map(message => { @@ -85,7 +154,7 @@ function getUnsendMessagesObjects(messages: Array) { } const unsendParams = { - timestamp, + createAtNetworkTimestamp: timestamp, author, }; @@ -95,6 +164,18 @@ function getUnsendMessagesObjects(messages: Array) { // #endregion } +function getMessageHashes(messages: Array) { + return compact( + messages.map(message => { + return message.get('messageHash'); + }) + ); +} + +function isStringArray(value: unknown): value is Array { + return Array.isArray(value) && value.every(val => typeof val === 'string'); +} + /** * Do a single request to the swarm with all the message hashes to delete from the swarm. * @@ -102,19 +183,33 @@ function getUnsendMessagesObjects(messages: Array) { * * Returns true if no errors happened, false in an error happened */ -export async function deleteMessagesFromSwarmOnly(messages: Array) { +export async function deleteMessagesFromSwarmOnly( + messages: Array | Array, + pubkey: PubkeyType | GroupPubkeyType +) { + const deletionMessageHashes = isStringArray(messages) ? 
messages : getMessageHashes(messages); + try { - const deletionMessageHashes = compact(messages.map(m => m.get('messageHash'))); - if (deletionMessageHashes.length > 0) { - const errorOnSnode = await SnodeAPI.networkDeleteMessages(deletionMessageHashes); - return errorOnSnode === null || errorOnSnode.length === 0; + if (isEmpty(messages)) { + return false; } - window.log?.warn( - 'deleteMessagesFromSwarmOnly: We do not have hashes for some of those messages' - ); - return false; + + if (!deletionMessageHashes.length) { + window.log?.warn( + 'deleteMessagesFromSwarmOnly: We do not have hashes for some of those messages' + ); + return false; + } + const hashesAsSet = new Set(deletionMessageHashes); + if (PubKey.is03Pubkey(pubkey)) { + return await SnodeAPI.networkDeleteMessagesForGroup(hashesAsSet, pubkey); + } + return await SnodeAPI.networkDeleteMessageOurSwarm(hashesAsSet, pubkey); } catch (e) { - window.log?.error('deleteMessagesFromSwarmOnly: Error deleting message from swarm', e); + window.log?.error( + `deleteMessagesFromSwarmOnly: Error deleting message from swarm of ${ed25519Str(pubkey)}, hashes: ${deletionMessageHashes}`, + e + ); return false; } } @@ -127,7 +222,17 @@ export async function deleteMessagesFromSwarmAndCompletelyLocally( conversation: ConversationModel, messages: Array ) { - if (conversation.isClosedGroup()) { + const pubkey = conversation.id; + if (!PubKey.is03Pubkey(pubkey) && !PubKey.is05Pubkey(pubkey)) { + throw new Error('deleteMessagesFromSwarmAndCompletelyLocally needs a 03 or 05 pk'); + } + if (PubKey.is05Pubkey(pubkey) && pubkey !== UserUtils.getOurPubKeyStrFromCache()) { + throw new Error( + 'deleteMessagesFromSwarmAndCompletelyLocally with 05 pk can only delete for ourself' + ); + } + // LEGACY GROUPS -- we cannot delete on the swarm (just unsend which is done separately) + if (conversation.isClosedGroup() && PubKey.is05Pubkey(pubkey)) { window.log.info('Cannot delete message from a closed group swarm, so we just complete delete.'); await Promise.all( messages.map(async message => { @@ -136,13 +241,13 @@ export async function deleteMessagesFromSwarmAndCompletelyLocally( ); return; } - window.log.warn( + window.log.info( 'Deleting from swarm of ', - ed25519Str(conversation.id), + ed25519Str(pubkey), ' hashes: ', messages.map(m => m.get('messageHash')) ); - const deletedFromSwarm = await deleteMessagesFromSwarmOnly(messages); + const deletedFromSwarm = await deleteMessagesFromSwarmOnly(messages, pubkey); if (!deletedFromSwarm) { window.log.warn( 'deleteMessagesFromSwarmAndCompletelyLocally: some messages failed to be deleted. Maybe they were already deleted?' @@ -163,8 +268,11 @@ export async function deleteMessagesFromSwarmAndMarkAsDeletedLocally( conversation: ConversationModel, messages: Array ) { - if (conversation.isClosedGroup()) { - window.log.info('Cannot delete messages from a closed group swarm, so we just markDeleted.'); + // legacy groups cannot delete messages on the swarm (just "unsend") + if (conversation.isClosedGroup() && PubKey.is05Pubkey(conversation.id)) { + window.log.info( + 'Cannot delete messages from a legacy closed group swarm, so we just markDeleted.' 
+ ); await Promise.all( messages.map(async message => { return deleteMessageLocallyOnly({ conversation, message, deletionType: 'markDeleted' }); @@ -172,7 +280,14 @@ export async function deleteMessagesFromSwarmAndMarkAsDeletedLocally( ); return; } - const deletedFromSwarm = await deleteMessagesFromSwarmOnly(messages); + + // we can only delete messages on the swarm when they are on our own swarm, or it is a groupv2 that we are the admin off + const pubkeyToDeleteFrom = PubKey.is03Pubkey(conversation.id) + ? conversation.id + : UserUtils.getOurPubKeyStrFromCache(); + + // if this is a groupv2 and we don't have the admin key, it will fail and return false. + const deletedFromSwarm = await deleteMessagesFromSwarmOnly(messages, pubkeyToDeleteFrom); if (!deletedFromSwarm) { window.log.warn( 'deleteMessagesFromSwarmAndMarkAsDeletedLocally: some messages failed to be deleted but still removing the messages content... ' @@ -221,13 +336,13 @@ async function unsendMessageJustForThisUser( ) { window?.log?.warn('Deleting messages just for this user'); - const unsendMsgObjects = getUnsendMessagesObjects(msgsToDelete); + const unsendMsgObjects = getUnsendMessagesObjects1o1OrLegacyGroups(msgsToDelete); // sending to our other devices all the messages separately for now await Promise.all( unsendMsgObjects.map(unsendObject => - getMessageQueue() - .sendSyncMessage({ namespace: SnodeNamespaces.UserMessages, message: unsendObject }) + MessageQueue.use() + .sendSyncMessage({ namespace: SnodeNamespaces.Default, message: unsendObject }) .catch(window?.log?.error) ) ); @@ -249,7 +364,7 @@ const doDeleteSelectedMessagesInSOGS = async ( } // #region open group v2 deletion // Get our Moderator status - const isAdmin = conversation.isAdmin(ourDevicePubkey); + const isAdmin = conversation.weAreAdminUnblinded(); const isModerator = conversation.isModerator(ourDevicePubkey); if (!isAllOurs && !(isAdmin || isModerator)) { @@ -302,15 +417,38 @@ const doDeleteSelectedMessages = async ({ return; } - const isAllOurs = selectedMessages.every(message => ourDevicePubkey === message.getSource()); - if (conversation.isPublic() && deleteForEveryone) { - await doDeleteSelectedMessagesInSOGS(selectedMessages, conversation, isAllOurs); + const areAllOurs = selectedMessages.every(message => message.getSource() === ourDevicePubkey); + if (conversation.isPublic()) { + await doDeleteSelectedMessagesInSOGS(selectedMessages, conversation, areAllOurs); return; } - // #region deletion for 1-1 and closed groups - if (deleteForEveryone) { - if (!isAllOurs) { + /** + * Note: groupv2 support deleteForEveryone only. + * For groupv2, a user can delete only his messages, but an admin can delete the messages of anyone. 
+ * */ + if (deleteForEveryone || conversation.isClosedGroupV2()) { + if (conversation.isClosedGroupV2()) { + const convoId = conversation.id; + if (!PubKey.is03Pubkey(convoId)) { + throw new Error('unsend request for groupv2 but not a 03 key is impossible possible'); + } + // only lookup adminKey if we need to + if (!areAllOurs) { + const group = await UserGroupsWrapperActions.getGroup(convoId); + const weHaveAdminKey = !isEmpty(group?.secretKey); + if (!weHaveAdminKey) { + ToastUtils.pushMessageDeleteForbidden(); + window.inboxStore?.dispatch(resetSelectedMessageIds()); + return; + } + } + // if they are all ours, of not but we are an admin, we can move forward + await unsendMessagesForEveryone(conversation, selectedMessages); + return; + } + + if (!areAllOurs) { ToastUtils.pushMessageDeleteForbidden(); window.inboxStore?.dispatch(resetSelectedMessageIds()); return; @@ -319,7 +457,7 @@ const doDeleteSelectedMessages = async ({ return; } - // delete just for me in a closed group only means delete locally + // delete just for me in a legacy closed group only means delete locally if (conversation.isClosedGroup()) { await deleteMessagesFromSwarmAndCompletelyLocally(conversation, selectedMessages); @@ -330,8 +468,6 @@ const doDeleteSelectedMessages = async ({ } // otherwise, delete that message locally, from our swarm and from our other devices await unsendMessageJustForThisUser(conversation, selectedMessages); - - // #endregion }; /** @@ -356,7 +492,8 @@ export async function deleteMessagesByIdForEveryone( messageIds: Array, conversationId: string ) { - const conversation = getConversationController().getOrThrow(conversationId); + const conversation = ConvoHub.use().getOrThrow(conversationId); + const isMe = conversation.isMe(); const selectedMessages = compact( await Promise.all(messageIds.map(m => Data.getMessageById(m, false))) ); @@ -365,9 +502,13 @@ export async function deleteMessagesByIdForEveryone( window.inboxStore?.dispatch( updateConfirmModal({ - title: window.i18n('clearMessagesForEveryone'), + title: isMe + ? window.i18n('deleteMessageDevicesAll') + : window.i18n('clearMessagesForEveryone'), i18nMessage: { token: 'deleteMessage', args: { count: selectedMessages.length } }, - okText: window.i18n('clearMessagesForEveryone'), + okText: isMe + ? window.i18n('deleteMessageDevicesAll') + : window.i18n('clearMessagesForEveryone'), okTheme: SessionButtonColor.Danger, onClickOk: async () => { await doDeleteSelectedMessages({ selectedMessages, conversation, deleteForEveryone: true }); @@ -383,7 +524,7 @@ export async function deleteMessagesByIdForEveryone( } export async function deleteMessagesById(messageIds: Array, conversationId: string) { - const conversation = getConversationController().getOrThrow(conversationId); + const conversation = ConvoHub.use().getOrThrow(conversationId); const selectedMessages = compact( await Promise.all(messageIds.map(m => Data.getMessageById(m, false))) ); @@ -395,14 +536,20 @@ export async function deleteMessagesById(messageIds: Array, conversation window.inboxStore?.dispatch( updateConfirmModal({ - title: window.i18n('clearMessagesForMe'), - i18nMessage: { token: 'deleteMessage', args: { count: selectedMessages.length } }, + title: window.i18n('deleteMessage', { count: selectedMessages.length }), radioOptions: !isMe ? 
[ - { label: window.i18n('clearMessagesForMe'), value: 'clearMessagesForMe' as const }, + { + label: window.i18n('clearMessagesForMe'), + value: 'clearMessagesForMe' as const, + inputDataTestId: 'input-deleteJustForMe' as const, + labelDataTestId: 'label-deleteJustForMe' as const, + }, { label: window.i18n('clearMessagesForEveryone'), value: clearMessagesForEveryone, + inputDataTestId: 'input-deleteForEveryone' as const, + labelDataTestId: 'label-deleteForEveryone' as const, }, ] : undefined, diff --git a/ts/interactions/messageInteractions.ts b/ts/interactions/messageInteractions.ts index a996977a5a..c67a4ec28e 100644 --- a/ts/interactions/messageInteractions.ts +++ b/ts/interactions/messageInteractions.ts @@ -7,7 +7,7 @@ import { isOpenGroupV2, openGroupV2CompleteURLRegex, } from '../session/apis/open_group_api/utils/OpenGroupUtils'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { PubKey } from '../session/types'; import { ToastUtils } from '../session/utils'; @@ -67,10 +67,10 @@ export function copyBodyToClipboard(body?: string | null) { export async function removeSenderFromModerator(sender: string, convoId: string) { try { const pubKeyToRemove = PubKey.cast(sender); - const convo = getConversationController().getOrThrow(convoId); + const convo = ConvoHub.use().getOrThrow(convoId); const userDisplayName = - getConversationController().get(sender)?.getNicknameOrRealUsernameOrPlaceholder() || + ConvoHub.use().get(sender)?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('unknown'); const roomInfo = convo.toOpenGroupV2(); @@ -91,7 +91,7 @@ export async function removeSenderFromModerator(sender: string, convoId: string) export async function addSenderAsModerator(sender: string, convoId: string) { try { const pubKeyToAdd = PubKey.cast(sender); - const convo = getConversationController().getOrThrow(convoId); + const convo = ConvoHub.use().getOrThrow(convoId); const roomInfo = convo.toOpenGroupV2(); const res = await sogsV3AddAdmin([pubKeyToAdd], roomInfo); @@ -102,7 +102,7 @@ export async function addSenderAsModerator(sender: string, convoId: string) { } else { window?.log?.info(`${pubKeyToAdd.key} added to moderators...`); const userDisplayName = - getConversationController().get(sender)?.getNicknameOrRealUsernameOrPlaceholder() || + ConvoHub.use().get(sender)?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('unknown'); ToastUtils.pushUserAddedToModerators(userDisplayName); } diff --git a/ts/mains/main_node.ts b/ts/mains/main_node.ts index cd4ba2a3f6..243203b01e 100644 --- a/ts/mains/main_node.ts +++ b/ts/mains/main_node.ts @@ -29,7 +29,6 @@ import url from 'url'; import Logger from 'bunyan'; import _, { isEmpty, isNumber, isFinite } from 'lodash'; -import pify from 'pify'; import { setupGlobalErrorHandler } from '../node/global_errors'; // checked - only node import { setup as setupSpellChecker } from '../node/spell_check'; // checked - only node @@ -39,7 +38,7 @@ import packageJson from '../../package.json'; // checked - only node setupGlobalErrorHandler(); -const getRealPath = pify(fs.realpath); +const getRealPath = (p: string) => fs.realpathSync(p); // Hardcoding appId to prevent build failures on release. 
// const appUserModelId = packageJson.build.appId; @@ -529,7 +528,7 @@ setTimeout(readyForUpdates, TEN_MINUTES); function openReleaseNotes() { void shell.openExternal( - `https://github.com/oxen-io/session-desktop/releases/tag/v${app.getVersion()}` + `https://github.com/session-foundation/session-desktop/releases/tag/v${app.getVersion()}` ); } @@ -696,14 +695,13 @@ async function saveDebugLog(_event: any, additionalInfo?: string) { console.error('Error saving debug log', err); } } - // This method will be called when Electron has finished // initialization and is ready to create browser windows. // Some APIs can only be used after this event occurs. let ready = false; app.on('ready', async () => { - const userDataPath = await getRealPath(app.getPath('userData')); - const installPath = await getRealPath(join(app.getAppPath(), '..', '..')); + const userDataPath = getRealPath(app.getPath('userData')); + const installPath = getRealPath(join(app.getAppPath(), '..', '..')); installFileHandler({ protocol: electronProtocol, @@ -757,7 +755,7 @@ function getDefaultSQLKey() { async function removeDB() { // this don't remove attachments and stuff like that... - const userDir = await getRealPath(app.getPath('userData')); + const userDir = getRealPath(app.getPath('userData')); sqlNode.removeDB(userDir); try { @@ -783,7 +781,7 @@ async function removeDB() { } async function showMainWindow(sqlKey: string, passwordAttempt = false) { - const userDataPath = await getRealPath(app.getPath('userData')); + const userDataPath = getRealPath(app.getPath('userData')); await sqlNode.initializeSql({ configDir: userDataPath, @@ -1022,9 +1020,7 @@ ipc.on('get-start-in-tray', event => { ipcMain.on('update-badge-count', (_event, count) => { if (app.isReady()) { - app.setBadgeCount( - isNumber(count) && isFinite(count) && count >= 0 ? count : 0 - ); + app.setBadgeCount(isNumber(count) && isFinite(count) && count >= 0 ? 
count : 0); } }); diff --git a/ts/mains/main_renderer.tsx b/ts/mains/main_renderer.tsx index 30307879da..badfd7d52b 100644 --- a/ts/mains/main_renderer.tsx +++ b/ts/mains/main_renderer.tsx @@ -1,5 +1,5 @@ import Backbone from 'backbone'; -import _ from 'lodash'; +import _, { toPairs } from 'lodash'; import { createRoot } from 'react-dom/client'; import nativeEmojiData from '@emoji-mart/data'; @@ -15,7 +15,7 @@ import { SettingsKey } from '../data/settings-key'; import { MessageModel } from '../models/message'; import { queueAllCached } from '../receiver/receiver'; import { loadKnownBlindedKeys } from '../session/apis/open_group_api/sogsv3/knownBlindedkeys'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { DisappearingMessages } from '../session/disappearing_messages'; import { AttachmentDownloads, ToastUtils } from '../session/utils'; import { getOurPubKeyStrFromCache } from '../session/utils/User'; @@ -136,14 +136,13 @@ ipcRenderer.on('native-theme-update', (__unused, shouldUseDarkColors) => { async function startJobRunners() { // start the job runners - await runners.avatarDownloadRunner.loadJobsFromDb(); - runners.avatarDownloadRunner.startProcessing(); - await runners.configurationSyncRunner.loadJobsFromDb(); - runners.configurationSyncRunner.startProcessing(); - await runners.updateMsgExpiryRunner.loadJobsFromDb(); - runners.updateMsgExpiryRunner.startProcessing(); - await runners.fetchSwarmMsgExpiryRunner.loadJobsFromDb(); - runners.fetchSwarmMsgExpiryRunner.startProcessing(); + const pairs = toPairs(runners); + for (let index = 0; index < pairs.length; index++) { + const runner = pairs[index][1]; + // eslint-disable-next-line no-await-in-loop + await runner.loadJobsFromDb(); + runner.startProcessing(); + } } // We need this 'first' check because we don't want to start the app up any other time @@ -191,7 +190,7 @@ Storage.onready(async () => { // Stop background processing AttachmentDownloads.stop(); // Stop processing incoming messages - // TODOLATER stop polling opengroupv2 and swarm nodes + // TODOLATER stop polling opengroup v2 and swarm nodes // Shut down the data interface cleanly await Data.shutdown(); @@ -205,7 +204,6 @@ Storage.onready(async () => { if (newVersion) { window.log.info(`New version detected: ${currentVersion}; previous: ${lastVersion}`); - await Data.cleanupOrphanedAttachments(); } @@ -226,9 +224,9 @@ Storage.onready(async () => { await initialiseEmojiData(nativeEmojiData); await AttachmentDownloads.initAttachmentPaths(); + await BlockedNumberController.load(); await Promise.all([ - getConversationController().load(), - BlockedNumberController.load(), + ConvoHub.use().load(), OpenGroupData.opengroupRoomsLoad(), loadKnownBlindedKeys(), ]); @@ -304,7 +302,7 @@ async function start() { window.setAutoHideMenuBar(hideMenuBar); window.setMenuBarVisibility(!hideMenuBar); // eslint-disable-next-line more/no-then - void getConversationController() + void ConvoHub.use() .loadPromise() ?.then(() => { const container = document.getElementById('root'); diff --git a/ts/models/conversation.ts b/ts/models/conversation.ts index 7582f5dd81..0f5d57332e 100644 --- a/ts/models/conversation.ts +++ b/ts/models/conversation.ts @@ -17,22 +17,24 @@ import { xor, } from 'lodash'; +import { DisappearingMessageConversationModeType } from 'libsession_util_nodejs'; import { v4 } from 'uuid'; import { SignalService } from '../protobuf'; -import { getMessageQueue } from '../session'; -import { 
getConversationController } from '../session/conversations'; -import { ClosedGroupVisibleMessage } from '../session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; +import { ConvoHub } from '../session/conversations'; +import { + ClosedGroupV2VisibleMessage, + ClosedGroupVisibleMessage, +} from '../session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; import { PubKey } from '../session/types'; import { ToastUtils, UserUtils } from '../session/utils'; import { BlockedNumberController } from '../util'; import { MessageModel } from './message'; -import { MessageAttributesOptionals, MessageDirection } from './messageType'; +import { MessageAttributesOptionals } from './messageType'; import { Data } from '../data/data'; import { OpenGroupUtils } from '../session/apis/open_group_api/utils'; import { getOpenGroupV2FromConversationId } from '../session/apis/open_group_api/utils/OpenGroupUtils'; import { ExpirationTimerUpdateMessage } from '../session/messages/outgoing/controlMessage/ExpirationTimerUpdateMessage'; -import { ReadReceiptMessage } from '../session/messages/outgoing/controlMessage/receipt/ReadReceiptMessage'; import { TypingMessage } from '../session/messages/outgoing/controlMessage/TypingMessage'; import { GroupInvitationMessage } from '../session/messages/outgoing/visibleMessage/GroupInvitationMessage'; import { OpenGroupVisibleMessage } from '../session/messages/outgoing/visibleMessage/OpenGroupVisibleMessage'; @@ -64,20 +66,18 @@ import { } from '../session/apis/open_group_api/sogsv3/knownBlindedkeys'; import { SogsBlinding } from '../session/apis/open_group_api/sogsv3/sogsBlinding'; import { sogsV3FetchPreviewAndSaveIt } from '../session/apis/open_group_api/sogsv3/sogsV3FetchFile'; -import { GetNetworkTime } from '../session/apis/snode_api/getNetworkTime'; import { SnodeNamespaces } from '../session/apis/snode_api/namespaces'; import { getSodiumRenderer } from '../session/crypto'; import { addMessagePadding } from '../session/crypto/BufferPadding'; -import { getDecryptedMediaUrl } from '../session/crypto/DecryptedAttachmentsManager'; +import { DecryptedAttachmentsManager } from '../session/crypto/DecryptedAttachmentsManager'; import { MessageRequestResponse, MessageRequestResponseParams, } from '../session/messages/outgoing/controlMessage/MessageRequestResponse'; -import { ConfigurationSync } from '../session/utils/job_runners/jobs/ConfigurationSyncJob'; +import { UserSync } from '../session/utils/job_runners/jobs/UserSyncJob'; import { SessionUtilContact } from '../session/utils/libsession/libsession_utils_contacts'; import { SessionUtilConvoInfoVolatile } from '../session/utils/libsession/libsession_utils_convo_info_volatile'; import { SessionUtilUserGroups } from '../session/utils/libsession/libsession_utils_user_groups'; -import { forceSyncConfigurationNowIfNeeded } from '../session/utils/sync/syncUtils'; import { getOurProfile } from '../session/utils/User'; import { deleteExternalFilesOfConversation, @@ -105,23 +105,40 @@ import { READ_MESSAGE_STATE, } from './conversationAttributes'; +import { ReadReceiptMessage } from '../session/messages/outgoing/controlMessage/receipt/ReadReceiptMessage'; +import { PreConditionFailed } from '../session/utils/errors'; import { LibSessionUtil } from '../session/utils/libsession/libsession_utils'; import { SessionUtilUserProfile } from '../session/utils/libsession/libsession_utils_user_profile'; import { ReduxSogsRoomInfos } from '../state/ducks/sogsRoomInfo'; +import { + getLibGroupAdminsOutsideRedux, + 
getLibGroupMembersOutsideRedux, + getLibGroupNameOutsideRedux, +} from '../state/selectors/groups'; import { getCanWriteOutsideRedux, getModeratorsOutsideRedux, getSubscriberCountOutsideRedux, } from '../state/selectors/sogsRoomInfo'; // decide it it makes sense to move this to a redux slice? +import { handleAcceptConversationRequest } from '../interactions/conversationInteractions'; import { DisappearingMessages } from '../session/disappearing_messages'; -import { DisappearingMessageConversationModeType } from '../session/disappearing_messages/types'; +import { GroupUpdateInfoChangeMessage } from '../session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage'; import { FetchMsgExpirySwarm } from '../session/utils/job_runners/jobs/FetchMsgExpirySwarmJob'; +import { GroupSync } from '../session/utils/job_runners/jobs/GroupSyncJob'; import { UpdateMsgExpirySwarm } from '../session/utils/job_runners/jobs/UpdateMsgExpirySwarmJob'; +import { getLibGroupKickedOutsideRedux } from '../state/selectors/userGroups'; import { ReleasedFeatures } from '../util/releaseFeature'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../webworker/workers/browser/libsession_worker_interface'; import { markAttributesAsReadIfNeeded } from './messageFactory'; +import { StoreGroupRequestFactory } from '../session/apis/snode_api/factories/StoreGroupRequestFactory'; import { OpenGroupRequestCommonType } from '../data/types'; import { ConversationTypeEnum, CONVERSATION_PRIORITIES } from './types'; +import { NetworkTime } from '../util/NetworkTime'; +import { MessageQueue } from '../session/sending'; type InMemoryConvoInfos = { mentionedUs: boolean; @@ -135,14 +152,14 @@ type InMemoryConvoInfos = { const inMemoryConvoInfos: Map = new Map(); export class ConversationModel extends Backbone.Model { - public updateLastMessage: () => unknown; // unknown because it is a Promise that we do not wait to await + public updateLastMessage: () => unknown; // unknown because it is a Promise that we do not want to await public throttledBumpTyping: () => void; public throttledNotify: (message: MessageModel) => void; public markConversationRead: (opts: { newestUnreadDate: number; fromConfigMessage?: boolean; }) => void; - public initialPromise: any; + public initialPromise: Promise; private typingRefreshTimer?: NodeJS.Timeout | null; private typingPauseTimer?: NodeJS.Timeout | null; @@ -153,7 +170,7 @@ export class ConversationModel extends Backbone.Model { constructor(attributes: ConversationAttributes) { super(fillConvoAttributesWithDefaults(attributes)); - // This may be overridden by getConversationController().getOrCreate, and signify + // This may be overridden by ConvoHub.use().getOrCreate, and signify // our first save to the database. Or first fetch from the database. 
this.initialPromise = Promise.resolve(); autoBind(this); @@ -179,16 +196,23 @@ export class ConversationModel extends Backbone.Model { window.inboxStore?.dispatch(conversationsChanged([this.getConversationModelProps()])); } - public idForLogging() { - if (this.isPrivate()) { - return this.id; - } - - if (this.isPublic()) { - return this.id; + public idForLogging(): string { + const type = this.get('type'); + switch (type) { + case ConversationTypeEnum.PRIVATE: + return this.id; + case ConversationTypeEnum.GROUPV2: + return `group(${ed25519Str(this.id)})`; + case ConversationTypeEnum.GROUP: { + if (this.isPublic()) { + return this.id; + } + return `group(${ed25519Str(this.id)})`; + } + default: + assertUnreachable(type, `idForLogging case not handled for type:"${type}"`); } - - return `group(${ed25519Str(this.id)})`; + return this.id; } public isMe() { @@ -215,16 +239,26 @@ export class ConversationModel extends Backbone.Model { public isClosedGroup(): boolean { return Boolean( - (this.get('type') === ConversationTypeEnum.GROUP && this.id.startsWith('05')) || - (this.get('type') === ConversationTypeEnum.GROUPV3 && this.id.startsWith('03')) + (this.get('type') === ConversationTypeEnum.GROUP && PubKey.is05Pubkey(this.id)) || + this.isClosedGroupV2() ); } + public isClosedGroupV2() { + return Boolean(this.get('type') === ConversationTypeEnum.GROUPV2 && PubKey.is03Pubkey(this.id)); + } + public isPrivate() { return isDirectConversation(this.get('type')); } - // returns true if this is a closed/medium or open group + public isPrivateAndBlinded() { + return this.isPrivate() && PubKey.isBlinded(this.id); + } + + /** + * @returns true if this is a legacy, closed or community + */ public isGroup() { return isOpenOrClosedGroup(this.get('type')); } @@ -248,7 +282,7 @@ export class ConversationModel extends Backbone.Model { * For instance, all of the conversations created when receiving a community are not active, until we start directly talking with them (or they do). */ public isActive() { - return Boolean(this.get('active_at')); + return Boolean(this.getActiveAt()); } /** @@ -256,10 +290,10 @@ export class ConversationModel extends Backbone.Model { * @returns true if this conversation is private and hidden. * A non-private conversation cannot be hidden currently. * - a community is removed straight away when we leave it and not marked hidden - * - a legacy group is kept visible if we leave it, until we explicitely delete it. At that time, it is removed completely and not marked hidden + * - a legacy group is kept visible if we leave it, until we explicitly delete it. 
At that time, it is removed completely and not marked hidden */ public isHidden() { - const priority = this.get('priority') || CONVERSATION_PRIORITIES.default; + const priority = this.getPriority(); return this.isPrivate() && priority === CONVERSATION_PRIORITIES.hidden; } @@ -267,26 +301,35 @@ export class ConversationModel extends Backbone.Model { await deleteExternalFilesOfConversation(this.attributes); } - public getGroupAdmins(): Array { - const groupAdmins = this.get('groupAdmins'); + public getPriority() { + if (PubKey.is05Pubkey(this.id) && this.isPrivate()) { + // TODO once we have a libsession state, we can make this used accross the app without repeating as much + // if a private chat, trust the value from the Libsession wrapper cached first + const contact = SessionUtilContact.getContactCached(this.id); + if (contact) { + return contact.priority; + } + } + return this.get('priority') || CONVERSATION_PRIORITIES.default; + } - return groupAdmins && groupAdmins.length > 0 ? groupAdmins : []; + public getNotificationsFor() { + return this.get('triggerNotificationsFor'); } public getConversationModelProps(): ReduxConversationType { - const ourNumber = UserUtils.getOurPubKeyStrFromCache(); const avatarPath = this.getAvatarPath(); const isPrivate = this.isPrivate(); - // TODO we should maybe make this weAreAdmin not props in redux but computed selectors - const weAreAdmin = this.isAdmin(ourNumber); - const currentNotificationSetting = this.get('triggerNotificationsFor'); - const priorityFromDb = this.get('priority'); + const weAreAdmin = this.weAreAdminUnblinded(); + + const currentNotificationSetting = this.getNotificationsFor(); + const priorityFromDb = this.getPriority(); // To reduce the redux store size, only set fields which cannot be undefined. // For instance, a boolean can usually be not set if false, etc const toRet: ReduxConversationType = { id: this.id as string, - activeAt: this.get('active_at'), + activeAt: this.getActiveAt(), type: this.get('type'), }; @@ -294,8 +337,8 @@ export class ConversationModel extends Backbone.Model { toRet.priority = priorityFromDb; } - if (this.get('markedAsUnread')) { - toRet.isMarkedUnread = !!this.get('markedAsUnread'); + if (this.isMarkedUnread()) { + toRet.isMarkedUnread = this.isMarkedUnread(); } const blocksSogsMsgReqsTimestamp = this.get('blocksSogsMsgReqsTimestamp'); @@ -346,27 +389,27 @@ export class ConversationModel extends Backbone.Model { toRet.currentNotificationSetting = currentNotificationSetting; } - if (this.get('displayNameInProfile')) { - toRet.displayNameInProfile = this.get('displayNameInProfile'); + if (this.getRealSessionUsername()) { + toRet.displayNameInProfile = this.getRealSessionUsername(); } - if (this.get('nickname')) { - toRet.nickname = this.get('nickname'); + if (this.getNickname()) { + toRet.nickname = this.getNickname(); } if (BlockedNumberController.isBlocked(this.id)) { toRet.isBlocked = true; } - if (this.get('didApproveMe')) { - toRet.didApproveMe = this.get('didApproveMe'); + if (this.didApproveMe()) { + toRet.didApproveMe = this.didApproveMe(); } - if (this.get('isApproved')) { - toRet.isApproved = this.get('isApproved'); + if (this.isApproved()) { + toRet.isApproved = this.isApproved(); } if (this.getExpireTimer()) { toRet.expireTimer = this.getExpireTimer(); } // those are values coming only from both the DB or the wrapper. 
Currently we display the data from the DB if (this.isClosedGroup()) { - toRet.members = uniq(this.get('members') || []); + toRet.members = this.getGroupMembers() || []; } // those are values coming only from both the DB or the wrapper. Currently we display the data from the DB @@ -375,16 +418,16 @@ export class ConversationModel extends Backbone.Model { toRet.groupAdmins = this.getGroupAdmins(); } - // those are values coming only from the DB when this is a closed group + if (this.isClosedGroupV2() || this.isPrivateAndBlinded()) { + toRet.conversationIdOrigin = this.getConversationIdOrigin(); + } if (this.isClosedGroup()) { - if (this.get('isKickedFromGroup')) { - toRet.isKickedFromGroup = this.get('isKickedFromGroup'); - } - if (this.get('left')) { - toRet.left = this.get('left'); + // those are values coming only from the DB when this is a closed group + if (this.isKickedFromGroup()) { + toRet.isKickedFromGroup = this.isKickedFromGroup(); } // to be dropped once we get rid of the legacy closed groups - const zombies = this.get('zombies') || []; + const zombies = this.getGroupZombies() || []; if (zombies?.length) { toRet.zombies = uniq(zombies); } @@ -563,39 +606,23 @@ export class ConversationModel extends Backbone.Model { const chatMessageParams: VisibleMessageParams = { body: '', // we need to use a new timestamp here, otherwise android&iOS will consider this message as a duplicate and drop the synced reaction - timestamp: GetNetworkTime.getNowWithNetworkOffset(), + createAtNetworkTimestamp: NetworkTime.now(), reaction, lokiProfile: UserUtils.getOurProfile(), expirationType, expireTimer, }; - const shouldApprove = !this.isApproved() && this.isPrivate(); - const incomingMessageCount = await Data.getMessageCountByType( - this.id, - MessageDirection.incoming - ); - const hasIncomingMessages = incomingMessageCount > 0; - if (PubKey.isBlinded(this.id)) { window.log.info('Sending a blinded message react to this user: ', this.id); await this.sendBlindedMessageRequest(chatMessageParams); return; } - if (shouldApprove) { - await this.setIsApproved(true); - if (hasIncomingMessages) { - // have to manually add approval for local client here as DB conditional approval check in config msg handling will prevent this from running - await this.addOutgoingApprovalMessage(Date.now()); - if (!this.didApproveMe()) { - await this.setDidApproveMe(true); - } - // should only send once - await this.sendMessageRequestResponse(); - void forceSyncConfigurationNowIfNeeded(); - } - } + // handleAcceptConversationRequest will take care of sending response depending on the type of conversation, if needed + await handleAcceptConversationRequest({ + convoId: this.id, + }); if (this.isOpenGroupV2()) { // communities have no expiration timer support, so enforce it here. 
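Both the reaction path in the hunk above and sendMessageJob further down in this file replace their duplicated inline approval logic with a single call to handleAcceptConversationRequest({ convoId }). The helper itself is not part of this diff, so the sketch below is only an assumption about its shape, reconstructed from the inline code being removed here; it leans on project modules visible elsewhere in the patch (ConvoHub, Data, UserSync) and is not the actual implementation from interactions/conversationInteractions.ts.

// Rough sketch, not the real helper: mirrors the inline approval flow removed in this diff.
async function handleAcceptConversationRequest({ convoId }: { convoId: string }) {
  const convo = ConvoHub.use().get(convoId);
  // only private message requests need the approval dance sketched here
  if (!convo || !convo.isPrivate() || convo.isApproved()) {
    return;
  }
  await convo.setIsApproved(true);
  const incomingCount = await Data.getMessageCountByType(convoId, MessageDirection.incoming);
  if (incomingCount > 0) {
    // add the local "request accepted" control message and remember that they approved us
    await convo.addOutgoingApprovalMessage(Date.now());
    if (!convo.didApproveMe()) {
      await convo.setDidApproveMe(true);
    }
    // tell the other side we accepted (sent once), then push our updated contact config
    await convo.sendMessageRequestResponse();
    await UserSync.queueNewJobIfNeeded(); // assumed replacement for forceSyncConfigurationNowIfNeeded()
  }
}

Centralizing this keeps the approve/respond behaviour identical whether the user reacts, replies, or accepts the request explicitly, and gives the groupv2 invite flow a single place to hook into.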
@@ -611,7 +638,7 @@ export class ConversationModel extends Backbone.Model { const blinded = Boolean(roomHasBlindEnabled(openGroup)); // send with blinding if we need to - await getMessageQueue().sendToOpenGroupV2({ + await MessageQueue.use().sendToOpenGroupV2({ message: chatMessageOpenGroupV2, roomInfos, blinded, @@ -627,17 +654,17 @@ export class ConversationModel extends Backbone.Model { ...chatMessageParams, syncTarget: this.id, }); - await getMessageQueue().sendSyncMessage({ - namespace: SnodeNamespaces.UserMessages, + await MessageQueue.use().sendSyncMessage({ + namespace: SnodeNamespaces.Default, message: chatMessageMe, }); const chatMessagePrivate = new VisibleMessage(chatMessageParams); - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendToPubKey( destinationPubkey, chatMessagePrivate, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); await Reactions.handleMessageReaction({ reaction, @@ -650,13 +677,12 @@ export class ConversationModel extends Backbone.Model { const chatMessageMediumGroup = new VisibleMessage(chatMessageParams); const closedGroupVisibleMessage = new ClosedGroupVisibleMessage({ chatMessage: chatMessageMediumGroup, - groupId: destinationPubkey, - timestamp: sentAt, + groupId: destinationPubkey.key, }); // we need the return await so that errors are caught in the catch {} - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: closedGroupVisibleMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); await Reactions.handleMessageReaction({ @@ -677,13 +703,19 @@ export class ConversationModel extends Backbone.Model { * Does this conversation contain the properties to be considered a message request */ public isIncomingRequest(): boolean { + const id = this.id; + const invitePending = PubKey.is03Pubkey(id) + ? UserGroupsWrapperActions.getCachedGroup(id)?.invitePending || false + : false; return hasValidIncomingRequestValues({ + id, isMe: this.isMe(), isApproved: this.isApproved(), isBlocked: this.isBlocked(), isPrivate: this.isPrivate(), - activeAt: this.get('active_at'), + activeAt: this.getActiveAt(), didApproveMe: this.didApproveMe(), + invitePending, }); } @@ -697,13 +729,13 @@ export class ConversationModel extends Backbone.Model { didApproveMe: this.didApproveMe() || false, isBlocked: this.isBlocked() || false, isPrivate: this.isPrivate() || false, - activeAt: this.get('active_at') || 0, + activeAt: this.getActiveAt() || 0, }); } /** * When you have accepted another users message request - * @param timestamp for determining the order for this message to appear like a regular message + * Note: you shouldn't need to use this directly. Instead use `handleAcceptConversationRequest()` */ public async addOutgoingApprovalMessage(timestamp: number) { await this.addSingleOutgoingMessage({ @@ -736,32 +768,31 @@ export class ConversationModel extends Backbone.Model { } /** - * Sends an accepted message request response. + * Sends an accepted message request response to a private chat * Currently, we never send anything for denied message requests. + * Note: you shouldn't need to use this directly. 
Instead use `handleAcceptConversationRequest()` */ public async sendMessageRequestResponse() { if (!this.isPrivate()) { return; } - const timestamp = Date.now(); - const messageRequestResponseParams: MessageRequestResponseParams = { - timestamp, + createAtNetworkTimestamp: NetworkTime.now(), lokiProfile: UserUtils.getOurProfile(), }; const messageRequestResponse = new MessageRequestResponse(messageRequestResponseParams); const pubkeyForSending = new PubKey(this.id); - await getMessageQueue() - .sendToPubKey(pubkeyForSending, messageRequestResponse, SnodeNamespaces.UserMessages) + await MessageQueue.use() + .sendToPubKey(pubkeyForSending, messageRequestResponse, SnodeNamespaces.Default) .catch(window?.log?.error); } public async sendMessage(msg: SendMessageType) { const { attachments, body, groupInvitation, preview, quote } = msg; this.clearTypingTimers(); - const networkTimestamp = GetNetworkTime.getNowWithNetworkOffset(); + const networkTimestamp = NetworkTime.now(); window?.log?.info( 'Sending message to conversation', @@ -775,7 +806,7 @@ export class ConversationModel extends Backbone.Model { quote: isEmpty(quote) ? undefined : quote, preview, attachments, - sent_at: networkTimestamp, + sent_at: networkTimestamp, // overridden later, but we need one to have the sorting done in the UI even when the sending is pending expirationType: DisappearingMessages.changeToDisappearingMessageType( this, this.getExpireTimer(), @@ -836,7 +867,7 @@ export class ConversationModel extends Backbone.Model { * @param providedDisappearingMode * @param providedExpireTimer * @param providedSource the pubkey of the user who made the change - * @param receivedAt the timestamp of when the change was received + * @param sentAt the timestamp of when the change was sent (when receiving it) * @param fromSync if the change was made from a sync message * @param shouldCommitConvo if the conversation change should be committed to the DB * @param shouldCommitMessage if the timer update message change should be committed to the DB @@ -847,7 +878,7 @@ export class ConversationModel extends Backbone.Model { providedDisappearingMode, providedExpireTimer, providedSource, - receivedAt, // is set if it comes from outside + sentAt, // is set if it comes from outside fromSync, // if the update comes from sync message ONLY fromConfigMessage, // if the update comes from a libsession config message ONLY fromCurrentDevice, @@ -857,21 +888,20 @@ export class ConversationModel extends Backbone.Model { providedDisappearingMode?: DisappearingMessageConversationModeType; providedExpireTimer?: number; providedSource?: string; - receivedAt?: number; // is set if it comes from outside + sentAt?: number; // is set if it comes from outside fromSync: boolean; fromCurrentDevice: boolean; fromConfigMessage: boolean; shouldCommitConvo?: boolean; existingMessage?: MessageModel; }): Promise { - const isRemoteChange = Boolean( - (receivedAt || fromSync || fromConfigMessage) && !fromCurrentDevice - ); + const isRemoteChange = Boolean((sentAt || fromSync || fromConfigMessage) && !fromCurrentDevice); // we don't add an update message when this comes from a config message, as we already have the SyncedMessage itself with the right timestamp to display - - if (this.isPublic()) { - throw new Error("updateExpireTimer() Disappearing messages aren't supported in communities"); + if (!this.isClosedGroup() && !this.isPrivate()) { + throw new Error( + 'updateExpireTimer() Disappearing messages are only supported int groups and private chats' + ); } let 
expirationMode = providedDisappearingMode; let expireTimer = providedExpireTimer; @@ -882,30 +912,34 @@ export class ConversationModel extends Backbone.Model { expireTimer = 0; } const shouldAddExpireUpdateMsgPrivate = this.isPrivate() && !fromConfigMessage; - const isLegacyGroup = this.isClosedGroup() && !PubKey.isClosedGroupV3(this.id); + const isLegacyGroup = this.isClosedGroup() && !PubKey.is03Pubkey(this.id); /** * it's ugly, but we want to add a message for legacy groups only when * - not coming from a config message * - effectively changes the setting - * - ignores a off setting for a legacy group (as we can get a setting from restored from configMessage, and a newgroup can still be in the swarm when linking a device + * - ignores a off setting for a legacy group (as we can get a setting from restored from configMessage, and a new group can still be in the swarm when linking a device */ - const shouldAddExpireUpdateMsgGroup = + const shouldAddExpireUpdateMsgLegacyGroup = fromCurrentDevice || (isLegacyGroup && !fromConfigMessage && (expirationMode !== this.get('expirationMode') || expireTimer !== this.get('expireTimer')) && expirationMode !== 'off'); + + const shouldAddExpireUpdateMsgGroupV2 = this.isClosedGroupV2() && !fromConfigMessage; + const shouldAddExpireUpdateMessage = - shouldAddExpireUpdateMsgPrivate || shouldAddExpireUpdateMsgGroup; + shouldAddExpireUpdateMsgPrivate || + shouldAddExpireUpdateMsgLegacyGroup || + shouldAddExpireUpdateMsgGroupV2; // When we add a disappearing messages notification to the conversation, we want it // to be above the message that initiated that change, hence the subtraction. - const timestamp = (receivedAt || Date.now()) - 1; + const createAtNetworkTimestamp = (sentAt || NetworkTime.now()) - 1; // NOTE when we turn the disappearing setting to off, we don't want it to expire with the previous expiration anymore - const isV2DisappearReleased = ReleasedFeatures.isDisappearMessageV2FeatureReleasedCached(); // when the v2 disappear is released, the changes we make are only for our outgoing messages, not shared with a contact anymore if (isV2DisappearReleased) { @@ -961,11 +995,11 @@ export class ConversationModel extends Backbone.Model { }; if (!message) { - if (!receivedAt) { + if (!sentAt) { // outgoing message message = await this.addSingleOutgoingMessage({ ...commonAttributes, - sent_at: timestamp, + sent_at: createAtNetworkTimestamp, }); } else { message = await this.addSingleIncomingMessage({ @@ -973,16 +1007,17 @@ export class ConversationModel extends Backbone.Model { // Even though this isn't reflected to the user, we want to place the last seen indicator above it. We set it to 'unread' to trigger that placement. unread: READ_MESSAGE_STATE.unread, source, - sent_at: timestamp, - received_at: timestamp, + sent_at: createAtNetworkTimestamp, + received_at: createAtNetworkTimestamp, }); } } - // Note: we agreed that a closed group ControlMessage message does not expire. + // Note: we agreed that a **legacy closed** group ControlMessage message does not expire. + // Group v2 on the other hand, have expiring disappearing control message message.set({ - expirationType: this.isClosedGroup() ? 'unknown' : expirationType, - expireTimer: this.isClosedGroup() ? 0 : expireTimer, + expirationType: this.isClosedGroup() && !this.isClosedGroupV2() ? 'unknown' : expirationType, + expireTimer: this.isClosedGroup() && !this.isClosedGroupV2() ? 
0 : expireTimer, }); if (!message.get('id')) { @@ -990,7 +1025,7 @@ export class ConversationModel extends Backbone.Model { } if (this.isActive()) { - this.set('active_at', timestamp); + this.set('active_at', createAtNetworkTimestamp); } if (shouldCommitConvo) { @@ -1001,7 +1036,7 @@ export class ConversationModel extends Backbone.Model { // if change was made remotely, don't send it to the contact/group if (isRemoteChange) { window.log.debug( - `[updateExpireTimer] remote change, not sending message again. receivedAt: ${receivedAt} fromSync: ${fromSync} fromCurrentDevice: ${fromCurrentDevice} for ${ed25519Str( + `[updateExpireTimer] remote change, not sending message again. sentAt: ${sentAt} fromSync: ${fromSync} fromCurrentDevice: ${fromCurrentDevice} for ${ed25519Str( this.id )}` ); @@ -1009,7 +1044,8 @@ export class ConversationModel extends Backbone.Model { if (!message.getExpirationStartTimestamp()) { // Note: we agreed that a closed group ControlMessage message does not expire. - const canBeDeleteAfterSend = this.isMe() || !(this.isGroup() && message.isControlMessage()); + const canBeDeleteAfterSend = + this.isMe() || !(this.isGroup() && !this.isClosedGroupV2() && message.isControlMessage()); if ( (canBeDeleteAfterSend && expirationMode === 'legacy') || expirationMode === 'deleteAfterSend' @@ -1037,8 +1073,8 @@ export class ConversationModel extends Backbone.Model { // We would have returned if that message sending part was not needed // const expireUpdate = { - identifier: message.id, - timestamp, + identifier: message.id as string, + createAtNetworkTimestamp, expirationType, expireTimer, }; @@ -1059,16 +1095,54 @@ export class ConversationModel extends Backbone.Model { const expirationTimerMessage = new ExpirationTimerUpdateMessage(expireUpdate); const pubkey = new PubKey(this.get('id')); - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendToPubKey( pubkey, expirationTimerMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); return true; } + if (this.isClosedGroup()) { if (this.isAdmin(UserUtils.getOurPubKeyStrFromCache())) { - // NOTE: we agreed that outgoing ExpirationTimerUpdate **for groups** are not expiring, + if (this.isClosedGroupV2()) { + if (!PubKey.is03Pubkey(this.id)) { + throw new Error('updateExpireTimer v2 group requires a 03 key'); + } + const group = await UserGroupsWrapperActions.getGroup(this.id); + if (!group || !group.secretKey) { + throw new Error( + 'trying to change timer for a group we do not have the secretKey is not possible' + ); + } + const info = await MetaGroupWrapperActions.infoGet(this.id); + info.expirySeconds = expireUpdate.expireTimer; + await MetaGroupWrapperActions.infoSet(this.id, info); + const v2groupMessage = new GroupUpdateInfoChangeMessage({ + typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type.DISAPPEARING_MESSAGES, + ...expireUpdate, + groupPk: this.id, + identifier: message.get('id'), + sodium: await getSodiumRenderer(), + secretKey: group.secretKey, + updatedExpirationSeconds: expireUpdate.expireTimer, + }); + + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [v2groupMessage], + group + ); + + await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk: this.id, + extraStoreRequests, + }); + + await GroupSync.queueNewJobIfNeeded(this.id); + return true; + } + + // NOTE: we agreed that outgoing ExpirationTimerUpdate **for legacy groups** are not expiring, // but they still need the content to be right(as this is what we use for the change itself) const 
expireUpdateForGroup = { @@ -1078,9 +1152,9 @@ export class ConversationModel extends Backbone.Model { const expirationTimerMessage = new ExpirationTimerUpdateMessage(expireUpdateForGroup); - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: expirationTimerMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); return true; } @@ -1099,7 +1173,7 @@ export class ConversationModel extends Backbone.Model { public async commit() { perfStart(`conversationCommit-${this.id}`); - await commitConversationAndRefreshWrapper(this.id); + await Convo.commitConversationAndRefreshWrapper(this.id); perfEnd(`conversationCommit-${this.id}`, 'conversationCommit'); } @@ -1109,7 +1183,7 @@ export class ConversationModel extends Backbone.Model { 'conversationId' | 'source' | 'type' | 'direction' | 'received_at' | 'unread' > ) { - let sender = UserUtils.getOurPubKeyStrFromCache(); + let sender: string = UserUtils.getOurPubKeyStrFromCache(); if (this.isPublic()) { const openGroup = OpenGroupData.getV2OpenGroupRoom(this.id); if (openGroup && openGroup.serverPublicKey && roomHasBlindEnabled(openGroup)) { @@ -1228,12 +1302,12 @@ export class ConversationModel extends Backbone.Model { window?.log?.info(`Sending ${timestamps.length} read receipts.`); const receiptMessage = new ReadReceiptMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), timestamps, }); const device = new PubKey(this.id); - await getMessageQueue().sendToPubKey(device, receiptMessage, SnodeNamespaces.UserMessages); + await MessageQueue.use().sendToPubKey(device, receiptMessage, SnodeNamespaces.Default); } public async setNickname(nickname: string | null, shouldCommit = false) { @@ -1283,7 +1357,7 @@ export class ConversationModel extends Backbone.Model { this.set({ avatarInProfile: newProfile.avatarPath }); changes = true; } - const existingImageId = this.get('avatarImageId'); + const existingImageId = this.getAvatarImageId(); if (existingImageId !== newProfile.avatarImageId) { this.set({ avatarImageId: newProfile.avatarImageId }); @@ -1307,7 +1381,7 @@ export class ConversationModel extends Backbone.Model { * @returns `displayNameInProfile` so the real username as defined by that user/group */ public getRealSessionUsername(): string | undefined { - return this.get('displayNameInProfile'); + return getLibGroupNameOutsideRedux(this.id) || this.get('displayNameInProfile'); } /** @@ -1317,6 +1391,18 @@ export class ConversationModel extends Backbone.Model { return this.isPrivate() ? this.get('nickname') || undefined : undefined; } + public getAvatarImageId(): number | undefined { + return this.isPublic() ? this.get('avatarImageId') || undefined : undefined; + } + + public getProfileKey(): string | undefined { + return this.get('profileKey'); + } + + public getAvatarPointer(): string | undefined { + return this.get('avatarPointer'); + } + /** * @returns `getNickname` if a private convo and a nickname is set, or `getRealSessionUsername` */ @@ -1355,6 +1441,14 @@ export class ConversationModel extends Backbone.Model { return Array.isArray(groupAdmins) && groupAdmins.includes(pubKey); } + public weAreAdminUnblinded() { + const us = UserUtils.getOurPubKeyStrFromCache(); + if (!us) { + throw new PreConditionFailed('weAreAdminUnblinded: our pubkey is not set'); + } + return this.isAdmin(us); + } + /** * Check if the provided pubkey is a moderator. 
* Being a moderator only makes sense for a sogs as closed groups have their admin under the groupAdmins property @@ -1380,7 +1474,7 @@ export class ConversationModel extends Backbone.Model { priority: number, shouldCommit: boolean = true ): Promise { - if (priority !== this.get('priority')) { + if (priority !== this.getPriority()) { this.set({ priority, }); @@ -1413,7 +1507,7 @@ export class ConversationModel extends Backbone.Model { if (!this.isPrivate()) { return; } - const priority = this.get('priority'); + const priority = this.getPriority(); if (priority >= CONVERSATION_PRIORITIES.default) { this.set({ priority: CONVERSATION_PRIORITIES.hidden }); if (shouldCommit) { @@ -1428,7 +1522,7 @@ export class ConversationModel extends Backbone.Model { * A pinned cannot be hidden, as the it is all based on the same priority values. */ public async unhideIfNeeded(shouldCommit: boolean = true) { - const priority = this.get('priority'); + const priority = this.getPriority(); if (isFinite(priority) && priority < CONVERSATION_PRIORITIES.default) { this.set({ priority: CONVERSATION_PRIORITIES.default }); if (shouldCommit) { @@ -1488,7 +1582,7 @@ export class ConversationModel extends Backbone.Model { public async setIsApproved(value: boolean, shouldCommit: boolean = true) { const valueForced = Boolean(value); - if (!this.isPrivate()) { + if (!this.isPrivate() && !this.isClosedGroupV2()) { return; } @@ -1525,14 +1619,29 @@ export class ConversationModel extends Backbone.Model { } } - public async setOriginConversationID(conversationIdOrigin: string) { - if (conversationIdOrigin === this.get('conversationIdOrigin')) { + public async setOriginConversationID(conversationIdOrigin: string, shouldCommit: boolean) { + if (conversationIdOrigin === this.getConversationIdOrigin()) { return; } + // conversationIdOrigin can only be a 05 pubkey (invite to a 03 group from a 05 person, or a sogs url), or undefined + if ( + conversationIdOrigin && + !PubKey.is05Pubkey(conversationIdOrigin) && + !OpenGroupUtils.isOpenGroupV2(conversationIdOrigin) + ) { + window.log.warn( + 'tried to setOriginConversationID with invalid parameter:', + conversationIdOrigin + ); + throw new Error('tried to setOriginConversationID with invalid parameter '); + } this.set({ conversationIdOrigin, }); - await this.commit(); + + if (shouldCommit) { + await this.commit(); + } } /** @@ -1629,7 +1738,7 @@ export class ConversationModel extends Backbone.Model { const profileKeyHex = toHex(profileKey); // profileKey is a string so we can compare it directly - if (this.get('profileKey') !== profileKeyHex) { + if (this.getProfileKey() !== profileKeyHex) { this.set({ profileKey: profileKeyHex, }); @@ -1641,7 +1750,7 @@ export class ConversationModel extends Backbone.Model { } public hasMember(pubkey: string) { - return includes(this.get('members'), pubkey); + return includes(this.getGroupMembers(), pubkey); } public hasReactions() { @@ -1672,21 +1781,30 @@ export class ConversationModel extends Backbone.Model { } public isPinned() { - const priority = this.get('priority'); + const priority = this.getPriority(); return isFinite(priority) && priority > CONVERSATION_PRIORITIES.default; } public didApproveMe() { - return Boolean(this.get('didApproveMe')); + if (PubKey.is05Pubkey(this.id) && this.isPrivate()) { + // if a private chat, trust the value from the Libsession wrapper cached first + // TODO once we have a libsession state, we can make this used accross the app without repeating as much + return 
SessionUtilContact.getContactCached(this.id)?.approvedMe ?? !!this.get('didApproveMe'); + } + return !!this.get('didApproveMe'); } public isApproved() { - return Boolean(this.get('isApproved')); + if (PubKey.is05Pubkey(this.id) && this.isPrivate()) { + // if a private chat, trust the value from the Libsession wrapper cached first + return SessionUtilContact.getContactCached(this.id)?.approved ?? !!this.get('isApproved'); + } + return !!this.get('isApproved'); } /** - * For a private convo, returns the loki profilename if set, or a shortened + * For a private convo, returns the loki profile name if set, or a shortened * version of the contact pubkey. * Throws an error if called on a group convo. * @@ -1703,7 +1821,7 @@ export class ConversationModel extends Backbone.Model { return window.i18n('you'); } - const profileName = this.get('displayNameInProfile'); + const profileName = this.getRealSessionUsername(); return profileName || PubKey.shorten(pubkey); } @@ -1728,7 +1846,11 @@ export class ConversationModel extends Backbone.Model { if (!avatarUrl) { return noIconUrl; } - const decryptedAvatarUrl = await getDecryptedMediaUrl(avatarUrl, IMAGE_JPEG, true); + const decryptedAvatarUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( + avatarUrl, + IMAGE_JPEG, + true + ); if (!decryptedAvatarUrl) { window.log.warn('Could not decrypt avatar stored locally for getNotificationIcon..'); @@ -1749,7 +1871,7 @@ export class ConversationModel extends Backbone.Model { if (!this.isApproved() && !isLegacyGroup) { window?.log?.info('notification cancelled for unapproved convo', this.idForLogging()); const hadNoRequestsPrior = - getConversationController() + ConvoHub.use() .getConversations() .filter(conversation => { return ( @@ -1773,7 +1895,7 @@ export class ConversationModel extends Backbone.Model { } // make sure the notifications are not muted for this convo (and not the source convo) - const convNotif = this.get('triggerNotificationsFor'); + const convNotif = this.getNotificationsFor(); if (convNotif === 'disabled') { window?.log?.info('notifications disabled for convo', this.idForLogging()); return; @@ -1799,7 +1921,7 @@ export class ConversationModel extends Backbone.Model { } } - const convo = await getConversationController().getOrCreateAndWait( + const convo = await ConvoHub.use().getOrCreateAndWait( message.get('source'), ConversationTypeEnum.PRIVATE ); @@ -1830,7 +1952,7 @@ export class ConversationModel extends Backbone.Model { const conversationId = this.id; // make sure the notifications are not muted for this convo (and not the source convo) - const convNotif = this.get('triggerNotificationsFor'); + const convNotif = this.getNotificationsFor(); if (convNotif === 'disabled') { window?.log?.info( 'notifyIncomingCall: notifications disabled for convo', @@ -1884,25 +2006,80 @@ export class ConversationModel extends Backbone.Model { return this.markConversationReadBouncy({ newestUnreadDate, fromConfigMessage: true }); } + public getGroupAdmins(): Array { + if (this.isClosedGroupV2()) { + return getLibGroupAdminsOutsideRedux(this.id); + } + const groupAdmins = this.get('groupAdmins'); + + return groupAdmins && groupAdmins.length > 0 ? 
groupAdmins : []; + } + + public isKickedFromGroup(): boolean { + if (this.isClosedGroup()) { + if (this.isClosedGroupV2()) { + return getLibGroupKickedOutsideRedux(this.id) || false; + } + return !!this.get('isKickedFromGroup'); + } + return false; + } + + public getActiveAt(): number | undefined { + return this.get('active_at'); + } + + public getLastJoinedTimestamp(): number { + if (this.isClosedGroup()) { + return this.get('lastJoinedTimestamp') || 0; + } + return 0; + } + + public getGroupMembers(): Array { + if (this.isClosedGroup()) { + if (this.isClosedGroupV2()) { + return getLibGroupMembersOutsideRedux(this.id); + } + const members = this.get('members'); + return members && members.length > 0 ? members : []; + } + return []; + } + + public getGroupZombies(): Array { + if (this.isClosedGroup()) { + // closed group with 03 prefix does not have the concepts of zombies + if (this.isClosedGroupV2()) { + return []; + } + const zombies = this.get('zombies'); + return zombies && zombies.length > 0 ? zombies : []; + } + return []; + } + private async sendMessageJob(message: MessageModel) { try { const { body, attachments, preview, quote, fileIdsToLink } = await message.uploadData(); const { id } = message; - const destination = this.id; + const destination = this.id as string; - const sentAt = message.get('sent_at'); + const sentAt = message.get('sent_at'); // this is used to store the timestamp when we tried sending that message, it should be set by the caller if (!sentAt) { - throw new Error('sendMessageJob() sent_at must be set.'); + throw new Error('sendMessageJob() sent_at is not set.'); } + const networkTimestamp = NetworkTime.now(); // we are trying to send a message to someone. Make sure this convo is not hidden await this.unhideIfNeeded(true); + // TODO break down those functions (sendMessage and retrySend into smaller functions and narrow the VisibleMessageParams to preview, etc. with checks of types) // an OpenGroupV2 message is just a visible message const chatMessageParams: VisibleMessageParams = { body, identifier: id, - timestamp: sentAt, + createAtNetworkTimestamp: networkTimestamp, attachments, expirationType: message.getExpirationType() ?? 'unknown', // Note we assume that the caller used a setting allowed for that conversation when building it. Here we just send it. 
expireTimer: message.getExpireTimerSeconds(), @@ -1911,32 +2088,16 @@ export class ConversationModel extends Backbone.Model { lokiProfile: UserUtils.getOurProfile(), }; - const shouldApprove = !this.isApproved() && this.isPrivate(); - const incomingMessageCount = await Data.getMessageCountByType( - this.id, - MessageDirection.incoming - ); - const hasIncomingMessages = incomingMessageCount > 0; - if (PubKey.isBlinded(this.id)) { window.log.info('Sending a blinded message to this user: ', this.id); await this.sendBlindedMessageRequest(chatMessageParams); return; } - if (shouldApprove) { - await this.setIsApproved(true); - if (hasIncomingMessages) { - // have to manually add approval for local client here as DB conditional approval check in config msg handling will prevent this from running - await this.addOutgoingApprovalMessage(Date.now()); - if (!this.didApproveMe()) { - await this.setDidApproveMe(true); - } - // should only send once - await this.sendMessageRequestResponse(); - void forceSyncConfigurationNowIfNeeded(); - } - } + // handleAcceptConversationRequest will take care of sending response depending on the type of conversation + await handleAcceptConversationRequest({ + convoId: this.id, + }); if (this.isOpenGroupV2()) { const chatMessageOpenGroupV2 = new OpenGroupVisibleMessage(chatMessageParams); @@ -1946,7 +2107,7 @@ export class ConversationModel extends Backbone.Model { } const openGroup = OpenGroupData.getV2OpenGroupRoom(this.id); // send with blinding if we need to - await getMessageQueue().sendToOpenGroupV2({ + await MessageQueue.use().sendToOpenGroupV2({ message: chatMessageOpenGroupV2, roomInfos, blinded: Boolean(roomHasBlindEnabled(openGroup)), @@ -1965,56 +2126,62 @@ export class ConversationModel extends Backbone.Model { chatMessageParams.syncTarget = this.id; const chatMessageMe = new VisibleMessage(chatMessageParams); - await getMessageQueue().sendSyncMessage({ - namespace: SnodeNamespaces.UserMessages, + await MessageQueue.use().sendSyncMessage({ + namespace: SnodeNamespaces.Default, message: chatMessageMe, }); return; } - if (message.get('groupInvitation')) { - const groupInvitation = message.get('groupInvitation'); + const communityInvitation = message.getCommunityInvitation(); + + if (communityInvitation && communityInvitation.url) { const groupInviteMessage = new GroupInvitationMessage({ identifier: id, - timestamp: sentAt, - name: groupInvitation.name, - url: groupInvitation.url, + createAtNetworkTimestamp: networkTimestamp, + name: communityInvitation.name, + url: communityInvitation.url, expirationType: chatMessageParams.expirationType, expireTimer: chatMessageParams.expireTimer, }); // we need the return await so that errors are caught in the catch {} - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendToPubKey( destinationPubkey, groupInviteMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); return; } const chatMessagePrivate = new VisibleMessage(chatMessageParams); - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendToPubKey( destinationPubkey, chatMessagePrivate, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); return; } + if (this.isClosedGroupV2()) { + // we need the return await so that errors are caught in the catch {} + await this.sendMessageToGroupV2(chatMessageParams); + return; + } + if (this.isClosedGroup()) { if (this.matchesDisappearingMode('deleteAfterRead')) { - throw new Error('Group disappearing messages must be deleteAterSend'); + throw new Error('Group disappearing 
messages must be deleteAfterSend'); } const chatMessageMediumGroup = new VisibleMessage(chatMessageParams); const closedGroupVisibleMessage = new ClosedGroupVisibleMessage({ chatMessage: chatMessageMediumGroup, - groupId: destinationPubkey, - timestamp: sentAt, + groupId: destinationPubkey.key, // expirationType & expireTimer are part of the chatMessageMediumGroup object }); // we need the return await so that errors are caught in the catch {} - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: closedGroupVisibleMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); return; } @@ -2025,9 +2192,25 @@ export class ConversationModel extends Backbone.Model { } } + private async sendMessageToGroupV2(chatMessageParams: VisibleMessageParams) { + if (!PubKey.is03Pubkey(this.id)) { + throw new Error('sendMessageToGroupV2 needs a 03 key'); + } + const visibleMessage = new VisibleMessage(chatMessageParams); + const groupVisibleMessage = new ClosedGroupV2VisibleMessage({ + chatMessage: visibleMessage, + destination: this.id, + }); + + // we need the return await so that errors are caught in the catch {} + await MessageQueue.use().sendToGroupV2({ + message: groupVisibleMessage, + }); + } + private async sendBlindedMessageRequest(messageParams: VisibleMessageParams) { const ourSignKeyBytes = await UserUtils.getUserED25519KeyPairBytes(); - const groupUrl = this.getSogsOriginMessage(); + const groupUrl = this.getConversationIdOrigin(); if (!PubKey.isBlinded(this.id)) { window?.log?.warn('sendBlindedMessageRequest - convo is not a blinded one'); @@ -2079,7 +2262,7 @@ export class ConversationModel extends Backbone.Model { this.set({ active_at: Date.now(), isApproved: true }); // TODO we need to add support for sending blinded25 message request in addition to the legacy blinded15 - await getMessageQueue().sendToOpenGroupV2BlindedRequest({ + await MessageQueue.use().sendToOpenGroupV2BlindedRequest({ encryptedContent: encryptedMsg, roomInfos: roomInfo, message: sogsVisibleMessage, @@ -2089,7 +2272,7 @@ export class ConversationModel extends Backbone.Model { // tslint:disable-next-line cyclomatic-complexity private async bouncyUpdateLastMessage() { - if (!this.id || !this.get('active_at') || this.isHidden()) { + if (!this.id || !this.getActiveAt() || this.isHidden()) { return; } const messages = await Data.getLastMessagesByConversation(this.id, 1, true); @@ -2114,20 +2297,19 @@ export class ConversationModel extends Backbone.Model { const lastMessageStatus = lastMessageModel.getMessagePropStatus() || undefined; const lastMessageNotificationText = lastMessageModel.getNotificationText() || undefined; // we just want to set the `status` to `undefined` if there are no `lastMessageNotificationText` - const lastMessageUpdate = - !!lastMessageNotificationText && !isEmpty(lastMessageNotificationText) - ? { - lastMessage: lastMessageNotificationText || '', - lastMessageStatus, - lastMessageInteractionType, - lastMessageInteractionStatus, - } - : { - lastMessage: '', - lastMessageStatus: undefined, - lastMessageInteractionType: undefined, - lastMessageInteractionStatus: undefined, - }; + const lastMessageUpdate = !isEmpty(lastMessageNotificationText) + ? 
{ + lastMessage: lastMessageNotificationText || '', + lastMessageStatus, + lastMessageInteractionType, + lastMessageInteractionStatus, + } + : { + lastMessage: '', + lastMessageStatus: undefined, + lastMessageInteractionType: undefined, + lastMessageInteractionStatus: undefined, + }; const existingLastMessageInteractionType = this.get('lastMessageInteractionType'); const existingLastMessageInteractionStatus = this.get('lastMessageInteractionStatus'); @@ -2226,10 +2408,18 @@ export class ConversationModel extends Backbone.Model { } /** - * - * @returns The open group conversationId this conversation originated from + * @link ConversationAttributes#conversationIdOrigin */ - private getSogsOriginMessage() { + private getConversationIdOrigin() { + if (!this.isClosedGroupV2() && !this.isPrivateAndBlinded()) { + window.log.warn( + 'getConversationIdOrigin can only be set with 03-group or blinded conversation (15 prefix), got:', + this.id + ); + throw new Error( + 'getConversationIdOrigin can only be set with 03-group or blinded conversation (15 prefix)' + ); + } return this.get('conversationIdOrigin'); } @@ -2288,7 +2478,7 @@ export class ConversationModel extends Backbone.Model { ) { return false; } - return Boolean(this.get('isApproved')); + return this.isApproved(); } private async bumpTyping() { @@ -2358,15 +2548,19 @@ export class ConversationModel extends Backbone.Model { } const typingParams = { - timestamp: GetNetworkTime.getNowWithNetworkOffset(), + createAtNetworkTimestamp: NetworkTime.now(), isTyping, - typingTimestamp: GetNetworkTime.getNowWithNetworkOffset(), + typingTimestamp: NetworkTime.now(), }; const typingMessage = new TypingMessage(typingParams); - const device = new PubKey(recipientId); - void getMessageQueue() - .sendToPubKey(device, typingMessage, SnodeNamespaces.UserMessages) + const pubkey = new PubKey(recipientId); + void MessageQueue.use() + .sendTo1o1NonDurably({ + pubkey, + message: typingMessage, + namespace: SnodeNamespaces.Default, + }) .catch(window?.log?.error); } @@ -2404,7 +2598,7 @@ export class ConversationModel extends Backbone.Model { switch (type) { case 'admins': - return this.updateGroupAdmins(replacedWithOurRealSessionId, false); + return this.updateGroupAdmins(replacedWithOurRealSessionId, true); case 'mods': ReduxSogsRoomInfos.setModeratorsOutsideRedux(this.id, replacedWithOurRealSessionId); return false; @@ -2503,15 +2697,14 @@ export class ConversationModel extends Backbone.Model { // #endregion } -export async function commitConversationAndRefreshWrapper(id: string) { - const convo = getConversationController().get(id); +export const Convo = { commitConversationAndRefreshWrapper }; + +async function commitConversationAndRefreshWrapper(id: string) { + const convo = ConvoHub.use().get(id); if (!convo) { return; } - // TODOLATER remove duplicates between db and wrapper (and move search by name or nickname to wrapper) - // TODOLATER insertConvoFromDBIntoWrapperAndRefresh and insertContactFromDBIntoWrapperAndRefresh both fetches the same data from the DB. Might be worth fetching it and providing it to both? 
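The typing-indicator path above now stamps the message with `NetworkTime.now()` and sends it through the non-durable 1:1 entry point instead of the durable `sendToPubKey` queue. A minimal sketch of the new call shape; the `TypingMessage` import path is a guess, the rest mirrors what the diff shows:

```ts
// Sketch only: mirrors the typing-message change above; treat import paths as assumptions.
import { TypingMessage } from '../session/messages/outgoing/controlMessage/TypingMessage';
import { MessageQueue } from '../session/sending';
import { SnodeNamespaces } from '../session/apis/snode_api/namespaces';
import { PubKey } from '../session/types';
import { NetworkTime } from '../util/NetworkTime';

async function sendTypingIndicator(recipientId: string, isTyping: boolean) {
  const typingMessage = new TypingMessage({
    createAtNetworkTimestamp: NetworkTime.now(),
    isTyping,
    typingTimestamp: NetworkTime.now(),
  });

  // Non-durable send: a lost typing indicator is not worth retrying,
  // which is why the caller in the diff only logs failures.
  await MessageQueue.use().sendTo1o1NonDurably({
    pubkey: new PubKey(recipientId),
    message: typingMessage,
    namespace: SnodeNamespaces.Default,
  });
}
```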
- // write to db const savedDetails = await Data.saveConversation(convo.attributes); await convo.refreshInMemoryDetails(savedDetails); @@ -2556,7 +2749,7 @@ export async function commitConversationAndRefreshWrapper(id: string) { if (Registration.isDone()) { // save the new dump if needed to the DB asap // this call throttled so we do not run this too often (and not for every .commit()) - await ConfigurationSync.queueNewJobIfNeeded(); + await UserSync.queueNewJobIfNeeded(); } convo.triggerUIRefresh(); } @@ -2580,7 +2773,7 @@ export class ConversationCollection extends Backbone.Collection) { super(models); this.comparator = (m: ConversationModel) => { - return -(m.get('active_at') || 0); + return -(m.getActiveAt() || 0); }; } } @@ -2619,23 +2812,34 @@ export function hasValidOutgoingRequestValues({ * @param values Required properties to evaluate if this is a message request */ export function hasValidIncomingRequestValues({ + id, isMe, isApproved, isBlocked, isPrivate, activeAt, didApproveMe, + invitePending, }: { + id: string; isMe: boolean; isApproved: boolean; isBlocked: boolean; isPrivate: boolean; didApproveMe: boolean; - activeAt: number; + invitePending: boolean; + activeAt: number | undefined; }): boolean { // if a convo is not active, it means we didn't get any messages nor sent any. const isActive = activeAt && isFinite(activeAt) && activeAt > 0; - return Boolean(isPrivate && !isMe && !isApproved && !isBlocked && isActive && didApproveMe); + return Boolean( + (isPrivate || (PubKey.is03Pubkey(id) && invitePending)) && + !isMe && + !isApproved && + !isBlocked && + isActive && + didApproveMe + ); } async function cleanUpExpireHistoryFromConvo(conversationId: string, isPrivate: boolean) { diff --git a/ts/models/conversationAttributes.ts b/ts/models/conversationAttributes.ts index 3e20548195..a96666303e 100644 --- a/ts/models/conversationAttributes.ts +++ b/ts/models/conversationAttributes.ts @@ -1,5 +1,6 @@ import { defaults } from 'lodash'; import { DisappearingMessageConversationModeType } from '../session/disappearing_messages/types'; + import { ConversationTypeEnum, CONVERSATION_PRIORITIES } from './types'; import { ConversationInteractionType, ConversationInteractionStatus } from '../interactions/types'; import { LastMessageStatusType } from '../state/ducks/types'; @@ -7,7 +8,7 @@ import { LastMessageStatusType } from '../state/ducks/types'; export function isOpenOrClosedGroup(conversationType: ConversationTypeEnum) { return ( conversationType === ConversationTypeEnum.GROUP || - conversationType === ConversationTypeEnum.GROUPV3 + conversationType === ConversationTypeEnum.GROUPV2 ); } @@ -37,7 +38,7 @@ export type ConversationAttributesWithNotSavedOnes = ConversationAttributes & export interface ConversationAttributes { id: string; - type: ConversationTypeEnum.PRIVATE | ConversationTypeEnum.GROUPV3 | ConversationTypeEnum.GROUP; + type: ConversationTypeEnum.PRIVATE | ConversationTypeEnum.GROUPV2 | ConversationTypeEnum.GROUP; // 0 means inactive (undefined and null too but we try to get rid of them and only have 0 = inactive) active_at: number; // this field is the one used to sort conversations in the left pane from most recent @@ -45,7 +46,7 @@ export interface ConversationAttributes { /** * lastMessage is actually just a preview of the last message text, shortened to 60 chars. * This is to avoid filling the redux store with a huge last message when it's only used in the - * preview of a conversation (leftpane). + * preview of a conversation (left pane). 
* The shortening is made in sql.ts directly. */ lastMessage: string | null; @@ -60,9 +61,9 @@ export interface ConversationAttributes { avatarInProfile?: string; // this is the avatar path locally once downloaded and stored in the application attachments folder - isTrustedForAttachmentDownload: boolean; // not synced accross devices, this field is used if we should auto download attachments from this conversation or not + isTrustedForAttachmentDownload: boolean; // not synced across devices, this field is used if we should auto download attachments from this conversation or not - conversationIdOrigin?: string; // Blinded message requests ONLY: The community from which this conversation originated from + conversationIdOrigin?: string; // The conversation from which this conversation originated from: blinded message request or 03-group admin who invited us // TODOLATER those two items are only used for legacy closed groups and will be removed when we get rid of the legacy closed groups support lastJoinedTimestamp: number; // ClosedGroup: last time we were added to this group // TODOLATER to remove after legacy closed group are dropped @@ -78,7 +79,7 @@ export interface ConversationAttributes { nickname?: string; // this is the name WE gave to that user (only applicable to private chats, not closed group neither opengroups) profileKey?: string; // Consider this being a hex string if it is set triggerNotificationsFor: ConversationNotificationSettingType; - avatarPointer?: string; // this is the url of the avatar on the file server v2. we use this to detect if we need to redownload the avatar from someone (not used for opengroups) + avatarPointer?: string; // this is the url of the avatar on the file server v2. we use this to detect if we need to re-download the avatar from someone (not used for opengroups) /** in seconds, 0 means no expiration */ expireTimer: number; diff --git a/ts/models/groupUpdate.ts b/ts/models/groupUpdate.ts index 22db682f14..e2f3194fba 100644 --- a/ts/models/groupUpdate.ts +++ b/ts/models/groupUpdate.ts @@ -1,4 +1,4 @@ -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { UserUtils } from '../session/utils'; import type { LocalizerComponentPropsObject } from '../types/localizer'; @@ -17,9 +17,7 @@ export function getKickedGroupUpdateStr( groupName: string ): LocalizerComponentPropsObject { const { others, us } = usAndXOthers(kicked); - const othersNames = others.map( - getConversationController().getContactProfileNameOrShortenedPubKey - ); + const othersNames = others.map(ConvoHub.use().getContactProfileNameOrShortenedPubKey); if (us) { switch (others.length) { @@ -34,7 +32,7 @@ export function getKickedGroupUpdateStr( switch (othersNames.length) { case 0: - throw new Error('kicked without anyone in it.'); + return { token: 'groupUpdated' }; case 1: return { token: 'groupRemoved', args: { name: othersNames[0] } }; case 2: @@ -56,10 +54,7 @@ export function getKickedGroupUpdateStr( } } -export function getLeftGroupUpdateChangeStr( - left: Array, - _groupName: string -): LocalizerComponentPropsObject { +export function getLeftGroupUpdateChangeStr(left: Array): LocalizerComponentPropsObject { const { others, us } = usAndXOthers(left); if (left.length !== 1) { @@ -71,20 +66,66 @@ export function getLeftGroupUpdateChangeStr( : { token: 'groupMemberLeft', args: { - name: getConversationController().getContactProfileNameOrShortenedPubKey(others[0]), + name: 
ConvoHub.use().getContactProfileNameOrShortenedPubKey(others[0]), }, }; } export function getJoinedGroupUpdateChangeStr( joined: Array, + groupv2: boolean, + addedWithHistory: boolean, _groupName: string ): LocalizerComponentPropsObject { const { others, us } = usAndXOthers(joined); - const othersNames = others.map( - getConversationController().getContactProfileNameOrShortenedPubKey - ); + const othersNames = others.map(ConvoHub.use().getContactProfileNameOrShortenedPubKey); + if (groupv2) { + if (us) { + switch (othersNames.length) { + case 0: + return { token: addedWithHistory ? 'groupInviteYouHistory' : 'groupInviteYou' }; + case 1: + return addedWithHistory + ? { token: 'groupMemberNewYouHistoryTwo', args: { name: othersNames[0] } } + : { token: 'groupInviteYouAndOtherNew', args: { other_name: othersNames[0] } }; + default: + return addedWithHistory + ? { token: 'groupMemberNewYouHistoryMultiple', args: { count: othersNames.length } } + : { token: 'groupInviteYouAndMoreNew', args: { count: othersNames.length } }; + } + } + switch (othersNames.length) { + case 0: + return { token: 'groupUpdated' }; // this is an invalid case, but well. + case 1: + return addedWithHistory + ? { token: 'groupMemberNewHistory', args: { name: othersNames[0] } } + : { token: 'groupMemberNew', args: { name: othersNames[0] } }; + case 2: + return addedWithHistory + ? { + token: 'groupMemberNewHistoryTwo', + args: { name: othersNames[0], other_name: othersNames[1] }, + } + : { + token: 'groupMemberNewTwo', + args: { name: othersNames[0], other_name: othersNames[1] }, + }; + default: + return addedWithHistory + ? { + token: 'groupMemberNewHistoryMultiple', + args: { name: othersNames[0], count: othersNames.length - 1 }, + } + : { + token: 'groupMemberNewMultiple', + args: { name: othersNames[0], count: othersNames.length - 1 }, + }; + } + } + + // legacy groups if (us) { switch (othersNames.length) { case 0: @@ -97,7 +138,7 @@ export function getJoinedGroupUpdateChangeStr( } switch (othersNames.length) { case 0: - throw new Error('joined without anyone in it.'); + return { token: 'groupUpdated' }; case 1: return { token: 'legacyGroupMemberNew', args: { name: othersNames[0] } }; case 2: @@ -118,3 +159,53 @@ export function getJoinedGroupUpdateChangeStr( }; } } + +export function getPromotedGroupUpdateChangeStr( + joined: Array +): LocalizerComponentPropsObject { + const { others, us } = usAndXOthers(joined); + const othersNames = others.map(ConvoHub.use().getContactProfileNameOrShortenedPubKey); + + if (us) { + switch (othersNames.length) { + case 0: + return { token: 'groupPromotedYou' }; + case 1: + return { token: 'groupPromotedYouTwo', args: { name: othersNames[0] } }; + default: + return { token: 'groupPromotedYouMultiple', args: { count: othersNames.length } }; + } + } + switch (othersNames.length) { + case 0: + return { token: 'groupUpdated' }; + case 1: + return { token: 'adminPromotedToAdmin', args: { name: othersNames[0] } }; + case 2: + return { + token: 'adminTwoPromotedToAdmin', + args: { + name: othersNames[0], + other_name: othersNames[1], + }, + }; + default: + return { + token: 'adminMorePromotedToAdmin', + args: { + name: othersNames[0], + count: othersNames.length - 1, + }, + }; + } +} + +export function getGroupNameChangeStr(newName: string | undefined): LocalizerComponentPropsObject { + return newName + ? 
{ token: 'groupNameNew', args: { group_name: newName } } + : { token: 'groupNameUpdated' }; +} + +export function getGroupDisplayPictureChangeStr(): LocalizerComponentPropsObject { + return { token: 'groupDisplayPictureUpdated' }; +} diff --git a/ts/models/message.ts b/ts/models/message.ts index f317ee33a5..aa971348fa 100644 --- a/ts/models/message.ts +++ b/ts/models/message.ts @@ -2,12 +2,15 @@ import Backbone from 'backbone'; import autoBind from 'auto-bind'; import filesize from 'filesize'; +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; import { cloneDeep, debounce, isEmpty, size as lodashSize, partition, pick, uniq } from 'lodash'; import { SignalService } from '../protobuf'; -import { getMessageQueue } from '../session'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { ContentMessage } from '../session/messages/outgoing'; -import { ClosedGroupVisibleMessage } from '../session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; +import { + ClosedGroupV2VisibleMessage, + ClosedGroupVisibleMessage, +} from '../session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; import { PubKey } from '../session/types'; import { UserUtils, @@ -30,7 +33,6 @@ import { Data } from '../data/data'; import { OpenGroupData } from '../data/opengroups'; import { SettingsKey } from '../data/settings-key'; import { isUsAnySogsFromCache } from '../session/apis/open_group_api/sogsv3/knownBlindedkeys'; -import { GetNetworkTime } from '../session/apis/snode_api/getNetworkTime'; import { SnodeNamespaces } from '../session/apis/snode_api/namespaces'; import { DURATION } from '../session/constants'; import { DisappearingMessages } from '../session/disappearing_messages'; @@ -48,7 +50,6 @@ import { uploadLinkPreviewsV3, uploadQuoteThumbnailsV3, } from '../session/utils/AttachmentsV2'; -import { perfEnd, perfStart } from '../session/utils/Performance'; import { isUsFromCache } from '../session/utils/User'; import { buildSyncMessage } from '../session/utils/sync/syncUtils'; import { @@ -60,10 +61,11 @@ import { PropsForGroupInvitation, PropsForGroupUpdate, PropsForGroupUpdateAdd, - PropsForGroupUpdateGeneral, + PropsForGroupUpdateAvatarChange, PropsForGroupUpdateKicked, PropsForGroupUpdateLeft, PropsForGroupUpdateName, + PropsForGroupUpdatePromoted, PropsForMessageWithoutConvoProps, PropsForQuote, messagesChanged, @@ -86,12 +88,19 @@ import { ConversationModel } from './conversation'; import { READ_MESSAGE_STATE } from './conversationAttributes'; import { ConversationInteractionStatus, ConversationInteractionType } from '../interactions/types'; import { LastMessageStatusType } from '../state/ducks/types'; +import type { GetMessageArgs, LocalizerToken } from '../types/localizer'; import { + getGroupDisplayPictureChangeStr, + getGroupNameChangeStr, getJoinedGroupUpdateChangeStr, getKickedGroupUpdateStr, getLeftGroupUpdateChangeStr, + getPromotedGroupUpdateChangeStr, } from './groupUpdate'; -import type { GetMessageArgs, LocalizerToken } from '../types/localizer'; +import { NetworkTime } from '../util/NetworkTime'; +import { MessageQueue } from '../session/sending'; +import { getTimerNotificationStr } from './timerNotifications'; +import { ExpirationTimerUpdate } from '../session/disappearing_messages/types'; // tslint:disable: cyclomatic-complexity @@ -221,8 +230,11 @@ export class MessageModel extends Backbone.Model { this.set(attributes); } - public isGroupInvitation() { - return 
!!this.get('groupInvitation'); + public isCommunityInvitation() { + return !!this.getCommunityInvitation(); + } + public getCommunityInvitation() { + return this.get('groupInvitation'); } public isMessageRequestResponse() { @@ -230,24 +242,30 @@ export class MessageModel extends Backbone.Model { } public isDataExtractionNotification() { - return !!this.get('dataExtractionNotification'); + return !!this.getDataExtractionNotification(); + } + public getDataExtractionNotification() { + return this.get('dataExtractionNotification'); } public isCallNotification() { - return !!this.get('callNotificationType'); + return !!this.getCallNotification(); + } + public getCallNotification() { + return this.get('callNotificationType'); } public isInteractionNotification() { return !!this.getInteractionNotification(); } - public getInteractionNotification() { return this.get('interactionNotification'); } - public getNotificationText() { + public getNotificationText(): string { const groupUpdate = this.getGroupUpdateAsArray(); if (groupUpdate) { + const isGroupV2 = PubKey.is03Pubkey(this.get('conversationId')); const groupName = this.getConversation()?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('unknown'); @@ -259,12 +277,41 @@ export class MessageModel extends Backbone.Model { } if (groupUpdate.name) { - return window.i18n.stripped('groupNameNew', { group_name: groupUpdate.name }); + const result = getGroupNameChangeStr(groupUpdate.name); + + if ('args' in result) { + return window.i18n.stripped( + ...([result.token, result.args] as GetMessageArgs) + ); + } + return window.i18n.stripped(...([result.token] as GetMessageArgs)); + } + + if (groupUpdate.avatarChange) { + const result = getGroupDisplayPictureChangeStr(); + return window.i18n.stripped(...([result.token] as GetMessageArgs)); } if (groupUpdate.joined?.length) { // @ts-expect-error -- TODO: Fix by using new i18n builder - const { token, args } = getJoinedGroupUpdateChangeStr(groupUpdate.joined, groupName); + const { token, args } = getJoinedGroupUpdateChangeStr( + groupUpdate.joined, + isGroupV2, + false, + groupName + ); + // TODO: clean up this typing + return window.i18n.stripped(...([token, args] as GetMessageArgs)); + } + + if (groupUpdate.joinedWithHistory?.length) { + // @ts-expect-error -- TODO: Fix by using new i18n builder + const { token, args } = getJoinedGroupUpdateChangeStr( + groupUpdate.joinedWithHistory, + true, + true, + groupName + ); // TODO: clean up this typing return window.i18n.stripped(...([token, args] as GetMessageArgs)); } @@ -275,12 +322,18 @@ export class MessageModel extends Backbone.Model { // TODO: clean up this typing return window.i18n.stripped(...([token, args] as GetMessageArgs)); } + if (groupUpdate.promoted?.length) { + // @ts-expect-error -- TODO: Fix by using new i18n builder + const { token, args } = getPromotedGroupUpdateChangeStr(groupUpdate.promoted, groupName); + // TODO: clean up this typing + return window.i18n.stripped(...([token, args] as GetMessageArgs)); + } window.log.warn('did not build a specific change for getDescription of ', groupUpdate); return window.i18n.stripped('groupUpdated'); } - if (this.isGroupInvitation()) { + if (this.isCommunityInvitation()) { return `😎 ${window.i18n.stripped('communityInvitation')}`; } @@ -290,20 +343,16 @@ export class MessageModel extends Backbone.Model { ) as DataExtractionNotificationMsg; if (dataExtraction.type === SignalService.DataExtractionNotification.Type.SCREENSHOT) { return window.i18n.stripped('screenshotTaken', { - name: 
getConversationController().getContactProfileNameOrShortenedPubKey( - dataExtraction.source - ), + name: ConvoHub.use().getContactProfileNameOrShortenedPubKey(dataExtraction.source), }); } return window.i18n.stripped('attachmentsMediaSaved', { - name: getConversationController().getContactProfileNameOrShortenedPubKey( - dataExtraction.source - ), + name: ConvoHub.use().getContactProfileNameOrShortenedPubKey(dataExtraction.source), }); } if (this.isCallNotification()) { - const name = getConversationController().getContactProfileNameOrShortenedPubKey( + const name = ConvoHub.use().getContactProfileNameOrShortenedPubKey( this.get('conversationId') ); const callNotificationType = this.get('callNotificationType'); @@ -324,7 +373,7 @@ export class MessageModel extends Backbone.Model { // NOTE For now we only show interaction errors in the message history if (interactionStatus === ConversationInteractionStatus.Error) { - const convo = getConversationController().get(this.get('conversationId')); + const convo = ConvoHub.use().get(this.get('conversationId')); if (convo) { const isGroup = !convo.isPrivate(); @@ -361,8 +410,8 @@ export class MessageModel extends Backbone.Model { } } if (this.isExpirationTimerUpdate()) { - const expireTimerUpdate = this.getExpirationTimerUpdate(); - const expireTimer = expireTimerUpdate?.expireTimer; + const expireTimerUpdate = this.getExpirationTimerUpdate() as ExpirationTimerUpdate; // the isExpirationTimerUpdate above enforces this + const expireTimer = expireTimerUpdate.expireTimer; const convo = this.getConversation(); if (!convo) { return ''; @@ -375,39 +424,20 @@ export class MessageModel extends Backbone.Model { ); const source = expireTimerUpdate?.source; - const isUs = UserUtils.isUsFromCache(source); - - const authorName = - getConversationController() - .get(source || '') - ?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n.stripped('unknown'); - - if (!expireTimerUpdate || expirationMode === 'off' || !expireTimer || expireTimer === 0) { - if (isUs) { - return window.i18n.stripped('disappearingMessagesTurnedOffYou'); - } - return window.i18n.stripped('disappearingMessagesTurnedOff', { - name: authorName, - }); - } - - const localizedMode = - expirationMode === 'deleteAfterRead' - ? 
window.i18n.stripped('disappearingMessagesTypeRead') - : window.i18n.stripped('disappearingMessagesTypeSent'); + const i18nProps = getTimerNotificationStr({ + convoId: convo.id, + author: source as PubkeyType, + expirationMode, + isGroup: convo.isGroup(), + timespanSeconds: expireTimer, + }); - if (isUs) { - return window.i18n.stripped('disappearingMessagesSetYou', { - time: TimerOptions.getAbbreviated(expireTimerUpdate.expireTimer || 0), - disappearing_messages_type: localizedMode, - }); + if ('args' in i18nProps) { + return window.i18n.stripped( + ...([i18nProps.token, i18nProps.args] as GetMessageArgs) + ); } - - return window.i18n.stripped('disappearingMessagesSet', { - time: TimerOptions.getAbbreviated(expireTimerUpdate.expireTimer || 0), - name: authorName, - disappearing_messages_type: localizedMode, - }); + return window.i18n.stripped(...([i18nProps.token] as GetMessageArgs)); } const body = this.get('body'); if (body) { @@ -418,8 +448,7 @@ export class MessageModel extends Backbone.Model { (pubkeysInDesc || []).forEach((pubkeyWithAt: string) => { const pubkey = pubkeyWithAt.slice(1); const isUS = isUsAnySogsFromCache(pubkey); - const displayName = - getConversationController().getContactProfileNameOrShortenedPubKey(pubkey); + const displayName = ConvoHub.use().getContactProfileNameOrShortenedPubKey(pubkey); if (isUS) { bodyMentionsMappedToNames = bodyMentionsMappedToNames?.replace( pubkeyWithAt, @@ -443,12 +472,11 @@ export class MessageModel extends Backbone.Model { return ''; } - public onDestroy() { - void this.cleanup(); - } - public async cleanup() { - await deleteExternalMessageFiles(this.attributes); + const changed = await deleteExternalMessageFiles(this.attributes); + if (changed) { + await this.commit(); + } } public getPropsForExpiringMessage(): PropsForExpiringMessage { @@ -517,17 +545,17 @@ export class MessageModel extends Backbone.Model { } public getPropsForGroupInvitation(): PropsForGroupInvitation | null { - if (!this.isGroupInvitation()) { + const invitation = this.getCommunityInvitation(); + if (!invitation || !invitation.url) { return null; } - const invitation = this.get('groupInvitation'); let serverAddress = ''; try { const url = new URL(invitation.url); serverAddress = url.origin; } catch (e) { - window?.log?.warn('failed to get hostname from opengroupv2 invitation', invitation); + window?.log?.warn('failed to get hostname from open groupv2 invitation', invitation); } return { @@ -544,7 +572,7 @@ export class MessageModel extends Backbone.Model { if (!this.isDataExtractionNotification()) { return null; } - const dataExtractionNotification = this.get('dataExtractionNotification'); + const dataExtractionNotification = this.getDataExtractionNotification(); if (!dataExtractionNotification) { window.log.warn('dataExtractionNotification should not happen'); @@ -588,7 +616,6 @@ export class MessageModel extends Backbone.Model { public getPropsForGroupUpdateMessage(): PropsForGroupUpdate | null { const groupUpdate = this.getGroupUpdateAsArray(); - if (!groupUpdate || isEmpty(groupUpdate)) { return null; } @@ -602,7 +629,16 @@ export class MessageModel extends Backbone.Model { if (groupUpdate.joined?.length) { const change: PropsForGroupUpdateAdd = { type: 'add', - added: groupUpdate.joined, + added: groupUpdate.joined as Array, + withHistory: false, + }; + return { change, ...sharedProps }; + } + if (groupUpdate.joinedWithHistory?.length) { + const change: PropsForGroupUpdateAdd = { + type: 'add', + added: groupUpdate.joinedWithHistory as Array, + withHistory: 
true, }; return { change, ...sharedProps }; } @@ -610,7 +646,7 @@ export class MessageModel extends Backbone.Model { if (groupUpdate.kicked?.length) { const change: PropsForGroupUpdateKicked = { type: 'kicked', - kicked: groupUpdate.kicked, + kicked: groupUpdate.kicked as Array, }; return { change, ...sharedProps }; } @@ -618,11 +654,18 @@ export class MessageModel extends Backbone.Model { if (groupUpdate.left?.length) { const change: PropsForGroupUpdateLeft = { type: 'left', - left: groupUpdate.left, + left: groupUpdate.left as Array, }; return { change, ...sharedProps }; } + if (groupUpdate.promoted?.length) { + const change: PropsForGroupUpdatePromoted = { + type: 'promoted', + promoted: groupUpdate.promoted as Array, + }; + return { change, ...sharedProps }; + } if (groupUpdate.name) { const change: PropsForGroupUpdateName = { type: 'name', @@ -630,12 +673,14 @@ export class MessageModel extends Backbone.Model { }; return { change, ...sharedProps }; } + if (groupUpdate.avatarChange) { + const change: PropsForGroupUpdateAvatarChange = { + type: 'avatarChange', + }; + return { change, ...sharedProps }; + } - // Just show a "Group Updated" message, not sure what was changed - const changeGeneral: PropsForGroupUpdateGeneral = { - type: 'general', - }; - return { change: changeGeneral, ...sharedProps }; + return null; } public getMessagePropStatus(): LastMessageStatusType { @@ -665,10 +710,6 @@ export class MessageModel extends Backbone.Model { return undefined; } - if (this.getConversation()?.get('left')) { - return 'sent'; - } - const readBy = this.get('read_by') || []; if (Storage.get(SettingsKey.settingsReadReceipt) && readBy.length > 0) { return 'read'; @@ -894,7 +935,7 @@ export class MessageModel extends Backbone.Model { linkPreviewPromise = uploadLinkPreviewsV3(firstPreviewWithData, openGroupV2); quotePromise = uploadQuoteThumbnailsV3(openGroupV2, quoteWithData); } else { - // if that's not an sogs, the file is uploaded to the fileserver instead + // if that's not an sogs, the file is uploaded to the file server instead attachmentPromise = uploadAttachmentsToFileServer(finalAttachments); linkPreviewPromise = uploadLinkPreviewToFileServer(firstPreviewWithData); quotePromise = uploadQuoteThumbnailsToFileServer(quoteWithData); @@ -944,7 +985,7 @@ export class MessageModel extends Backbone.Model { public async markAsDeleted() { this.set({ isDeleted: true, - body: window.i18n('deleteMessageDeleted', { count: 1 }), + body: window.i18n('deleteMessageDeletedGlobally'), quote: undefined, groupInvitation: undefined, dataExtractionNotification: undefined, @@ -955,6 +996,11 @@ export class MessageModel extends Backbone.Model { preview: undefined, reacts: undefined, reactsIndex: undefined, + flags: undefined, + callNotificationType: undefined, + interactionNotification: undefined, + reaction: undefined, + messageRequestResponse: undefined, }); // we can ignore the result of that markMessageReadNoCommit as it would only be used // to refresh the expiry of it(but it is already marked as "deleted", so we don't care) @@ -988,7 +1034,7 @@ export class MessageModel extends Backbone.Model { if (conversation.isPublic()) { const openGroupParams: OpenGroupVisibleMessageParams = { identifier: this.id, - timestamp: GetNetworkTime.getNowWithNetworkOffset(), + createAtNetworkTimestamp: NetworkTime.now(), lokiProfile: UserUtils.getOurProfile(), body, attachments, @@ -1003,7 +1049,7 @@ export class MessageModel extends Backbone.Model { const openGroupMessage = new OpenGroupVisibleMessage(openGroupParams); const 
openGroup = OpenGroupData.getV2OpenGroupRoom(conversation.id); - return getMessageQueue().sendToOpenGroupV2({ + return MessageQueue.use().sendToOpenGroupV2({ message: openGroupMessage, roomInfos, blinded: roomHasBlindEnabled(openGroup), @@ -1011,12 +1057,12 @@ export class MessageModel extends Backbone.Model { }); } - const timestamp = Date.now(); // force a new timestamp to handle user fixed his clock; + const createAtNetworkTimestamp = NetworkTime.now(); const chatParams: VisibleMessageParams = { identifier: this.id, body, - timestamp, + createAtNetworkTimestamp, attachments, preview: preview ? [preview] : [], quote, @@ -1038,10 +1084,10 @@ export class MessageModel extends Backbone.Model { } if (conversation.isPrivate()) { - return getMessageQueue().sendToPubKey( + return MessageQueue.use().sendToPubKey( PubKey.cast(conversation.id), chatMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); } @@ -1054,16 +1100,25 @@ export class MessageModel extends Backbone.Model { ); } + if (conversation.isClosedGroupV2()) { + const groupV2VisibleMessage = new ClosedGroupV2VisibleMessage({ + destination: PubKey.cast(this.get('conversationId')).key as GroupPubkeyType, + chatMessage, + }); + // we need the return await so that errors are caught in the catch {} + return await MessageQueue.use().sendToGroupV2({ + message: groupV2VisibleMessage, + }); + } + const closedGroupVisibleMessage = new ClosedGroupVisibleMessage({ - identifier: this.id, - groupId: PubKey.cast(this.get('conversationId')), - timestamp, + groupId: PubKey.cast(this.get('conversationId')).key, chatMessage, }); - return getMessageQueue().sendToGroup({ + return MessageQueue.use().sendToGroup({ message: closedGroupVisibleMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); } catch (e) { await this.saveErrors(e); @@ -1084,7 +1139,7 @@ export class MessageModel extends Backbone.Model { // This needs to be an unsafe call, because this method is called during // initial module setup. We may be in the middle of the initial fetch to // the database. 
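The send path now forks on group type: 03-prefixed groups get a dedicated `ClosedGroupV2VisibleMessage` sent via `MessageQueue.use().sendToGroupV2`, while legacy closed groups keep `sendToGroup` under the renamed `LegacyClosedGroup` namespace. A condensed sketch of that fork, assuming the `VisibleMessageParams` were already built as in the patch; import paths are copied from the diff where shown and guessed otherwise:

```ts
// Sketch: the 03-group vs legacy-group fork used by sendMessageJob/retrySend above.
import { GroupPubkeyType } from 'libsession_util_nodejs';
import { MessageQueue } from '../session/sending';
import { SnodeNamespaces } from '../session/apis/snode_api/namespaces';
import { PubKey } from '../session/types';
import {
  ClosedGroupV2VisibleMessage,
  ClosedGroupVisibleMessage,
} from '../session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage';
import {
  VisibleMessage,
  VisibleMessageParams,
} from '../session/messages/outgoing/visibleMessage/VisibleMessage';

async function sendToAnyGroup(conversationId: string, params: VisibleMessageParams) {
  const chatMessage = new VisibleMessage(params);

  if (PubKey.is03Pubkey(conversationId)) {
    // New 03-prefixed groups: dedicated message type and queue entry point.
    return MessageQueue.use().sendToGroupV2({
      message: new ClosedGroupV2VisibleMessage({
        destination: conversationId as GroupPubkeyType,
        chatMessage,
      }),
    });
  }

  // Legacy closed groups: groupId is now the raw key, and the namespace
  // was renamed from ClosedGroupMessage to LegacyClosedGroup.
  return MessageQueue.use().sendToGroup({
    message: new ClosedGroupVisibleMessage({
      groupId: PubKey.cast(conversationId).key,
      chatMessage,
    }),
    namespace: SnodeNamespaces.LegacyClosedGroup,
  });
}
```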
- return getConversationController().getUnsafe(this.get('conversationId')); + return ConvoHub.use().getUnsafe(this.get('conversationId')); } public getQuoteContact() { @@ -1097,7 +1152,7 @@ export class MessageModel extends Backbone.Model { return null; } - return getConversationController().get(author); + return ConvoHub.use().get(author); } public getSource() { @@ -1121,17 +1176,21 @@ export class MessageModel extends Backbone.Model { * * @param messageHash */ - public async updateMessageHash(messageHash: string) { + public updateMessageHash(messageHash: string) { if (!messageHash) { window?.log?.error('Message hash not provided to update message hash'); } - this.set({ - messageHash, - }); + if (this.get('messageHash') !== messageHash) { + window?.log?.info(`updated message ${this.id} with hash: ${messageHash}`); + + this.set({ + messageHash, + }); + } } public async sendSyncMessageOnly(contentMessage: ContentMessage) { - const now = GetNetworkTime.getNowWithNetworkOffset(); + const now = NetworkTime.now(); this.set({ sent_to: [UserUtils.getOurPubKeyStrFromCache()], @@ -1177,8 +1236,8 @@ export class MessageModel extends Backbone.Model { ); if (syncMessage) { - await getMessageQueue().sendSyncMessage({ - namespace: SnodeNamespaces.UserMessages, + await MessageQueue.use().sendSyncMessage({ + namespace: SnodeNamespaces.Default, message: syncMessage, }); } @@ -1221,14 +1280,11 @@ export class MessageModel extends Backbone.Model { if (!this.id) { throw new Error('A message always needs an id'); } - - perfStart(`messageCommit-${this.id}`); // because the saving to db calls _cleanData which mutates the field for cleaning, we need to save a copy const id = await Data.saveMessage(cloneDeep(this.attributes)); if (triggerUIUpdate) { this.dispatchMessageUpdate(); } - perfEnd(`messageCommit-${this.id}`, 'messageCommit'); return id; } @@ -1337,7 +1393,7 @@ export class MessageModel extends Backbone.Model { } // check the convo from this user // we want the convo of the sender of this message - const senderConvo = getConversationController().get(senderConvoId); + const senderConvo = ConvoHub.use().get(senderConvoId); if (!senderConvo) { return false; } @@ -1358,45 +1414,50 @@ export class MessageModel extends Backbone.Model { } /** - * Before, group_update attributes could be just the string 'You' and not an array. - * Using this method to get the group update makes sure than the joined, kicked, or left are always an array of string, or undefined + * A long time ago, group_update attributes could be just the string 'You' and not an array of pubkeys. + * Using this method to get the group update makes sure than the joined, kicked, or left are always an array of string, or undefined. + * This is legacy code, our joined, kicked, left, etc should have been saved as an Array for a long time now. */ private getGroupUpdateAsArray() { const groupUpdate = this.get('group_update'); if (!groupUpdate || isEmpty(groupUpdate)) { return undefined; } + const forcedArrayUpdate: MessageGroupUpdate = {}; - const left: Array | undefined = Array.isArray(groupUpdate.left) - ? groupUpdate.left - : groupUpdate.left - ? [groupUpdate.left] + forcedArrayUpdate.joined = Array.isArray(groupUpdate.joined) + ? groupUpdate.joined + : groupUpdate.joined + ? [groupUpdate.joined] + : undefined; + + forcedArrayUpdate.joinedWithHistory = Array.isArray(groupUpdate.joinedWithHistory) + ? groupUpdate.joinedWithHistory + : groupUpdate.joinedWithHistory + ? 
[groupUpdate.joinedWithHistory] : undefined; - const kicked: Array | undefined = Array.isArray(groupUpdate.kicked) + + forcedArrayUpdate.kicked = Array.isArray(groupUpdate.kicked) ? groupUpdate.kicked : groupUpdate.kicked ? [groupUpdate.kicked] : undefined; - const joined: Array | undefined = Array.isArray(groupUpdate.joined) - ? groupUpdate.joined - : groupUpdate.joined - ? [groupUpdate.joined] + + forcedArrayUpdate.promoted = Array.isArray(groupUpdate.promoted) + ? groupUpdate.promoted + : groupUpdate.promoted + ? [groupUpdate.promoted] : undefined; - const forcedArrayUpdate: MessageGroupUpdate = {}; + forcedArrayUpdate.left = Array.isArray(groupUpdate.left) + ? groupUpdate.left + : groupUpdate.left + ? [groupUpdate.left] + : undefined; + + forcedArrayUpdate.name = groupUpdate.name; + forcedArrayUpdate.avatarChange = groupUpdate.avatarChange; - if (left) { - forcedArrayUpdate.left = left; - } - if (joined) { - forcedArrayUpdate.joined = joined; - } - if (kicked) { - forcedArrayUpdate.kicked = kicked; - } - if (groupUpdate.name) { - forcedArrayUpdate.name = groupUpdate.name; - } return forcedArrayUpdate; } @@ -1452,7 +1513,7 @@ export class MessageCollection extends Backbone.Collection {} MessageCollection.prototype.model = MessageModel; export function findAndFormatContact(pubkey: string): FindAndFormatContactType { - const contactModel = getConversationController().get(pubkey); + const contactModel = ConvoHub.use().get(pubkey); let profileName: string | null = null; let isMe = false; diff --git a/ts/models/messageType.ts b/ts/models/messageType.ts index 5e87194bb1..e08aa9a01e 100644 --- a/ts/models/messageType.ts +++ b/ts/models/messageType.ts @@ -1,3 +1,4 @@ +import { PubkeyType } from 'libsession_util_nodejs'; import { defaultsDeep } from 'lodash'; import { v4 as uuidv4 } from 'uuid'; import { @@ -39,7 +40,7 @@ export interface MessageAttributes { read_by: Array; // we actually only care about the length of this. values are not used for anything type: MessageModelType; group_update?: MessageGroupUpdate; - groupInvitation?: any; + groupInvitation?: { url: string | undefined; name: string } | undefined; attachments?: any; conversationId: string; errors?: any; @@ -159,10 +160,13 @@ export type PropsForMessageRequestResponse = MessageRequestResponseMsg & { }; export type MessageGroupUpdate = { - left?: Array; - joined?: Array; - kicked?: Array; + left?: Array; + joined?: Array; + joinedWithHistory?: Array; + kicked?: Array; + promoted?: Array; name?: string; + avatarChange?: boolean; }; export interface MessageAttributesOptionals { @@ -184,9 +188,8 @@ export interface MessageAttributesOptionals { read_by?: Array; // we actually only care about the length of this. 
values are not used for anything type: MessageModelType; group_update?: MessageGroupUpdate; - groupInvitation?: any; + groupInvitation?: { url: string | undefined; name: string } | undefined; attachments?: any; - contact?: any; conversationId: string; errors?: any; flags?: number; diff --git a/ts/models/timerNotifications.ts b/ts/models/timerNotifications.ts new file mode 100644 index 0000000000..94199e9cd6 --- /dev/null +++ b/ts/models/timerNotifications.ts @@ -0,0 +1,100 @@ +import { PubkeyType } from 'libsession_util_nodejs'; +import { ConvoHub } from '../session/conversations'; +import { PropsForExpirationTimer } from '../state/ducks/conversations'; +import { PubKey } from '../session/types'; +import { UserUtils } from '../session/utils'; +import { TimerOptions } from '../session/disappearing_messages/timerOptions'; +import { isLegacyDisappearingModeEnabled } from '../session/disappearing_messages/legacy'; +import type { LocalizerComponentPropsObject } from '../types/localizer'; + +export function getTimerNotificationStr({ + expirationMode, + timespanSeconds, + convoId, + author, + isGroup, +}: Pick & { + author: PubkeyType; + convoId: string; + isGroup: boolean; +}): LocalizerComponentPropsObject { + const is03group = PubKey.is03Pubkey(convoId); + const authorIsUs = author === UserUtils.getOurPubKeyStrFromCache(); + const isLegacyGroup = isGroup && !is03group; + const timespanText = TimerOptions.getName(timespanSeconds || 0); + const disabled = !timespanSeconds || timespanSeconds <= 0; + + const authorName = ConvoHub.use().getContactProfileNameOrShortenedPubKey(author); + + // TODO: legacy messages support will be removed in a future release + if (isLegacyDisappearingModeEnabled(expirationMode)) { + return { + token: 'deleteAfterLegacyDisappearingMessagesTheyChangedTimer', + args: { + name: authorIsUs ? window.i18n('you') : authorName, + time: timespanText, + }, + } as const; + } + + const disappearing_messages_type = + expirationMode === 'deleteAfterRead' + ? window.i18n('disappearingMessagesTypeRead') + : window.i18n('disappearingMessagesTypeSent'); + + if (isLegacyGroup || isGroup) { + if (disabled) { + return authorIsUs + ? { + token: 'disappearingMessagesTurnedOffYouGroup', + } + : { + token: 'disappearingMessagesTurnedOffGroup', + args: { + name: authorName, + }, + }; + } + return authorIsUs + ? { + token: 'disappearingMessagesSetYou', + args: { time: timespanText, disappearing_messages_type }, + } + : { + token: 'disappearingMessagesSet', + args: { name: authorName, time: timespanText, disappearing_messages_type }, + }; + } + + // legacy groups and groups are handled above. + // This can only be a private chat or Note to Self. + if (disabled) { + return authorIsUs + ? { + token: 'disappearingMessagesTurnedOffYou', + } + : { + token: 'disappearingMessagesTurnedOff', + args: { + name: authorName, + }, + }; + } + + return authorIsUs + ? 
{ + token: 'disappearingMessagesSetYou', + args: { + time: timespanText, + disappearing_messages_type, + }, + } + : { + token: 'disappearingMessagesSet', + args: { + time: timespanText, + disappearing_messages_type, + name: authorName, + }, + }; +} diff --git a/ts/models/types.ts b/ts/models/types.ts index 9ceae75d7b..159c36934a 100644 --- a/ts/models/types.ts +++ b/ts/models/types.ts @@ -5,12 +5,12 @@ * To identity between an open or closed group before v3, we need to rely on the prefix (05 is closed groups, 'http%' is opengroup) * * - * We will need to support existing closed groups foir now, but we will be able to get rid of existing closed groups at some point. + * We will need to support existing closed groups for now, but we will be able to get rid of existing closed groups at some point. * When we do get rid of them, we will be able to remove any GROUP conversation with prefix 05 (as they are old closed groups) and update the remaining GROUP to be opengroups instead */ export enum ConversationTypeEnum { GROUP = 'group', - GROUPV3 = 'groupv3', + GROUPV2 = 'groupv2', PRIVATE = 'private', } diff --git a/ts/node/attachment_channel.ts b/ts/node/attachment_channel.ts index f53984dbb4..fe7d068082 100644 --- a/ts/node/attachment_channel.ts +++ b/ts/node/attachment_channel.ts @@ -1,14 +1,12 @@ -import path from 'path'; import { ipcMain } from 'electron'; +import fse from 'fs-extra'; +import { glob } from 'glob'; import { isString, map } from 'lodash'; +import path from 'path'; import rimraf from 'rimraf'; -import fse from 'fs-extra'; -import pify from 'pify'; -// eslint-disable-next-line import/no-named-default -import { default as glob } from 'glob'; +import { getAttachmentsPath } from '../shared/attachments/shared_attachments'; import { sqlNode } from './sql'; // checked - only node -import { createDeleter, getAttachmentsPath } from '../shared/attachments/shared_attachments'; let initialized = false; @@ -23,36 +21,18 @@ const ensureDirectory = async (userDataPath: string) => { await fse.ensureDir(getAttachmentsPath(userDataPath)); }; -const deleteAll = async ({ - userDataPath, - attachments, -}: { - userDataPath: string; - attachments: any; -}) => { - const deleteFromDisk = createDeleter(getAttachmentsPath(userDataPath)); - - for (let index = 0, max = attachments.length; index < max; index += 1) { - const file = attachments[index]; - // eslint-disable-next-line no-await-in-loop - await deleteFromDisk(file); - } - - console.log(`deleteAll: deleted ${attachments.length} files`); -}; - const getAllAttachments = async (userDataPath: string) => { const dir = getAttachmentsPath(userDataPath); const pattern = path.join(dir, '**', '*'); - const files = await pify(glob)(pattern, { nodir: true }); + const files = await glob(pattern, { nodir: true }); return map(files, file => path.relative(dir, file)); }; async function cleanupOrphanedAttachments(userDataPath: string) { const allAttachments = await getAllAttachments(userDataPath); const orphanedAttachments = sqlNode.removeKnownAttachments(allAttachments); - await deleteAll({ + await sqlNode.deleteAll({ userDataPath, attachments: orphanedAttachments, }); diff --git a/ts/node/hexStrings.ts b/ts/node/hexStrings.ts new file mode 100644 index 0000000000..782da7a1de --- /dev/null +++ b/ts/node/hexStrings.ts @@ -0,0 +1,50 @@ +/** + * Checks if a string is hex string. A hex string is a string like "0512ab". + * @param maybeHex the string to test + * @returns true if this string is a hex string. 
+ */
+const isHexString = (maybeHex: string) =>
+  maybeHex.length !== 0 && maybeHex.length % 2 === 0 && !/[^a-fA-F0-9]/u.test(maybeHex);
+
+/**
+ * Returns the Uint8Array corresponding to the given string.
+ * Note: this is different from the libsodium.from_hex().
+ * This takes a string like "0102" and converts it to a Uint8Array like [1, 2], whereas
+ * the libsodium one returns [0, 1, 0, 2]
+ *
+ * Throws an error if this string is not a hex string.
+ * @param hexString the string to convert from
+ * @returns the Uint8Array
+ */
+const fromHexString = (hexString: string): Uint8Array => {
+  if (!isHexString(hexString)) {
+    throw new Error('Not a hex string');
+  }
+  const matches = hexString.match(/.{1,2}/g);
+  if (!matches) {
+    return new Uint8Array();
+  }
+  return Uint8Array.from(matches.map(byte => parseInt(byte, 16)));
+};
+
+/**
+ * Returns the Uint8Array corresponding to the given string, without a 03/05 prefix when there is a prefix
+ * Note: this is different from the libsodium.from_hex().
+ */
+const fromHexStringNoPrefix = (hexString: string): Uint8Array => {
+  const asHex = fromHexString(hexString);
+  if (asHex.length === 33) {
+    return asHex.slice(1);
+  }
+  return asHex;
+};
+
+const toHexString = (bytes: Uint8Array) =>
+  bytes.reduce((str, byte) => str + byte.toString(16).padStart(2, '0'), '');
+
+export const HexString = {
+  toHexString,
+  fromHexString,
+  fromHexStringNoPrefix,
+  isHexString,
+};
diff --git a/ts/node/locale.ts b/ts/node/locale.ts
index 7acf1527ab..9363b67965 100644
--- a/ts/node/locale.ts
+++ b/ts/node/locale.ts
@@ -11,7 +11,7 @@ export function normalizeLocaleName(locale: string) {
   const dashedLocale = locale.replaceAll('_', '-');
 
   // Note: this is a pain, but we somehow needs to keep in sync this logic and the LOCALE_PATH_MAPPING from
-  // https://github.com/oxen-io/session-shared-scripts/blob/main/crowdin/generate_desktop_strings.py
+  // https://github.com/session-foundation/session-shared-scripts/blob/main/crowdin/generate_desktop_strings.py
   // What we do, is keep as is, anything given in LOCALE_PATH_MAPPING, but otherwise, keep only the first part of the locale.
   // So `es-419` is allowed, but `es-es` is hardcoded to es, fr_FR is hardcoded to fr, and so on.
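The `HexString` helpers added above intentionally differ from libsodium's `from_hex`: `'0102'` becomes a two-byte array, and `fromHexStringNoPrefix` drops the leading 03/05 byte from a 33-byte key. A quick usage sketch, with illustrative values; the import path depends on where the caller lives:

```ts
import { HexString } from './hexStrings'; // relative path is an assumption

console.log(HexString.isHexString('0512ab')); // true
console.log(HexString.isHexString('0512a')); // false (odd length)

const bytes = HexString.fromHexString('0102'); // Uint8Array [1, 2] (not [0, 1, 0, 2])
console.log(HexString.toHexString(bytes)); // '0102', round-trips back to the hex string

// A 33-byte key with a 03/05 prefix loses its first (prefix) byte:
const noPrefix = HexString.fromHexStringNoPrefix('05' + 'ab'.repeat(32));
console.log(noPrefix.length); // 32
```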
if ( diff --git a/ts/node/logging.ts b/ts/node/logging.ts index bf232fe072..80bd5f7d82 100644 --- a/ts/node/logging.ts +++ b/ts/node/logging.ts @@ -1,11 +1,12 @@ // NOTE: Temporarily allow `then` until we convert the entire file to `async` / `await`: /* eslint-disable more/no-then */ -import path from 'path'; import fs from 'fs'; +import path from 'path'; -import { app, ipcMain as ipc } from 'electron'; import Logger from 'bunyan'; +// eslint-disable-next-line import/order +import { app, ipcMain as ipc } from 'electron'; import _ from 'lodash'; import rimraf from 'rimraf'; @@ -184,7 +185,7 @@ async function fetchLogFile(logFile: string) { const fileListEntry = { level: 30, // INFO time: now.toJSON(), - msg: `Loaded this from logfile: "${logFile}"`, + msg: `Loaded this from log file: "${logFile}"`, }; const read = await fetchLog(logFile); diff --git a/ts/node/migration/sessionMigrations.ts b/ts/node/migration/sessionMigrations.ts index bf754b5786..158a5f38d3 100644 --- a/ts/node/migration/sessionMigrations.ts +++ b/ts/node/migration/sessionMigrations.ts @@ -105,6 +105,7 @@ const LOKI_SCHEMA_VERSIONS = [ updateToSessionSchemaVersion35, updateToSessionSchemaVersion36, updateToSessionSchemaVersion37, + updateToSessionSchemaVersion38, ]; function updateToSessionSchemaVersion1(currentVersion: number, db: BetterSqlite3.Database) { @@ -627,7 +628,7 @@ function updateToSessionSchemaVersion20(currentVersion: number, db: BetterSqlite // // obj.profile.displayName is the display as this user set it. // if (obj?.nickname?.length && obj?.profile?.displayName?.length) { - // // this one has a nickname set, but name is unset, set it to the displayName in the lokiProfile if it's exisitng + // // this one has a nickname set, but name is unset, set it to the displayName in the lokiProfile if it's existing // obj.name = obj.profile.displayName; // sqlNode.saveConversation(obj as ConversationAttributes, db); // } @@ -931,8 +932,8 @@ function updateToSessionSchemaVersion27(currentVersion: number, db: BetterSqlite } function getAllOpenGroupV2Conversations(instance: BetterSqlite3.Database) { - // first _ matches all opengroupv1 (they are completely removed in a migration now), - // second _ force a second char to be there, so it can only be opengroupv2 convos + // first _ matches all opengroup v1 (they are completely removed in a migration now), + // second _ force a second char to be there, so it can only be opengroup v2 convos const rows = instance .prepare( @@ -1463,7 +1464,7 @@ function updateToSessionSchemaVersion31(currentVersion: number, db: BetterSqlite }); /** - * Setup up the UserGroups Wrapper with all the comunities details which needs to be stored in it. + * Setup up the UserGroups Wrapper with all the communities details which needs to be stored in it. */ // this filter is based on the `isCommunityToStoreInWrapper` function. @@ -1493,7 +1494,7 @@ function updateToSessionSchemaVersion31(currentVersion: number, db: BetterSqlite }); console.info( - '===================== Done with communinities inserting =======================' + '===================== Done with communities inserting =======================' ); } @@ -1616,11 +1617,11 @@ function updateToSessionSchemaVersion33(currentVersion: number, db: BetterSqlite const loggedInUser = getLoggedInUserConvoDuringMigration(db); if (!loggedInUser?.ourKeys) { - // no user loggedin was empty. 
Considering no users are logged in + // Considering no users are logged in writeSessionSchemaVersion(targetVersion, db); return; } - // a user is logged in, we want to enable the 'inbox' polling for sogs, only if the current userwrapper for that field is undefined + // a user is logged in, we want to enable the 'inbox' polling for sogs, only if the current user wrapper for that field is undefined const { privateEd25519, publicKeyHex } = loggedInUser.ourKeys; // Get existing config wrapper dump and update it @@ -1970,6 +1971,38 @@ function updateToSessionSchemaVersion37(currentVersion: number, db: BetterSqlite console.log(`updateToSessionSchemaVersion${targetVersion}: success!`); } +function updateToSessionSchemaVersion38(currentVersion: number, db: BetterSqlite3.Database) { + const targetVersion = 38; + if (currentVersion >= targetVersion) { + return; + } + + console.log(`updateToSessionSchemaVersion${targetVersion}: starting...`); + + db.transaction(() => { + db.exec(`ALTER TABLE ${MESSAGES_TABLE} ADD COLUMN messageHash TEXT; + UPDATE ${MESSAGES_TABLE} SET + messageHash = json_extract(json, '$.messageHash'); + `); + + db.exec(`CREATE INDEX messages_t_messageHash ON ${MESSAGES_TABLE} ( + messageHash + );`); + db.exec(`CREATE INDEX messages_t_messageHash_author ON ${MESSAGES_TABLE} ( + messageHash, + source + );`); + db.exec(`CREATE INDEX messages_t_messageHash_author_convoId ON ${MESSAGES_TABLE} ( + messageHash, + source, + conversationId + );`); + writeSessionSchemaVersion(targetVersion, db); + })(); + + console.log(`updateToSessionSchemaVersion${targetVersion}: success!`); +} + export function printTableColumns(table: string, db: BetterSqlite3.Database) { console.info(db.pragma(`table_info('${table}');`)); } diff --git a/ts/node/migration/signalMigrations.ts b/ts/node/migration/signalMigrations.ts index 1dc919e466..6d7bddcd70 100644 --- a/ts/node/migration/signalMigrations.ts +++ b/ts/node/migration/signalMigrations.ts @@ -1,6 +1,6 @@ -import path from 'path'; import * as BetterSqlite3 from '@signalapp/better-sqlite3'; import { isNumber } from 'lodash'; +import path from 'path'; import { ATTACHMENT_DOWNLOADS_TABLE, diff --git a/ts/node/sodiumNode.ts b/ts/node/sodiumNode.ts index 6961154038..3657f92ce4 100644 --- a/ts/node/sodiumNode.ts +++ b/ts/node/sodiumNode.ts @@ -1,8 +1,10 @@ import * as wrappers from 'libsodium-wrappers-sumo'; +type LibSodiumWrappers = typeof wrappers; + export async function getSodiumNode() { // don't ask me why, but when called from node we have to do this as the types are incorrect?! 
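  // (concretely: at runtime the initialised module is reachable via `.default`, which the published typings do not
  // seem to reflect, hence the `any` escape hatch below and the cast back to `LibSodiumWrappers` on the way out)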
const anyWrappers = wrappers as any; await anyWrappers.ready; - return anyWrappers.default; + return anyWrappers.default as LibSodiumWrappers; } diff --git a/ts/node/sql.ts b/ts/node/sql.ts index 56de70b9b0..f27cd387b4 100644 --- a/ts/node/sql.ts +++ b/ts/node/sql.ts @@ -1,3 +1,4 @@ +// eslint-disable-next-line import/order import * as BetterSqlite3 from '@signalapp/better-sqlite3'; import { app, clipboard, dialog, Notification } from 'electron'; import fs from 'fs'; @@ -25,6 +26,7 @@ import { uniq, } from 'lodash'; +import { GroupPubkeyType } from 'libsession_util_nodejs'; import { ConversationAttributes } from '../models/conversationAttributes'; import { PubKey } from '../session/types/PubKey'; // checked - only node import { redactAll } from '../util/privacy'; // checked - only node @@ -62,10 +64,16 @@ import { } from '../types/sqlSharedTypes'; import { KNOWN_BLINDED_KEYS_ITEM, SettingsKey } from '../data/settings-key'; +import { + FindAllMessageFromSendersInConversationTypeArgs, + FindAllMessageHashesInConversationMatchingAuthorTypeArgs, + FindAllMessageHashesInConversationTypeArgs, +} from '../data/sharedDataTypes'; import { MessageAttributes } from '../models/messageType'; import { SignalService } from '../protobuf'; import { Quote } from '../receiver/types'; import { DURATION } from '../session/constants'; +import { createDeleter, getAttachmentsPath } from '../shared/attachments/shared_attachments'; import { ed25519Str } from '../session/utils/String'; import { getSQLCipherIntegrityCheck, @@ -148,7 +156,7 @@ async function initializeSql({ i18n: SetupI18nReturnType; passwordAttempt: boolean; }) { - console.info('initializeSql sqlnode'); + console.info('initializeSql sql node'); if (isInstanceInitialized()) { throw new Error('Cannot initialize more than once!'); } @@ -232,7 +240,7 @@ async function initializeSql({ return true; } -function removeDB(configDir = null) { +function removeDB(configDir: string | null = null) { if (isInstanceInitialized()) { throw new Error('removeDB: Cannot erase database when it is open!'); } @@ -289,7 +297,7 @@ function getGuardNodes() { function updateGuardNodes(nodes: Array) { assertGlobalInstance().transaction(() => { assertGlobalInstance().exec(`DELETE FROM ${GUARD_NODE_TABLE}`); - nodes.map(edkey => + nodes.map(edKey => assertGlobalInstance() .prepare( `INSERT INTO ${GUARD_NODE_TABLE} ( @@ -297,7 +305,7 @@ function updateGuardNodes(nodes: Array) { ) values ($ed25519PubKey)` ) .run({ - ed25519PubKey: edkey, + ed25519PubKey: edKey, }) ); })(); @@ -485,8 +493,8 @@ function saveConversation(data: ConversationAttributes): SaveConversationReturn blocksSogsMsgReqsTimestamp, } = formatted; - const omited = omit(formatted); - const keys = Object.keys(omited); + const omitted = omit(formatted); + const keys = Object.keys(omitted); const columnsCommaSeparated = keys.join(', '); const valuesArgs = keys.map(k => `$${k}`).join(', '); @@ -840,6 +848,7 @@ function saveMessage(data: MessageAttributes) { expireTimer, expirationStartTimestamp, flags, + messageHash, } = data; if (!id) { @@ -872,6 +881,7 @@ function saveMessage(data: MessageAttributes) { type: type || '', unread, flags: flags ?? 
0, + messageHash, }; assertGlobalInstance() @@ -896,7 +906,8 @@ function saveMessage(data: MessageAttributes) { source, type, unread, - flags + flags, + messageHash ) values ( $id, $json, @@ -917,7 +928,8 @@ function saveMessage(data: MessageAttributes) { $source, $type, $unread, - $flags + $flags, + $messageHash );` ) .run(payload); @@ -966,7 +978,7 @@ function saveSeenMessageHash(data: any) { try { assertGlobalInstance() .prepare( - `INSERT INTO seenMessages ( + `INSERT OR REPLACE INTO seenMessages ( expiresAt, hash ) values ( @@ -1028,6 +1040,47 @@ function removeMessagesByIds(ids: Array, instance?: BetterSqlite3.Databa console.log(`removeMessagesByIds of length ${ids.length} took ${Date.now() - start}ms`); } +function removeAllMessagesInConversationSentBefore( + { + deleteBeforeSeconds, + conversationId, + }: { deleteBeforeSeconds: number; conversationId: GroupPubkeyType }, + instance?: BetterSqlite3.Database +) { + const msgIds = assertGlobalInstanceOrInstance(instance) + .prepare( + `SELECT id FROM ${MESSAGES_TABLE} WHERE conversationId = $conversationId AND sent_at <= $beforeMs;` + ) + .all({ conversationId, beforeMs: deleteBeforeSeconds * 1000 }); + + assertGlobalInstanceOrInstance(instance) + .prepare( + `DELETE FROM ${MESSAGES_TABLE} WHERE conversationId = $conversationId AND sent_at <= $beforeMs;` + ) + .run({ conversationId, beforeMs: deleteBeforeSeconds * 1000 }); + console.info('removeAllMessagesInConversationSentBefore deleted msgIds:', JSON.stringify(msgIds)); + return msgIds.map(m => m.id); +} + +async function getAllMessagesWithAttachmentsInConversationSentBefore( + { + deleteAttachBeforeSeconds, + conversationId, + }: { deleteAttachBeforeSeconds: number; conversationId: GroupPubkeyType }, + instance?: BetterSqlite3.Database +) { + const rows = assertGlobalInstanceOrInstance(instance) + .prepare( + `SELECT json FROM ${MESSAGES_TABLE} WHERE conversationId = $conversationId AND sent_at <= $beforeMs;` + ) + .all({ conversationId, beforeMs: deleteAttachBeforeSeconds * 1000 }); + const messages = map(rows, row => jsonToObject(row.json)); + const messagesWithAttachments = messages.filter(m => { + return getExternalFilesForMessage(m).some(a => !isEmpty(a) && isString(a)); // when we remove an attachment, we set the path to '' so it should be excluded here + }); + return messagesWithAttachments; +} + function removeAllMessagesInConversation( conversationId: string, instance?: BetterSqlite3.Database @@ -1042,6 +1095,70 @@ function removeAllMessagesInConversation( .run({ conversationId }); } +function findAllMessageFromSendersInConversation( + { groupPk, toRemove, signatureTimestamp }: FindAllMessageFromSendersInConversationTypeArgs, + instance?: BetterSqlite3.Database +) { + if (!groupPk || !toRemove.length) { + return { messageHashes: [] }; + } + const rows = assertGlobalInstanceOrInstance(instance) + .prepare( + `SELECT json FROM ${MESSAGES_TABLE} WHERE conversationId = ? AND sent_at <= ? AND source IN ( ${toRemove.map(() => '?').join(', ')} )` + ) + .all(groupPk, signatureTimestamp, ...toRemove); + + if (!rows || isEmpty(rows)) { + return []; + } + return map(rows, row => jsonToObject(row.json)); +} + +function findAllMessageHashesInConversation( + { groupPk, messageHashes, signatureTimestamp }: FindAllMessageHashesInConversationTypeArgs, + instance?: BetterSqlite3.Database +) { + if (!groupPk || !messageHashes.length) { + return []; + } + const rows = compact( + assertGlobalInstanceOrInstance(instance) + .prepare( + `SELECT json FROM ${MESSAGES_TABLE} WHERE conversationId = ? 
AND sent_at <= ? AND messageHash IN ( ${messageHashes.map(() => '?').join(', ')} )` + ) + .all(groupPk, signatureTimestamp, ...messageHashes) + ); + + if (!rows || isEmpty(rows)) { + return []; + } + return map(rows, row => jsonToObject(row.json)); +} + +function findAllMessageHashesInConversationMatchingAuthor( + { + author, + groupPk, + messageHashes, + signatureTimestamp, + }: FindAllMessageHashesInConversationMatchingAuthorTypeArgs, + instance?: BetterSqlite3.Database +) { + if (!groupPk || !author || !messageHashes.length) { + return { msgHashesDeleted: [], msgIdsDeleted: [] }; + } + const rows = assertGlobalInstanceOrInstance(instance) + .prepare( + `SELECT json FROM ${MESSAGES_TABLE} WHERE conversationId = ? AND source = ? AND sent_at <= ? AND messageHash IN ( ${messageHashes.map(() => '?').join(', ')} );` + ) + .all(groupPk, author, signatureTimestamp, ...messageHashes); + + if (!rows || isEmpty(rows)) { + return null; + } + return map(rows, row => jsonToObject(row.json)); +} + function cleanUpExpirationTimerUpdateHistory( conversationId: string, isPrivate: boolean, @@ -1096,7 +1213,7 @@ function getMessageIdsFromServerIds(serverIds: Array, conversat Sqlite3 doesn't have a good way to have `IN` query with another query. See: https://github.com/mapbox/node-sqlite3/issues/762. - So we have to use templating to insert the values. + So we have to use string templates to insert the values. */ const rows = assertGlobalInstance() .prepare( @@ -1500,7 +1617,7 @@ function hasConversationOutgoingMessage(conversationId: string) { conversationId, }); if (!row) { - throw new Error('hasConversationOutgoingMessage: Unable to get coun'); + throw new Error('hasConversationOutgoingMessage: Unable to get count'); } return Boolean(row['count(*)']); @@ -1697,12 +1814,12 @@ function getNextExpiringMessage() { return map(rows, row => jsonToObject(row.json)); } -/* Unproccessed a received messages not yet processed */ +/* Unprocessed a received messages not yet processed */ const unprocessed: UnprocessedDataNode = { saveUnprocessed: (data: UnprocessedParameter) => { const { id, timestamp, version, attempts, envelope, senderIdentity, messageHash } = data; if (!id) { - throw new Error(`saveUnprocessed: id was falsey: ${id}`); + throw new Error(`saveUnprocessed: id was falsy: ${id}`); } assertGlobalInstance() @@ -1930,7 +2047,7 @@ function getMessagesWithFileAttachments(conversationId: string, limit: number) { function getExternalFilesForMessage(message: any) { const { attachments, quote, preview } = message; - const files: Array = []; + const files: Array = []; forEach(attachments, attachment => { const { path: file, thumbnail, screenshot } = attachment; @@ -1994,6 +2111,24 @@ function getExternalFilesForConversation( return files; } +async function deleteAll({ + userDataPath, + attachments, +}: { + userDataPath: string; + attachments: Array; +}) { + const deleteFromDisk = createDeleter(getAttachmentsPath(userDataPath)); + + for (let index = 0, max = attachments.length; index < max; index += 1) { + const file = attachments[index]; + // eslint-disable-next-line no-await-in-loop + await deleteFromDisk(file); + } + + console.log(`deleteAll: deleted ${attachments.length} files`); +} + function removeKnownAttachments(allAttachments: Array) { const lookup = fromPairs(map(allAttachments, file => [file, true])); const chunkSize = 50; @@ -2136,7 +2271,7 @@ function getLatestClosedGroupEncryptionKeyPair( function addClosedGroupEncryptionKeyPair( groupPublicKey: string, - keypair: object, + keyPair: object, instance?: 
BetterSqlite3.Database ) { const timestamp = Date.now(); @@ -2156,7 +2291,7 @@ function addClosedGroupEncryptionKeyPair( .run({ groupPublicKey, timestamp, - json: objectToJSON(keypair), + json: objectToJSON(keyPair), }); } @@ -2201,8 +2336,7 @@ function getV2OpenGroupRoom(conversationId: string, db?: BetterSqlite3.Database) return jsonToObject(row.json); } -function saveV2OpenGroupRoom(opengroupsv2Room: OpenGroupV2Room, instance?: BetterSqlite3.Database) { - const { serverUrl, roomId, conversationId } = opengroupsv2Room; +function saveV2OpenGroupRoom(opengroupsV2Room: OpenGroupV2Room, instance?: BetterSqlite3.Database) { assertGlobalInstanceOrInstance(instance) .prepare( `INSERT OR REPLACE INTO ${OPEN_GROUP_ROOMS_V2_TABLE} ( @@ -2218,10 +2352,10 @@ function saveV2OpenGroupRoom(opengroupsv2Room: OpenGroupV2Room, instance?: Bette )` ) .run({ - serverUrl, - roomId, - conversationId, - json: objectToJSON(opengroupsv2Room), + serverUrl: opengroupsV2Room.serverUrl, + roomId: opengroupsV2Room.roomId, + conversationId: opengroupsV2Room.conversationId, + json: objectToJSON(opengroupsV2Room), }); } @@ -2517,8 +2651,13 @@ export const sqlNode = { saveMessages, removeMessage, removeMessagesByIds, + removeAllMessagesInConversationSentBefore, + getAllMessagesWithAttachmentsInConversationSentBefore, cleanUpExpirationTimerUpdateHistory, removeAllMessagesInConversation, + findAllMessageFromSendersInConversation, + findAllMessageHashesInConversation, + findAllMessageHashesInConversationMatchingAuthor, getUnreadByConversation, getUnreadDisappearingByConversation, markAllAsReadByConversationNoExpiration, @@ -2554,7 +2693,7 @@ export const sqlNode = { removeAttachmentDownloadJob, removeAllAttachmentDownloadJobs, removeKnownAttachments, - + deleteAll, removeAll, getMessagesWithVisualMediaAttachments, diff --git a/ts/node/sql_calls/config_dump.ts b/ts/node/sql_calls/config_dump.ts index cfc9d97478..98f1690d53 100644 --- a/ts/node/sql_calls/config_dump.ts +++ b/ts/node/sql_calls/config_dump.ts @@ -3,6 +3,7 @@ */ import { compact, uniq } from 'lodash'; +import { GroupPubkeyType } from 'libsession_util_nodejs'; import { CONFIG_DUMP_TABLE, ConfigDumpDataNode, @@ -10,7 +11,7 @@ import { ConfigDumpRowWithoutData, } from '../../types/sqlSharedTypes'; // eslint-disable-next-line import/no-unresolved, import/extensions -import { ConfigWrapperObjectTypes } from '../../webworker/workers/browser/libsession_worker_functions'; +import { ConfigWrapperObjectTypesMeta } from '../../webworker/workers/browser/libsession_worker_functions'; import { assertGlobalInstance } from '../sqlInstance'; function parseRow( @@ -42,7 +43,7 @@ export function uniqCompacted(list: Array): Array { } export const configDumpData: ConfigDumpDataNode = { - getByVariantAndPubkey: (variant: ConfigWrapperObjectTypes, publicKey: string) => { + getByVariantAndPubkey: (variant: ConfigWrapperObjectTypesMeta, publicKey: string) => { const rows = assertGlobalInstance() .prepare( `SELECT publicKey, variant, data FROM ${CONFIG_DUMP_TABLE} WHERE variant = $variant AND publicKey = $publicKey;` @@ -83,6 +84,18 @@ export const configDumpData: ConfigDumpDataNode = { return compact(rows.map(parseRowNoData)); }, + getAllDumpsWithoutDataFor: (publicKey: string) => { + const rows = assertGlobalInstance() + .prepare(`SELECT variant, publicKey from ${CONFIG_DUMP_TABLE} WHERE publicKey=$publicKey;`) + .all({ publicKey }); + + if (!rows) { + return []; + } + + return compact(rows.map(parseRowNoData)); + }, + saveConfigDump: ({ data, publicKey, variant }: ConfigDumpRow) => { 
assertGlobalInstance() .prepare( @@ -102,4 +115,9 @@ export const configDumpData: ConfigDumpDataNode = { data, }); }, + deleteDumpFor: (publicKey: GroupPubkeyType) => { + assertGlobalInstance() + .prepare(`DELETE FROM ${CONFIG_DUMP_TABLE} WHERE publicKey=$publicKey;`) + .run({ publicKey }); + }, }; diff --git a/ts/node/sql_channel.ts b/ts/node/sql_channel.ts index 3fbe7c0fe2..5e9f9838a3 100644 --- a/ts/node/sql_channel.ts +++ b/ts/node/sql_channel.ts @@ -13,14 +13,15 @@ export function initializeSqlChannel() { throw new Error('sqlChannels: already initialized!'); } - ipcMain.on(SQL_CHANNEL_KEY, (event, jobId, callName, ...args) => { + // eslint-disable-next-line @typescript-eslint/no-misused-promises + ipcMain.on(SQL_CHANNEL_KEY, async (event, jobId, callName, ...args) => { try { const fn = (sqlNode as any)[callName]; if (!fn) { throw new Error(`sql channel: ${callName} is not an available function`); } - const result = fn(...args); + const result = await fn(...args); event.sender.send(`${SQL_CHANNEL_KEY}-done`, jobId, null, result); } catch (error) { diff --git a/ts/notifications/formatNotifications.ts b/ts/notifications/formatNotifications.ts new file mode 100644 index 0000000000..63c518e534 --- /dev/null +++ b/ts/notifications/formatNotifications.ts @@ -0,0 +1,46 @@ +import { ConversationInteractionStatus, ConversationInteractionType } from '../interactions/types'; +import { ConvoHub } from '../session/conversations'; +import { InteractionNotificationType } from '../state/ducks/types'; +import { assertUnreachable } from '../types/sqlSharedTypes'; + +function formatInteractionNotification( + interactionNotification: InteractionNotificationType, + conversationId: string +) { + const { interactionType, interactionStatus } = interactionNotification; + + // NOTE For now we only show interaction errors in the message history + if (interactionStatus === ConversationInteractionStatus.Error) { + const convo = ConvoHub.use().get(conversationId); + + if (convo) { + const isGroup = !convo.isPrivate(); + const isCommunity = convo.isPublic(); + const conversationName = convo?.getRealSessionUsername() || window.i18n('unknown'); + + switch (interactionType) { + case ConversationInteractionType.Hide: + // there is no text for hiding changes + return ''; + case ConversationInteractionType.Leave: + return isCommunity + ? window.i18n('communityLeaveError', { community_name: conversationName }) + : isGroup + ? window.i18n('groupLeaveErrorFailed', { group_name: conversationName }) + : null; + default: + assertUnreachable( + interactionType, + `Message.getDescription: Missing case error "${interactionType}"` + ); + } + } + } + + window.log.error('formatInteractionNotification: Unsupported case'); + return null; +} + +export const FormatNotifications = { + formatInteractionNotification, +}; diff --git a/ts/react.d.ts b/ts/react.d.ts new file mode 100644 index 0000000000..43b7e429a3 --- /dev/null +++ b/ts/react.d.ts @@ -0,0 +1,238 @@ +import 'react'; + +/** + * WARNING: if you change something here, you will most likely break some integration tests. + * So be sure to check with QA first. 
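+ * (each id below ends up rendered as a `data-testid` attribute via the `HTMLAttributes` augmentation at the
+ * bottom of this file; the integration tests presumably select on those attributes, hence the warning above)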
+ */ + +declare module 'react' { + type SessionDataTestId = + | 'group-member-status-text' + | 'loading-spinner' + | 'session-toast' + | 'loading-animation' + | 'your-session-id' + | 'chooser-new-community' + | 'chooser-new-group' + | 'chooser-new-conversation-button' + | 'new-conversation-button' + | 'message-request-banner' + | 'leftpane-section-container' + | 'group-name-input' + | 'open-url' + | 'recovery-password-seed-modal' + | 'password-input-reconfirm' + | 'conversation-header-subtitle' + | 'password-input' + | 'nickname-input' + | 'image-upload-click' + | 'profile-name-input' + | 'your-profile-name' + | 'edit-profile-dialog' + | 'image-upload-section' + | 'right-panel-group-name' + | 'control-message' + | 'header-conversation-name' + | 'disappear-messages-type-and-time' + | 'message-input' + | 'message-input-text-area' + | 'messages-container' + | 'decline-and-block-message-request' + | 'session-dropdown' + | 'path-light-container' + | 'add-user-button' + | 'back-button-conversation-options' + | 'send-message-button' + | 'scroll-to-bottom-button' + | 'end-call' + | 'modal-close-button' + | 'end-voice-message' + | 'back-button-message-details' + | 'edit-profile-icon' + | 'microphone-button' + | 'call-button' + | 'attachments-button' + | 'invite-warning' + | 'some-of-your-devices-outdated-conversation' + | 'some-of-your-devices-outdated-inbox' + | 'legacy-group-banner' + + // generic button types + | 'emoji-button' + | 'reveal-blocked-user-settings' + + // left pane section types + | 'theme-section' + | 'settings-section' + | 'message-section' + | 'privacy-section' + + // settings menu item types + | 'messageRequests-settings-menu-item' + | 'recoveryPassword-settings-menu-item' + | 'privacy-settings-menu-item' + | 'notifications-settings-menu-item' + | 'conversations-settings-menu-item' + | 'appearance-settings-menu-item' + | 'help-settings-menu-item' + | 'permissions-settings-menu-item' + | 'clearData-settings-menu-item' + + // timer options + | 'time-option-0' + | 'time-option-5' + | 'time-option-10' + | 'time-option-30' + | 'time-option-60' + | 'time-option-300' + | 'time-option-1800' + | 'time-option-3600' + | 'time-option-21600' + | 'time-option-43200' + | 'time-option-86400' + | 'time-option-604800' + | 'time-option-1209600' + + // generic readably message (not control message) + | 'message-content' + + // control message types + | 'message-request-response-message' + | 'interaction-notification' + | 'data-extraction-notification' + | 'group-update-message' + | 'disappear-control-message' + + // subtle control message types + | 'group-request-explanation' + | 'conversation-request-explanation' + | 'group-invite-control-message' + | 'empty-conversation-notification' + | 'group-control-message' + + // call notification types + | 'call-notification-missed-call' + | 'call-notification-started-call' + | 'call-notification-answered-a-call' + + // disappear options + | 'disappear-after-send-option' + | 'disappear-after-read-option' + | 'disappear-legacy-option' + | 'disappear-off-option' + + // settings toggle and buttons + | 'remove-password-settings-button' + | 'change-password-settings-button' + | 'enable-read-receipts' + | 'set-password-button' + | 'enable-read-receipts' + | 'enable-calls' + | 'enable-microphone' + | 'enable-follow-system-theme' + | 'unblock-button-settings-screen' + | 'save-attachment-from-details' + | 'resend-msg-from-details' + | 'reply-to-msg-from-details' + | 'leave-group-button' + | 'disappearing-messages' + | 'group-members' + | 'remove-moderators' + | 
'add-moderators' + | 'edit-group-name' + | 'delete-group-button' + + // SessionRadioGroup & SessionRadio + | 'password-input-confirm' + | 'msg-status' + | 'input-device_and_network' + | 'label-device_and_network' + | 'input-device_only' + | 'label-device_only' + | 'input-deleteForEveryone' + | 'label-deleteForEveryone' + | 'input-deleteJustForMe' + | 'label-deleteJustForMe' + | 'input-enterForSend' + | 'label-enterForSend' + | 'input-enterForNewLine' + | 'label-enterForNewLine' + | 'input-message' + | 'label-message' + | 'input-name' + | 'label-name' + | 'input-count' + | 'label-count' + + // links + | 'session-website-link' + | 'session-link-helpdesk' + | 'session-faq-link' + + // to sort + | 'restore-using-recovery' + | 'link-device' + | 'select-contact' + | 'contact' // this is way too generic + | 'contact-status' + | 'version-warning' + | 'open-url-confirm-button' + | 'continue-session-button' + | 'next-new-conversation-button' + | 'reveal-recovery-phrase' + | 'existing-account-button' + | 'create-account-button' + | 'resend-invite-button' + | 'session-confirm-cancel-button' + | 'session-confirm-ok-button' + | 'confirm-nickname' + | 'context-menu-item' + | 'view-qr-code-button' + | 'hide-recovery-password-button' + | 'copy-button-account-id' + | 'path-light-svg' + | 'group-member-name' + | 'privacy-policy-button' + | 'terms-of-service-button' + | 'chooser-invite-friend' + | 'your-account-id' + | 'hide-recovery-phrase-toggle' + | 'reveal-recovery-phrase-toggle' + | 'resend-promote-button' + | 'next-button' + | 'continue-button' + | 'back-button' + | 'empty-conversation' + | 'session-error-message' + | 'hide-input-text-toggle' + | 'show-input-text-toggle' + | 'save-button-profile-update' + | 'save-button-profile-update' + | 'copy-button-profile-update' + | 'disappear-set-button' + | 'decline-message-request' + | 'accept-message-request' + | 'mentions-popup-row' + | 'session-id-signup' + | 'three-dot-loading-animation' + | 'recovery-phrase-input' + | 'display-name-input' + | 'new-session-conversation' + | 'new-closed-group-name' + | 'leftpane-primary-avatar' + | 'img-leftpane-primary-avatar' + | 'conversation-options-avatar' + | 'copy-sender-from-details' + | 'copy-msg-from-details' + | 'block-unblock-modal-description' + // modules profile name + | 'module-conversation__user__profile-name' + | 'module-message-search-result__header__name__profile-name' + | 'module-message__author__profile-name' + | 'module-contact-name__profile-name' + | 'delete-from-details'; + + interface HTMLAttributes { + 'data-testid'?: SessionDataTestId; + } +} diff --git a/ts/receiver/cache.ts b/ts/receiver/cache.ts index b8a060ac54..91050d2078 100644 --- a/ts/receiver/cache.ts +++ b/ts/receiver/cache.ts @@ -1,20 +1,28 @@ import { map, toNumber } from 'lodash'; -import { EnvelopePlus } from './types'; -import { StringUtils } from '../session/utils'; import { Data } from '../data/data'; +import { PubKey } from '../session/types'; +import { StringUtils } from '../session/utils'; import { UnprocessedParameter } from '../types/sqlSharedTypes'; +import { EnvelopePlus } from './types'; -export async function removeFromCache(envelope: Pick) { +async function removeFromCache(envelope: Pick) { return Data.removeUnprocessed(envelope.id); } -export async function addToCache( - envelope: EnvelopePlus, - plaintext: ArrayBuffer, - messageHash: string -) { +function assertNon03Group(envelope: Pick) { + if (PubKey.is03Pubkey(envelope.source)) { + window.log.warn('tried to addtocache message with source:', envelope.source); + // 03 
group message keys are handled first. We also block the polling until the current messages are processed (so not updating the corresponding last hash) + // This means that we cannot miss a message from a 03 swarm, and if a message fails to be decrypted/handled, it will keep failing. + // So, there is no need for cache at all for those messages, which is great news as we consider the caching to be legacy code, to be removed asap. + throw new Error('addToCache we do not rely on the caching for 03 group messages'); + } +} + +async function addToCache(envelope: EnvelopePlus, plaintext: ArrayBuffer, messageHash: string) { const { id } = envelope; + assertNon03Group(envelope); const encodedEnvelope = StringUtils.decode(plaintext, 'base64'); const data: UnprocessedParameter = { @@ -70,15 +78,16 @@ async function increaseAttemptsOrRemove( ); } -export async function getAllFromCache() { - window?.log?.info('getAllFromCache'); +async function getAllFromCache() { const items = await fetchAllFromCache(); - window?.log?.info('getAllFromCache loaded', items.length, 'saved envelopes'); + if (items.length) { + window?.log?.info('getAllFromCache loaded', items.length, 'saved envelopes'); + } return increaseAttemptsOrRemove(items); } -export async function getAllFromCacheForSource(source: string) { +async function getAllFromCacheForSource(source: string) { const items = await fetchAllFromCache(); // keep items without source too (for old message already added to the cache) @@ -91,10 +100,15 @@ export async function getAllFromCacheForSource(source: string) { return increaseAttemptsOrRemove(itemsFromSource); } -export async function updateCacheWithDecryptedContent( - envelope: Pick, - plaintext: ArrayBuffer -): Promise { +async function updateCacheWithDecryptedContent({ + envelope, + decryptedContent, +}: { + envelope: Pick; + decryptedContent: ArrayBuffer; +}): Promise { + assertNon03Group(envelope); + const { id, senderIdentity, source } = envelope; const item = await Data.getUnprocessedById(id); if (!item) { @@ -111,7 +125,20 @@ export async function updateCacheWithDecryptedContent( item.senderIdentity = senderIdentity; } - item.decrypted = StringUtils.decode(plaintext, 'base64'); + item.decrypted = StringUtils.decode(decryptedContent, 'base64'); await Data.updateUnprocessedWithData(item.id, item); } + +async function forceEmptyCache() { + await Data.removeAllUnprocessed(); +} + +export const IncomingMessageCache = { + removeFromCache, + addToCache, + updateCacheWithDecryptedContent, + getAllFromCacheForSource, + getAllFromCache, + forceEmptyCache, +}; diff --git a/ts/receiver/callMessage.ts b/ts/receiver/callMessage.ts index 5e3257143a..2ff072fba9 100644 --- a/ts/receiver/callMessage.ts +++ b/ts/receiver/callMessage.ts @@ -1,11 +1,12 @@ import { toNumber } from 'lodash'; import { SignalService } from '../protobuf'; -import { GetNetworkTime } from '../session/apis/snode_api/getNetworkTime'; import { TTL_DEFAULT } from '../session/constants'; import { CallManager, UserUtils } from '../session/utils'; -import { WithMessageHash, WithOptExpireUpdate } from '../session/utils/calling/CallManager'; -import { removeFromCache } from './cache'; +import { WithOptExpireUpdate } from '../session/utils/calling/CallManager'; +import { IncomingMessageCache } from './cache'; import { EnvelopePlus } from './types'; +import { WithMessageHash } from '../session/types/with'; +import { NetworkTime } from '../util/NetworkTime'; // messageHash & messageHash are only needed for actions adding a callMessage to the database (so they 
expire) export async function handleCallMessage( @@ -27,32 +28,30 @@ export async function handleCallMessage( callMessage.type !== Type.END_CALL ) { window.log.info('Dropping incoming call from ourself'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (CallManager.isCallRejected(callMessage.uuid)) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); window.log.info(`Dropping already rejected call from this device ${callMessage.uuid}`); return; } if (type === Type.PROVISIONAL_ANSWER || type === Type.PRE_OFFER) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (type === Type.OFFER) { - if ( - Math.max(sentTimestamp - GetNetworkTime.getNowWithNetworkOffset()) > TTL_DEFAULT.CALL_MESSAGE - ) { + if (Math.max(sentTimestamp - NetworkTime.now()) > TTL_DEFAULT.CALL_MESSAGE) { window?.log?.info('Dropping incoming OFFER callMessage sent a while ago: ', sentTimestamp); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); await CallManager.handleCallTypeOffer(sender, callMessage, sentTimestamp, expireDetails); @@ -60,7 +59,7 @@ export async function handleCallMessage( } if (type === SignalService.CallMessage.Type.END_CALL) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); await CallManager.handleCallTypeEndCall(sender, callMessage.uuid); @@ -68,20 +67,20 @@ export async function handleCallMessage( } if (type === SignalService.CallMessage.Type.ANSWER) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); await CallManager.handleCallTypeAnswer(sender, callMessage, sentTimestamp, expireDetails); return; } if (type === SignalService.CallMessage.Type.ICE_CANDIDATES) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); await CallManager.handleCallTypeIceCandidates(sender, callMessage, sentTimestamp); return; } - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); // if this another type of call message, just add it to the manager await CallManager.handleOtherCallTypes(sender, callMessage, sentTimestamp); diff --git a/ts/receiver/closedGroups.ts b/ts/receiver/closedGroups.ts index f713c8f6f4..8324f09bb9 100644 --- a/ts/receiver/closedGroups.ts +++ b/ts/receiver/closedGroups.ts @@ -1,21 +1,17 @@ -import _, { isNumber, toNumber } from 'lodash'; +import _, { isEmpty, isNumber, toNumber } from 'lodash'; import { Data } from '../data/data'; import { SignalService } from '../protobuf'; -import { getMessageQueue } from '../session'; -import { getConversationController } from '../session/conversations'; -import * as ClosedGroup from '../session/group/closed-group'; +import { ConvoHub } from '../session/conversations'; import { PubKey } from '../session/types'; import { toHex } from '../session/utils/String'; import { BlockedNumberController } from '../util'; -import { removeFromCache } from './cache'; import { decryptWithSessionProtocol } from './contentMessage'; import { EnvelopePlus } from './types'; import { ConversationModel } from '../models/conversation'; import { getSwarmPollingInstance } from '../session/apis/snode_api'; -import { GetNetworkTime } from '../session/apis/snode_api/getNetworkTime'; import { SnodeNamespaces } from '../session/apis/snode_api/namespaces'; 
import { DisappearingMessageUpdate } from '../session/disappearing_messages/types'; import { ClosedGroupEncryptionPairReplyMessage } from '../session/messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairReplyMessage'; @@ -24,11 +20,16 @@ import { perfEnd, perfStart } from '../session/utils/Performance'; import { ReleasedFeatures } from '../util/releaseFeature'; import { Storage } from '../util/storage'; // eslint-disable-next-line import/no-unresolved, import/extensions -import { ConfigWrapperObjectTypes } from '../webworker/workers/browser/libsession_worker_functions'; +import { ConfigWrapperUser } from '../webworker/workers/browser/libsession_worker_functions'; + +import { ClosedGroup, GroupDiff, GroupInfo } from '../session/group/closed-group'; +import { IncomingMessageCache } from './cache'; import { getSettingsKeyFromLibsessionWrapper } from './configMessage'; import { ECKeyPair, HexKeyPair } from './keypairs'; import { queueAllCachedFromSource } from './receiver'; import { ConversationTypeEnum } from '../models/types'; +import { NetworkTime } from '../util/NetworkTime'; +import { MessageQueue } from '../session/sending'; export const distributingClosedGroupEncryptionKeyPairs = new Map(); @@ -78,7 +79,7 @@ export async function removeAllClosedGroupEncryptionKeyPairs(groupPubKey: string await Data.removeAllClosedGroupEncryptionKeyPairs(groupPubKey); } -export async function handleClosedGroupControlMessage( +export async function handleLegacyClosedGroupControlMessage( envelope: EnvelopePlus, groupUpdate: SignalService.DataMessage.ClosedGroupControlMessage, expireUpdate: DisappearingMessageUpdate | null @@ -91,21 +92,21 @@ export async function handleClosedGroupControlMessage( }` ); - if (PubKey.isClosedGroupV3(envelope.source)) { + if (PubKey.is03Pubkey(envelope.source)) { window?.log?.warn( 'Message ignored; closed group v3 updates cannot come from SignalService.DataMessage.ClosedGroupControlMessage ' ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (BlockedNumberController.isBlocked(PubKey.cast(envelope.source))) { window?.log?.warn('Message ignored; destined for blocked group'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } - // We drop New closed group message from our other devices, as they will come as ConfigurationMessage instead + // We drop New closed group message from our other devices, as they will come through libsession instead if (type === Type.ENCRYPTION_KEY_PAIR) { const isComingFromGroupPubkey = envelope.type === SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE; @@ -114,7 +115,7 @@ export async function handleClosedGroupControlMessage( } if (type === Type.NEW) { if ( - !getConversationController() + !ConvoHub.use() .get(envelope.senderIdentity || envelope.source) ?.isApproved() ) { @@ -139,7 +140,7 @@ export async function handleClosedGroupControlMessage( } window?.log?.error('Unknown group update type: ', type); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } function sanityCheckNewGroup( @@ -172,7 +173,7 @@ function sanityCheckNewGroup( return false; } - if (PubKey.isClosedGroupV3(hexGroupPublicKey)) { + if (PubKey.is03Pubkey(hexGroupPublicKey)) { window?.log?.warn('sanityCheckNewGroup: got a v3 new group as a ClosedGroupControlMessage. 
'); return false; } @@ -220,7 +221,7 @@ function sanityCheckNewGroup( */ export async function sentAtMoreRecentThanWrapper( envelopeSentAtMs: number, - variant: ConfigWrapperObjectTypes + variant: ConfigWrapperUser ): Promise<'unknown' | 'wrapper_more_recent' | 'envelope_more_recent'> { const userConfigReleased = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); if (!userConfigReleased) { @@ -248,7 +249,7 @@ export async function sentAtMoreRecentThanWrapper( } export async function handleNewClosedGroup( - envelope: EnvelopePlus, + envelope: Omit, groupUpdate: SignalService.DataMessage.ClosedGroupControlMessage ) { if (groupUpdate.type !== SignalService.DataMessage.ClosedGroupControlMessage.Type.NEW) { @@ -256,14 +257,14 @@ export async function handleNewClosedGroup( } if (!sanityCheckNewGroup(groupUpdate)) { window?.log?.warn('Sanity check for newGroup failed, dropping the message...'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } const ourNumber = UserUtils.getOurPubKeyFromCache(); if (envelope.senderIdentity === ourNumber.key) { window?.log?.warn('Dropping new closed group updatemessage from our other device.'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -288,7 +289,7 @@ export async function handleNewClosedGroup( // not from legacy config, so this is a new closed group deposited on our swarm by a user. // we do not want to process it if our wrapper is more recent that that invite to group envelope. window.log.info('dropping invite to legacy group because our wrapper is more recent'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -296,15 +297,15 @@ export async function handleNewClosedGroup( window?.log?.info( 'Got a new group message but apparently we are not a member of it. Dropping it.' 
); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } - const groupConvo = getConversationController().get(groupId); + const groupConvo = ConvoHub.use().get(groupId); const expireTimer = groupUpdate.expirationTimer; if (groupConvo) { - // if we did not left this group, just add the keypair we got if not already there - if (!groupConvo.get('isKickedFromGroup') && !groupConvo.get('left')) { + // if we did not got kicked this group, just add the keypair we got if not already there + if (!groupConvo.isKickedFromGroup()) { const ecKeyPairAlreadyExistingConvo = new ECKeyPair( encryptionKeyPair!.publicKey, encryptionKeyPair!.privateKey @@ -322,13 +323,13 @@ export async function handleNewClosedGroup( : 'legacy', providedExpireTimer: expireTimer, providedSource: sender, - receivedAt: GetNetworkTime.getNowWithNetworkOffset(), + sentAt: NetworkTime.now(), fromSync: false, fromCurrentDevice: false, fromConfigMessage: false, }); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } // convo exists and we left or got kicked, enable typing and continue processing @@ -343,21 +344,20 @@ export async function handleNewClosedGroup( } const convo = - groupConvo || - (await getConversationController().getOrCreateAndWait(groupId, ConversationTypeEnum.GROUP)); + groupConvo || (await ConvoHub.use().getOrCreateAndWait(groupId, ConversationTypeEnum.GROUP)); // ***** Creating a new group ***** window?.log?.info('Received a new ClosedGroup of id:', groupId); // we don't want the initial "AAA,BBB and You joined the group" // We only set group admins on group creation - const groupDetails: ClosedGroup.GroupInfo = { + const groupDetails: GroupInfo = { id: groupId, name, members, admins, activeAt: envelopeTimestamp, - expirationType: 'unknown', + expirationType: 'unknown', // group creation message, is not expiring expireTimer: 0, }; @@ -382,7 +382,7 @@ export async function handleNewClosedGroup( // start polling for this new group getSwarmPollingInstance().addGroupId(PubKey.cast(groupId)); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); // trigger decrypting of all this group messages we did not decrypt successfully yet. await queueAllCachedFromSource(groupId); } @@ -415,30 +415,30 @@ async function handleClosedGroupEncryptionKeyPair( if (!ourKeyPair) { window?.log?.warn("Couldn't find user X25519 key pair."); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } - const groupConvo = getConversationController().get(groupPublicKey); + const groupConvo = ConvoHub.use().get(groupPublicKey); if (!groupConvo) { window?.log?.warn( `Ignoring closed group encryption key pair for nonexistent group. ${groupPublicKey}` ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (!groupConvo.isClosedGroup()) { window?.log?.warn( `Ignoring closed group encryption key pair for nonexistent medium group. ${groupPublicKey}` ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } - if (!groupConvo.get('groupAdmins')?.includes(sender)) { + if (!groupConvo.getGroupAdmins().includes(sender)) { window?.log?.warn( `Ignoring closed group encryption key pair from non-admin. 
${groupPublicKey}` ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -448,27 +448,27 @@ async function handleClosedGroupEncryptionKeyPair( window?.log?.warn( `Couldn't find our wrapper in the encryption keypairs wrappers for group ${groupPublicKey}` ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } let plaintext: Uint8Array; try { perfStart(`encryptionKeyPair-${envelope.id}`); - const buffer = await decryptWithSessionProtocol( + const decryptedSessionProtocol = await decryptWithSessionProtocol( envelope, ourWrapper.encryptedKeyPair, ECKeyPair.fromKeyPair(ourKeyPair) ); perfEnd(`encryptionKeyPair-${envelope.id}`, 'encryptionKeyPair'); - if (!buffer || buffer.byteLength === 0) { - throw new Error(); + if (!decryptedSessionProtocol || isEmpty(decryptedSessionProtocol.decryptedContent)) { + throw new Error('decryptedSessionProtocol.decryptedContent is empty'); } - plaintext = new Uint8Array(buffer); + plaintext = new Uint8Array(decryptedSessionProtocol.decryptedContent); } catch (e) { window?.log?.warn("Couldn't decrypt closed group encryption key pair.", e); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -481,7 +481,7 @@ async function handleClosedGroupEncryptionKeyPair( } } catch (e) { window?.log?.warn("Couldn't parse closed group encryption key pair."); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -490,7 +490,7 @@ async function handleClosedGroupEncryptionKeyPair( keyPair = new ECKeyPair(proto.publicKey, proto.privateKey); } catch (e) { window?.log?.warn("Couldn't parse closed group encryption key pair."); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } window?.log?.info(`Received a new encryptionKeyPair for group ${groupPublicKey}`); @@ -505,11 +505,11 @@ async function handleClosedGroupEncryptionKeyPair( if (isKeyPairAlreadyHere) { window?.log?.info('Dropping already saved keypair for group', groupPublicKey); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } window?.log?.info('Got a new encryption keypair for group', groupPublicKey); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); // trigger decrypting of all this group messages we did not decrypt successfully yet. 
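  // (those envelopes were kept in the unprocessed cache keyed by their source, so re-queueing everything cached
  // for this group pubkey retries decryption now that the new keypair has been stored)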
await queueAllCachedFromSource(groupPublicKey); } @@ -524,29 +524,29 @@ async function performIfValid( const groupPublicKey = envelope.source; const sender = envelope.senderIdentity; - if (PubKey.isClosedGroupV3(groupPublicKey)) { + if (PubKey.is03Pubkey(groupPublicKey)) { window?.log?.warn( 'Message ignored; closed group v3 updates cannot come from SignalService.DataMessage.ClosedGroupControlMessage ' ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } - const convo = getConversationController().get(groupPublicKey); + const convo = ConvoHub.use().get(groupPublicKey); if (!convo) { window?.log?.warn('dropping message for nonexistent group'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (!convo) { window?.log?.warn('Ignoring a closed group update message (INFO) for a non-existing group'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } // Check that the message isn't from before the group was created - let lastJoinedTimestamp = convo.get('lastJoinedTimestamp'); + let lastJoinedTimestamp = convo.getLastJoinedTimestamp(); // might happen for existing groups if (!lastJoinedTimestamp) { const aYearAgo = Date.now() - 1000 * 60 * 24 * 365; @@ -561,21 +561,21 @@ async function performIfValid( window?.log?.warn( 'Got a group update with an older timestamp than when we joined this group last time. Dropping it.' ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } // Check that the sender is a member of the group (before the update) - const oldMembers = convo.get('members') || []; + const oldMembers = convo.getGroupMembers() || []; if (!oldMembers.includes(sender)) { window?.log?.error( `Error: closed group: ignoring closed group update message from non-member. ${sender} is not a current member.` ); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } // make sure the conversation with this user exist (even if it's just hidden) - await getConversationController().getOrCreateAndWait(sender, ConversationTypeEnum.PRIVATE); + await ConvoHub.use().getOrCreateAndWait(sender, ConversationTypeEnum.PRIVATE); const moreRecentOrNah = await sentAtMoreRecentThanWrapper(envelopeTimestamp, 'UserGroupsConfig'); const shouldNotApplyGroupChange = moreRecentOrNah === 'wrapper_more_recent'; @@ -607,7 +607,7 @@ async function performIfValid( } else if (groupUpdate.type === Type.MEMBER_LEFT) { await handleClosedGroupMemberLeft(envelope, convo, shouldNotApplyGroupChange, expireUpdate); } else if (groupUpdate.type === Type.ENCRYPTION_KEY_PAIR_REQUEST) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } // if you add a case here, remember to add it where performIfValid is called too. 
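  // (the types dispatched here map onto the handleClosedGroup* handlers defined below: name changes,
  // members added/removed, member left, and encryption key pair requests)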
} @@ -623,9 +623,10 @@ async function handleClosedGroupNameChanged( const newName = groupUpdate.name; window?.log?.info(`Got a group update for group ${envelope.source}, type: NAME_CHANGED`); - if (newName !== convo.get('displayNameInProfile')) { - const groupDiff: ClosedGroup.GroupDiff = { + if (newName !== convo.getRealSessionUsername()) { + const groupDiff: GroupDiff = { newName, + type: 'name', }; await ClosedGroup.addUpdateMessage({ convo, @@ -633,6 +634,7 @@ async function handleClosedGroupNameChanged( sender: envelope.senderIdentity, sentAt: toNumber(envelope.timestamp), expireUpdate, + markAlreadySent: false, // legacy groups support will be removed eventually }); if (!shouldOnlyAddUpdateMessage) { convo.set({ displayNameInProfile: newName }); @@ -641,7 +643,7 @@ async function handleClosedGroupNameChanged( await convo.commit(); } - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } async function handleClosedGroupMembersAdded( @@ -653,8 +655,10 @@ async function handleClosedGroupMembersAdded( ) { const { members: addedMembersBinary } = groupUpdate; const addedMembers = (addedMembersBinary || []).map(toHex); - const oldMembers = convo.get('members') || []; - const membersNotAlreadyPresent = addedMembers.filter(m => !oldMembers.includes(m)); + const oldMembers = convo.getGroupMembers() || []; + const membersNotAlreadyPresent = addedMembers + .filter(m => !oldMembers.includes(m)) + .filter(PubKey.is05Pubkey); window?.log?.info(`Got a group update for group ${envelope.source}, type: MEMBERS_ADDED`); // make sure those members are not on our zombie list @@ -667,7 +671,7 @@ async function handleClosedGroupMembersAdded( // this is just to make sure that the zombie list got written to the db. // if a member adds a member we have as a zombie, we consider that this member is not a zombie anymore await convo.commit(); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -680,13 +684,13 @@ async function handleClosedGroupMembersAdded( const members = [...oldMembers, ...membersNotAlreadyPresent]; // make sure the conversation with those members (even if it's just hidden) await Promise.all( - members.map(async m => - getConversationController().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE) - ) + members.map(async m => ConvoHub.use().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE)) ); - const groupDiff: ClosedGroup.GroupDiff = { - joiningMembers: membersNotAlreadyPresent, + const groupDiff: GroupDiff = { + type: 'add', + added: membersNotAlreadyPresent, + withHistory: false, }; await ClosedGroup.addUpdateMessage({ convo, @@ -694,6 +698,7 @@ async function handleClosedGroupMembersAdded( sender: envelope.senderIdentity, sentAt: toNumber(envelope.timestamp), expireUpdate, + markAlreadySent: false, // legacy groups support will be removed eventually }); if (!shouldOnlyAddUpdateMessage) { @@ -702,7 +707,7 @@ async function handleClosedGroupMembersAdded( convo.updateLastMessage(); await convo.commit(); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } async function areWeAdmin(groupConvo: ConversationModel) { @@ -710,7 +715,7 @@ async function areWeAdmin(groupConvo: ConversationModel) { throw new Error('areWeAdmin needs a convo'); } - const groupAdmins = groupConvo.get('groupAdmins'); + const groupAdmins = groupConvo.getGroupAdmins(); const ourNumber = UserUtils.getOurPubKeyStrFromCache(); return groupAdmins?.includes(ourNumber) || false; } @@ -722,18 +727,23 
@@ async function handleClosedGroupMembersRemoved( shouldOnlyAddUpdateMessage: boolean, // set this to true to not apply the change to the convo itself, just add the update in the conversation expireUpdate: DisappearingMessageUpdate | null ) { + if (convo.isClosedGroupV2()) { + throw new Error('legacy group method called with 03 group'); + } // Check that the admin wasn't removed - const currentMembers = convo.get('members'); + const currentMembers = convo.getGroupMembers(); // removedMembers are all members in the diff const removedMembers = groupUpdate.members.map(toHex); // effectivelyRemovedMembers are the members which where effectively on this group before the update // and is used for the group update message only - const effectivelyRemovedMembers = removedMembers.filter(m => currentMembers.includes(m)); + const effectivelyRemovedMembers = removedMembers + .filter(m => currentMembers.includes(m)) + .filter(PubKey.is05Pubkey); const groupPubKey = envelope.source; window?.log?.info(`Got a group update for group ${envelope.source}, type: MEMBERS_REMOVED`); const membersAfterUpdate = _.difference(currentMembers, removedMembers); - const groupAdmins = convo.get('groupAdmins'); + const groupAdmins = convo.getGroupAdmins(); if (!groupAdmins?.length) { throw new Error('No admins found for closed group member removed update.'); } @@ -741,14 +751,14 @@ async function handleClosedGroupMembersRemoved( if (removedMembers.includes(firstAdmin)) { window?.log?.warn('Ignoring invalid closed group update: trying to remove the admin.'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); throw new Error('Admins cannot be removed. They can only leave'); } // The MEMBERS_REMOVED message type can only come from an admin. if (!groupAdmins.includes(envelope.senderIdentity)) { window?.log?.warn('Ignoring invalid closed group update. Only admins can remove members.'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); throw new Error('Only admins can remove members.'); } @@ -758,8 +768,11 @@ async function handleClosedGroupMembersRemoved( const ourPubKey = UserUtils.getOurPubKeyFromCache(); const wasCurrentUserKicked = !membersAfterUpdate.includes(ourPubKey.key); if (wasCurrentUserKicked) { + if (!PubKey.is05Pubkey(groupPubKey)) { + throw new Error('handleClosedGroupMembersRemoved expected a 05 groupPk'); + } // we now want to remove everything related to a group when we get kicked from it. 
- await getConversationController().deleteClosedGroup(groupPubKey, { + await ConvoHub.use().deleteLegacyGroup(groupPubKey, { fromSyncMessage: false, sendLeaveMessage: false, }); @@ -769,8 +782,9 @@ async function handleClosedGroupMembersRemoved( // Only add update message if we have something to show if (membersAfterUpdate.length !== currentMembers.length) { - const groupDiff: ClosedGroup.GroupDiff = { - kickedMembers: effectivelyRemovedMembers, + const groupDiff: GroupDiff = { + type: 'kicked', + kicked: effectivelyRemovedMembers, }; await ClosedGroup.addUpdateMessage({ convo, @@ -778,12 +792,13 @@ async function handleClosedGroupMembersRemoved( sender: envelope.senderIdentity, sentAt: toNumber(envelope.timestamp), expireUpdate, + markAlreadySent: false, // legacy groups support will be removed eventually }); convo.updateLastMessage(); } // Update the group - const zombies = convo.get('zombies').filter(z => membersAfterUpdate.includes(z)); + const zombies = convo.getGroupZombies().filter(z => membersAfterUpdate.includes(z)); if (!shouldOnlyAddUpdateMessage) { convo.set({ members: membersAfterUpdate }); @@ -791,11 +806,11 @@ async function handleClosedGroupMembersRemoved( } await convo.commit(); } - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } function isUserAZombie(convo: ConversationModel, user: PubKey) { - return convo.get('zombies').includes(user.key); + return convo.getGroupZombies().includes(user.key); } /** @@ -807,7 +822,7 @@ function addMemberToZombies( userToAdd: PubKey, convo: ConversationModel ): boolean { - const zombies = convo.get('zombies'); + const zombies = convo.getGroupZombies(); const isAlreadyZombie = isUserAZombie(convo, userToAdd); if (isAlreadyZombie) { @@ -827,7 +842,7 @@ function removeMemberFromZombies( userToAdd: PubKey, convo: ConversationModel ): boolean { - const zombies = convo.get('zombies'); + const zombies = convo.getGroupZombies(); const isAlreadyAZombie = isUserAZombie(convo, userToAdd); if (!isAlreadyAZombie) { @@ -841,21 +856,27 @@ function removeMemberFromZombies( } async function handleClosedGroupAdminMemberLeft(groupPublicKey: string, envelope: EnvelopePlus) { + if (!PubKey.is05Pubkey(groupPublicKey)) { + throw new Error('handleClosedGroupAdminMemberLeft excepted a 05 groupPk'); + } // if the admin was remove and we are the admin, it can only be voluntary - await getConversationController().deleteClosedGroup(groupPublicKey, { + await ConvoHub.use().deleteLegacyGroup(groupPublicKey, { fromSyncMessage: false, sendLeaveMessage: false, }); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } async function handleClosedGroupLeftOurself(groupId: string, envelope: EnvelopePlus) { + if (!PubKey.is05Pubkey(groupId)) { + throw new Error('handleClosedGroupLeftOurself excepted a 05 groupPk'); + } // if we ourself left. 
It can only mean that another of our device left the group and we just synced that message through the swarm - await getConversationController().deleteClosedGroup(groupId, { + await ConvoHub.use().deleteLegacyGroup(groupId, { fromSyncMessage: false, sendLeaveMessage: false, }); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } async function handleClosedGroupMemberLeft( @@ -865,11 +886,16 @@ async function handleClosedGroupMemberLeft( expireUpdate: DisappearingMessageUpdate | null ) { const sender = envelope.senderIdentity; + + if (!PubKey.is05Pubkey(sender)) { + throw new Error('groupmember left sender should be a 05 pk'); + } + const groupPublicKey = envelope.source; - const didAdminLeave = convo.get('groupAdmins')?.includes(sender) || false; + const didAdminLeave = convo.getGroupAdmins().includes(sender) || false; // If the admin leaves the group is disbanded // otherwise, we remove the sender from the list of current members in this group - const oldMembers = convo.get('members') || []; + const oldMembers = convo.getGroupMembers() || []; const newMembers = oldMembers.filter(s => s !== sender); window?.log?.info(`Got a group update for group ${envelope.source}, type: MEMBER_LEFT`); @@ -894,8 +920,9 @@ async function handleClosedGroupMemberLeft( // Another member left, not us, not the admin, just another member. // But this member was in the list of members (as performIfValid checks for that) - const groupDiff: ClosedGroup.GroupDiff = { - leavingMembers: [sender], + const groupDiff: GroupDiff = { + type: 'left', + left: [sender], }; await ClosedGroup.addUpdateMessage({ @@ -904,6 +931,7 @@ async function handleClosedGroupMemberLeft( sender: envelope.senderIdentity, sentAt: toNumber(envelope.timestamp), expireUpdate, + markAlreadySent: false, // legacy groups support will be removed eventually }); convo.updateLastMessage(); // if a user just left and we are the admin, we remove him right away for everyone by sending a MEMBERS_REMOVED message so no need to add him as a zombie @@ -916,7 +944,7 @@ async function handleClosedGroupMemberLeft( await convo.commit(); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } async function sendLatestKeyPairToUsers( @@ -939,23 +967,23 @@ async function sendLatestKeyPairToUsers( await Promise.all( targetUsers.map(async member => { window?.log?.info(`Sending latest closed group encryption key pair to: ${member}`); - await getConversationController().getOrCreateAndWait(member, ConversationTypeEnum.PRIVATE); + await ConvoHub.use().getOrCreateAndWait(member, ConversationTypeEnum.PRIVATE); const wrappers = await ClosedGroup.buildEncryptionKeyPairWrappers([member], keyPairToUse); const keypairsMessage = new ClosedGroupEncryptionPairReplyMessage({ groupId: groupPubKey, - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), encryptedKeyPairs: wrappers, expirationType: null, // we keep that one **not** expiring (not rendered in the clients, and we need it to be as available as possible on the swarm) expireTimer: null, }); // the encryption keypair is sent using established channels - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendToPubKey( PubKey.cast(member), keypairsMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); }) ); diff --git a/ts/receiver/configMessage.ts b/ts/receiver/configMessage.ts index bf5ef17d42..e89ff7a948 100644 --- a/ts/receiver/configMessage.ts +++ b/ts/receiver/configMessage.ts @@ -1,107 +1,126 @@ /* 
eslint-disable no-await-in-loop */ -import { ContactInfo } from 'libsession_util_nodejs'; +import { ContactInfo, GroupPubkeyType, UserGroupsGet } from 'libsession_util_nodejs'; import { compact, difference, isEmpty, isNil, isNumber, toNumber } from 'lodash'; import { ConfigDumpData } from '../data/configDump/configDump'; -import { Data } from '../data/data'; import { SettingsKey } from '../data/settings-key'; -import { ConversationInteraction } from '../interactions'; import { deleteAllMessagesByConvoIdNoConfirmation } from '../interactions/conversationInteractions'; -import { SignalService } from '../protobuf'; import { ClosedGroup } from '../session'; -import { - joinOpenGroupV2WithUIEvents, - parseOpenGroupV2, -} from '../session/apis/open_group_api/opengroupV2/JoinOpenGroupV2'; import { getOpenGroupManager } from '../session/apis/open_group_api/opengroupV2/OpenGroupManagerV2'; import { OpenGroupUtils } from '../session/apis/open_group_api/utils'; import { getOpenGroupV2ConversationId } from '../session/apis/open_group_api/utils/OpenGroupUtils'; import { getSwarmPollingInstance } from '../session/apis/snode_api'; -import { getConversationController } from '../session/conversations'; -import { Profile, ProfileManager } from '../session/profile_manager/ProfileManager'; +import { ConvoHub } from '../session/conversations'; +import { ProfileManager } from '../session/profile_manager/ProfileManager'; import { PubKey } from '../session/types'; import { StringUtils, UserUtils } from '../session/utils'; import { toHex } from '../session/utils/String'; -import { ConfigurationSync } from '../session/utils/job_runners/jobs/ConfigurationSyncJob'; -import { FetchMsgExpirySwarm } from '../session/utils/job_runners/jobs/FetchMsgExpirySwarmJob'; // eslint-disable-next-line import/no-unresolved, import/extensions -import { IncomingConfResult, LibSessionUtil } from '../session/utils/libsession/libsession_utils'; +import { FetchMsgExpirySwarm } from '../session/utils/job_runners/jobs/FetchMsgExpirySwarmJob'; +import { UserSync } from '../session/utils/job_runners/jobs/UserSyncJob'; +import { LibSessionUtil } from '../session/utils/libsession/libsession_utils'; import { SessionUtilContact } from '../session/utils/libsession/libsession_utils_contacts'; import { SessionUtilConvoInfoVolatile } from '../session/utils/libsession/libsession_utils_convo_info_volatile'; import { SessionUtilUserGroups } from '../session/utils/libsession/libsession_utils_user_groups'; import { configurationMessageReceived, trigger } from '../shims/events'; import { getCurrentlySelectedConversationOutsideRedux } from '../state/selectors/conversations'; -import { assertUnreachable } from '../types/sqlSharedTypes'; +import { assertUnreachable, stringify, toFixedUint8ArrayOfLength } from '../types/sqlSharedTypes'; import { BlockedNumberController } from '../util'; -import { Registration } from '../util/registration'; +import { Storage, setLastProfileUpdateTimestamp } from '../util/storage'; +// eslint-disable-next-line import/no-unresolved, import/extensions +import { HexString } from '../node/hexStrings'; +import { + SnodeNamespace, + SnodeNamespaces, + SnodeNamespacesUserConfig, +} from '../session/apis/snode_api/namespaces'; +import { RetrieveMessageItemWithNamespace } from '../session/apis/snode_api/types'; +import { GroupInfo } from '../session/group/closed-group'; +import { groupInfoActions } from '../state/ducks/metaGroups'; +import { + ConfigWrapperObjectTypesMeta, + ConfigWrapperUser, + getGroupPubkeyFromWrapperType, + 
isBlindingWrapperType, + isMultiEncryptWrapperType, + isUserConfigWrapperType, +} from '../webworker/workers/browser/libsession_worker_functions'; +// eslint-disable-next-line import/no-unresolved, import/extensions +import { Data } from '../data/data'; import { ReleasedFeatures } from '../util/releaseFeature'; -import { Storage, isSignInByLinking, setLastProfileUpdateTimestamp } from '../util/storage'; -import { SnodeNamespaces } from '../session/apis/snode_api/namespaces'; -import { RetrieveMessageItemWithNamespace } from '../session/apis/snode_api/types'; // eslint-disable-next-line import/no-unresolved -import { ConfigWrapperObjectTypes } from '../webworker/workers/browser/libsession_worker_functions'; import { ContactsWrapperActions, ConvoInfoVolatileWrapperActions, GenericWrapperActions, + MetaGroupWrapperActions, UserConfigWrapperActions, UserGroupsWrapperActions, } from '../webworker/workers/browser/libsession_worker_interface'; -import { removeFromCache } from './cache'; import { addKeyPairToCacheAndDBIfNeeded } from './closedGroups'; import { HexKeyPair } from './keypairs'; import { queueAllCachedFromSource } from './receiver'; -import { EnvelopePlus } from './types'; -import { ConversationTypeEnum, CONVERSATION_PRIORITIES } from '../models/types'; + import { CONVERSATION } from '../session/constants'; +import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../models/types'; + +type IncomingUserResult = { + needsPush: boolean; + needsDump: boolean; + publicKey: string; + latestEnvelopeTimestamp: number; + namespace: SnodeNamespacesUserConfig; +}; -function groupByNamespace(incomingConfigs: Array) { +function byUserNamespace(incomingConfigs: Array) { const groupedByVariant: Map< - ConfigWrapperObjectTypes, + SnodeNamespacesUserConfig, Array > = new Map(); incomingConfigs.forEach(incomingConfig => { const { namespace } = incomingConfig; - - const wrapperId: ConfigWrapperObjectTypes | null = - namespace === SnodeNamespaces.UserProfile - ? 'UserConfig' - : namespace === SnodeNamespaces.UserContacts - ? 'ContactsConfig' - : namespace === SnodeNamespaces.UserGroups - ? 'UserGroupsConfig' - : namespace === SnodeNamespaces.ConvoInfoVolatile - ? 
'ConvoInfoVolatileConfig' - : null; - - if (!wrapperId) { - throw new Error('Unexpected wrapperId'); + if (!SnodeNamespace.isUserConfigNamespace(namespace)) { + throw new Error(`Invalid namespace on byUserNamespace: ${namespace}`); } - if (!groupedByVariant.has(wrapperId)) { - groupedByVariant.set(wrapperId, []); + if (!groupedByVariant.has(namespace)) { + groupedByVariant.set(namespace, []); } - groupedByVariant.get(wrapperId)?.push(incomingConfig); + groupedByVariant.get(namespace)?.push(incomingConfig); }); return groupedByVariant; } -async function mergeConfigsWithIncomingUpdates( +async function printDumpForDebug(prefix: string, variant: ConfigWrapperObjectTypesMeta) { + if (isUserConfigWrapperType(variant)) { + window.log.info(prefix, StringUtils.toHex(await GenericWrapperActions.makeDump(variant))); + return; + } + if (isMultiEncryptWrapperType(variant) || isBlindingWrapperType(variant)) { + return; // nothing to print for this one + } + const metaGroupDumps = await MetaGroupWrapperActions.metaMakeDump( + getGroupPubkeyFromWrapperType(variant) + ); + window.log.info(prefix, StringUtils.toHex(metaGroupDumps)); +} + +async function mergeUserConfigsWithIncomingUpdates( incomingConfigs: Array -): Promise> { - // first, group by variant so we do a single merge call - const groupedByNamespace = groupByNamespace(incomingConfigs); +): Promise> { + // first, group by namespaces so we do a single merge call + // Note: this call throws if given a non user kind as this function should only handle user variants/kinds + const groupedByNamespaces = byUserNamespace(incomingConfigs); - const groupedResults: Map = new Map(); + const groupedResults: Map = new Map(); - // TODOLATER currently we only poll for user config messages, so this can be hardcoded - const publicKey = UserUtils.getOurPubKeyStrFromCache(); + const us = UserUtils.getOurPubKeyStrFromCache(); try { - for (let index = 0; index < groupedByNamespace.size; index++) { - const variant = [...groupedByNamespace.keys()][index]; - const sameVariant = groupedByNamespace.get(variant); + for (let index = 0; index < groupedByNamespaces.size; index++) { + const namespace = [...groupedByNamespaces.keys()][index]; + const sameVariant = groupedByNamespaces.get(namespace); if (!sameVariant?.length) { continue; } @@ -109,44 +128,37 @@ async function mergeConfigsWithIncomingUpdates( data: StringUtils.fromBase64ToArray(msg.data), hash: msg.hash, })); + + const variant = LibSessionUtil.userNamespaceToVariant(namespace); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { - window.log.info( - `printDumpsForDebugging: before merge of ${variant}:`, - StringUtils.toHex(await GenericWrapperActions.dump(variant)) + await printDumpForDebug( + `printDumpsForDebugging: before merge of ${toMerge.length}, ${variant}:`, + variant ); - - for (let dumpIndex = 0; dumpIndex < toMerge.length; dumpIndex++) { - const element = toMerge[dumpIndex]; - window.log.info( - `printDumpsForDebugging: toMerge of ${dumpIndex}:${element.hash}: ${element.data} `, - StringUtils.toHex(await GenericWrapperActions.dump(variant)) - ); - } } const hashesMerged = await GenericWrapperActions.merge(variant, toMerge); - const needsPush = await GenericWrapperActions.needsPush(variant); + const needsDump = await GenericWrapperActions.needsDump(variant); + const needsPush = await GenericWrapperActions.needsPush(variant); const mergedTimestamps = sameVariant .filter(m => hashesMerged.includes(m.hash)) - .map(m => m.timestamp); + .map(m => m.storedAt); const latestEnvelopeTimestamp = 
Math.max(...mergedTimestamps); window.log.debug( - `${variant}: "${publicKey}" needsPush:${needsPush} needsDump:${needsDump}; mergedCount:${hashesMerged.length}` + `${variant}: needsPush:${needsPush} needsDump:${needsDump}; mergedCount:${hashesMerged.length} ` ); if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { - window.log.info( - `printDumpsForDebugging: after merge of ${variant}:`, - StringUtils.toHex(await GenericWrapperActions.dump(variant)) - ); + await printDumpForDebug(`printDumpsForDebugging: after merge of ${variant}:`, variant); } - const incomingConfResult: IncomingConfResult = { + const incomingConfResult: IncomingUserResult = { needsDump, needsPush, - kind: LibSessionUtil.variantToKind(variant), - publicKey, + publicKey: us, + namespace, latestEnvelopeTimestamp: latestEnvelopeTimestamp || Date.now(), }; groupedResults.set(variant, incomingConfResult); @@ -160,8 +172,13 @@ async function mergeConfigsWithIncomingUpdates( } export function getSettingsKeyFromLibsessionWrapper( - wrapperType: ConfigWrapperObjectTypes + wrapperType: ConfigWrapperObjectTypesMeta ): string | null { + if (!isUserConfigWrapperType(wrapperType)) { + throw new Error( + `getSettingsKeyFromLibsessionWrapper only cares about user variants but got ${wrapperType}` + ); + } switch (wrapperType) { case 'UserConfig': return SettingsKey.latestUserProfileEnvelopeTimestamp; @@ -185,7 +202,7 @@ export function getSettingsKeyFromLibsessionWrapper( } async function updateLibsessionLatestProcessedUserTimestamp( - wrapperType: ConfigWrapperObjectTypes, + wrapperType: ConfigWrapperUser, latestEnvelopeTimestamp: number ) { const settingsKey = getSettingsKeyFromLibsessionWrapper(wrapperType); @@ -207,12 +224,12 @@ async function updateLibsessionLatestProcessedUserTimestamp( * NOTE When adding new properties to the wrapper, don't update the conversation model here because the merge has not been done yet. * Instead you will need to updateOurProfileLegacyOrViaLibSession() to support them */ -async function handleUserProfileUpdate(result: IncomingConfResult): Promise { +async function handleUserProfileUpdate(result: IncomingUserResult): Promise { const profilePic = await UserConfigWrapperActions.getProfilePic(); const displayName = await UserConfigWrapperActions.getName(); const priority = await UserConfigWrapperActions.getPriority(); if (!profilePic || isEmpty(profilePic)) { - return result; + return; } const currentBlindedMsgRequest = Storage.get(SettingsKey.hasBlindedMsgRequestsEnabled); @@ -221,10 +238,14 @@ async function handleUserProfileUpdate(result: IncomingConfResult): Promise) { - const allContactsInDBWhichShouldBeInWrapperIds = getConversationController() + const allContactsInDBWhichShouldBeInWrapperIds = ConvoHub.use() .getConversations() .filter(SessionUtilContact.isContactToStoreInWrapper) .map(m => m.id as string); const currentlySelectedConversationId = getCurrentlySelectedConversationOutsideRedux(); const currentlySelectedConvo = currentlySelectedConversationId - ? getConversationController().get(currentlySelectedConversationId) + ? ConvoHub.use().get(currentlySelectedConversationId) : undefined; // we might have some contacts not in the wrapper anymore, so let's clean things up. 
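/* [Editor's note — illustrative sketch, not part of the diff above.] The contacts handling in this file follows a reconcile pattern: once the libsession Contacts wrapper has been merged, anything still in the local DB that the wrapper no longer tracks gets cleaned up. The helper below is a minimal sketch of that pattern, assuming the imports already present in configMessage.ts (ConvoHub, SessionUtilContact, ContactsWrapperActions, lodash's difference); the function name is hypothetical and the wrapper entries are assumed to expose an `id` field. */
import { difference } from 'lodash';
import { ConvoHub } from '../session/conversations';
import { SessionUtilContact } from '../session/utils/libsession/libsession_utils_contacts';
import { ContactsWrapperActions } from '../webworker/workers/browser/libsession_worker_interface';

async function sketchReconcileContactsWithWrapper() {
  // contact conversations currently in the DB that belong in the Contacts wrapper
  const idsInDb = ConvoHub.use()
    .getConversations()
    .filter(SessionUtilContact.isContactToStoreInWrapper)
    .map(m => m.id as string);
  // contacts the freshly merged wrapper actually knows about (assumed `id` field)
  const idsInWrapper = (await ContactsWrapperActions.getAll()).map(c => c.id);
  // whatever is in the DB but not in the wrapper was removed from another device
  const toRemove = difference(idsInDb, idsInWrapper);
  for (const id of toRemove) {
    // same options as the delete1o1() call used by deleteContactsFromDB() below
    // eslint-disable-next-line no-await-in-loop
    await ConvoHub.use().delete1o1(id, { fromSyncMessage: true, justHidePrivate: false, keepMessages: false });
  }
}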
@@ -329,9 +348,10 @@ async function deleteContactsFromDB(contactsToRemove: Array) { for (let index = 0; index < contactsToRemove.length; index++) { const contactToRemove = contactsToRemove[index]; try { - await getConversationController().delete1o1(contactToRemove, { + await ConvoHub.use().delete1o1(contactToRemove, { fromSyncMessage: true, justHidePrivate: false, + keepMessages: false, }); } catch (e) { window.log.warn( @@ -342,7 +362,7 @@ async function deleteContactsFromDB(contactsToRemove: Array) { } } -async function handleContactsUpdate(result: IncomingConfResult): Promise { +async function handleContactsUpdate(result: IncomingUserResult) { const us = UserUtils.getOurPubKeyStrFromCache(); const allContactsInWrapper = await ContactsWrapperActions.getAll(); @@ -357,7 +377,7 @@ async function handleContactsUpdate(result: IncomingConfResult): Promise m.fullUrlWithPubkey) ); - const allCommunitiesConversation = getConversationController() + const allCommunitiesConversation = ConvoHub.use() .getConversations() .filter(SessionUtilUserGroups.isCommunityToStoreInWrapper); @@ -487,7 +506,7 @@ async function handleCommunitiesUpdate() { for (let index = 0; index < communitiesToLeaveInDB.length; index++) { const toLeave = communitiesToLeaveInDB[index]; window.log.info('leaving community with convoId ', toLeave.id); - await getConversationController().deleteCommunity(toLeave.id, { + await ConvoHub.use().deleteCommunity(toLeave.id, { fromSyncMessage: true, }); } @@ -519,7 +538,7 @@ async function handleCommunitiesUpdate() { fromWrapper.roomCasePreserved ); - const communityConvo = getConversationController().get(convoId); + const communityConvo = ConvoHub.use().get(convoId); if (fromWrapper && communityConvo) { let changes = false; @@ -537,7 +556,7 @@ async function handleCommunitiesUpdate() { async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { // first let's check which closed groups needs to be joined or left by doing a diff of what is in the wrapper and what is in the DB const allLegacyGroupsInWrapper = await UserGroupsWrapperActions.getAllLegacyGroups(); - const allLegacyGroupsInDb = getConversationController() + const allLegacyGroupsInDb = ConvoHub.use() .getConversations() .filter(SessionUtilUserGroups.isLegacyGroupToRemoveFromDBIfNotInWrapper); @@ -568,12 +587,14 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { 'leaving legacy group from configuration sync message with convoId ', toLeave.id ); - const toLeaveFromDb = getConversationController().get(toLeave.id); - // the wrapper told us that this group is not tracked, so even if we left/got kicked from it, remove it from the DB completely - await getConversationController().deleteClosedGroup(toLeaveFromDb.id, { - fromSyncMessage: true, - sendLeaveMessage: false, // this comes from the wrapper, so we must have left/got kicked from that group already and our device already handled it. - }); + const toLeaveFromDb = ConvoHub.use().get(toLeave.id); + if (PubKey.is05Pubkey(toLeaveFromDb.id)) { + // the wrapper told us that this group is not tracked, so even if we left/got kicked from it, remove it from the DB completely + await ConvoHub.use().deleteLegacyGroup(toLeaveFromDb.id, { + fromSyncMessage: true, + sendLeaveMessage: false, // this comes from the wrapper, so we must have left/got kicked from that group already and our device already handled it. 
+ }); + } } for (let index = 0; index < legacyGroupsToJoinInDB.length; index++) { @@ -584,16 +605,13 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { ); // let's just create the required convo here, as we update the fields right below - await getConversationController().getOrCreateAndWait( - toJoin.pubkeyHex, - ConversationTypeEnum.GROUP - ); + await ConvoHub.use().getOrCreateAndWait(toJoin.pubkeyHex, ConversationTypeEnum.GROUP); } for (let index = 0; index < allLegacyGroupsInWrapper.length; index++) { const fromWrapper = allLegacyGroupsInWrapper[index]; - const legacyGroupConvo = getConversationController().get(fromWrapper.pubkeyHex); + const legacyGroupConvo = ConvoHub.use().get(fromWrapper.pubkeyHex); if (!legacyGroupConvo) { // this should not happen as we made sure to create them before window.log.warn( @@ -605,14 +623,13 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { const members = fromWrapper.members.map(m => m.pubkeyHex); const admins = fromWrapper.members.filter(m => m.isAdmin).map(m => m.pubkeyHex); - const creationTimestamp = fromWrapper.joinedAtSeconds ? fromWrapper.joinedAtSeconds * 1000 : CONVERSATION.LAST_JOINED_FALLBACK_TIMESTAMP; // then for all the existing legacy group in the wrapper, we need to override the field of what we have in the DB with what is in the wrapper // We only set group admins on group creation - const groupDetails: ClosedGroup.GroupInfo = { + const groupDetails: GroupInfo = { id: fromWrapper.pubkeyHex, name: fromWrapper.name, members, @@ -638,7 +655,7 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { : 'off', providedExpireTimer: fromWrapper.disappearingTimerSeconds, providedSource: legacyGroupConvo.id, - receivedAt: latestEnvelopeTimestamp, + sentAt: latestEnvelopeTimestamp, fromSync: true, shouldCommitConvo: false, fromCurrentDevice: false, @@ -647,7 +664,7 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { changes = success; } - const existingTimestampMs = legacyGroupConvo.get('lastJoinedTimestamp'); + const existingTimestampMs = legacyGroupConvo.getLastJoinedTimestamp(); const existingJoinedAtSeconds = Math.floor(existingTimestampMs / 1000); if (existingJoinedAtSeconds !== creationTimestamp) { legacyGroupConvo.set({ @@ -655,11 +672,11 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { }); changes = true; } - // start polling for this group if we haven't left it yet. The wrapper does not store this info for legacy group so we check from the DB entry instead - if (!legacyGroupConvo.get('isKickedFromGroup') && !legacyGroupConvo.get('left')) { + // start polling for this group if we are still part of it. 
+ if (!legacyGroupConvo.isKickedFromGroup()) { getSwarmPollingInstance().addGroupId(PubKey.cast(fromWrapper.pubkeyHex)); - // save the encryption keypair if needed + // save the encryption key pair if needed if (!isEmpty(fromWrapper.encPubkey) && !isEmpty(fromWrapper.encSeckey)) { try { const inWrapperKeypair: HexKeyPair = { @@ -669,13 +686,13 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { await addKeyPairToCacheAndDBIfNeeded(fromWrapper.pubkeyHex, inWrapperKeypair); } catch (e) { - window.log.warn('failed to save keypair for legacugroup', fromWrapper.pubkeyHex); + window.log.warn('failed to save key pair for legacy group', fromWrapper.pubkeyHex); } } } if (changes) { - // this commit will grab the latest encryption keypair and add it to the user group wrapper if needed + // this commit will grab the latest encryption key pair and add it to the user group wrapper if needed await legacyGroupConvo.commit(); } @@ -684,7 +701,109 @@ async function handleLegacyGroupUpdate(latestEnvelopeTimestamp: number) { } } -async function handleUserGroupsUpdate(result: IncomingConfResult): Promise { +async function handleSingleGroupUpdate({ + groupInWrapper, + userEdKeypair, +}: { + groupInWrapper: UserGroupsGet; + latestEnvelopeTimestamp: number; + userEdKeypair: UserUtils.ByteKeyPair; +}) { + const groupPk = groupInWrapper.pubkeyHex; + try { + // dump is always empty when creating a new groupInfo + await MetaGroupWrapperActions.init(groupPk, { + metaDumped: null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(userEdKeypair.privKeyBytes, 64).buffer, + groupEd25519Secretkey: groupInWrapper.secretKey, + groupEd25519Pubkey: toFixedUint8ArrayOfLength(HexString.fromHexString(groupPk.slice(2)), 32) + .buffer, + }); + } catch (e) { + window.log.warn( + `handleSingleGroupUpdate meta wrapper init of "${groupPk}" failed with`, + e.message + ); + } + + if (!ConvoHub.use().get(groupPk)) { + const created = await ConvoHub.use().getOrCreateAndWait(groupPk, ConversationTypeEnum.GROUPV2); + const joinedAt = groupInWrapper.joinedAtSeconds * 1000 || Date.now(); + const expireTimer = + groupInWrapper.disappearingTimerSeconds && groupInWrapper.disappearingTimerSeconds > 0 + ? groupInWrapper.disappearingTimerSeconds + : undefined; + created.set({ + active_at: joinedAt, + displayNameInProfile: groupInWrapper.name || undefined, + priority: groupInWrapper.priority, + lastJoinedTimestamp: joinedAt, + expireTimer, + expirationMode: expireTimer ? 'deleteAfterSend' : 'off', + }); + await created.commit(); + getSwarmPollingInstance().addGroupId(PubKey.cast(groupPk)); + } +} + +async function handleSingleGroupUpdateToLeave(toLeave: GroupPubkeyType) { + // that group is not in the wrapper but in our local DB. it must be removed and cleaned + try { + window.log.debug( + `About to deleteGroup ${toLeave} via handleSingleGroupUpdateToLeave as in DB but not in wrapper` + ); + + await ConvoHub.use().deleteGroup(toLeave, { + fromSyncMessage: true, + sendLeaveMessage: false, + deletionType: 'doNotKeep', + deleteAllMessagesOnSwarm: false, + forceDestroyForAllMembers: false, + }); + } catch (e) { + window.log.info('Failed to deleteClosedGroup with: ', e.message); + } +} + +/** + * Called when we just got a userGroups merge from the network. We need to apply the changes to our local state. (i.e. 
DB and redux slice of 03 groups) + */ +async function handleGroupUpdate(latestEnvelopeTimestamp: number) { + // first let's check which groups needs to be joined or left by doing a diff of what is in the wrapper and what is in the DB + const allGroupsInWrapper = await UserGroupsWrapperActions.getAllGroups(); + const allGroupsIdsInDb = ConvoHub.use() + .getConversations() + .map(m => m.id) + .filter(PubKey.is03Pubkey); + + const allGroupsIdsInWrapper = allGroupsInWrapper.map(m => m.pubkeyHex); + window.log.debug('allGroupsIdsInWrapper', stringify(allGroupsIdsInWrapper)); + window.log.debug('allGroupsIdsInDb', stringify(allGroupsIdsInDb)); + + const userEdKeypair = await UserUtils.getUserED25519KeyPairBytes(); + if (!userEdKeypair) { + throw new Error('userEdKeypair is not set'); + } + + for (let index = 0; index < allGroupsInWrapper.length; index++) { + const groupInWrapper = allGroupsInWrapper[index]; + window.inboxStore?.dispatch(groupInfoActions.handleUserGroupUpdate(groupInWrapper) as any); + + await handleSingleGroupUpdate({ groupInWrapper, latestEnvelopeTimestamp, userEdKeypair }); + } + + const groupsInDbButNotInWrapper = difference(allGroupsIdsInDb, allGroupsIdsInWrapper); + window.log.info( + `we have to leave ${groupsInDbButNotInWrapper.length} 03 groups in DB compared to what is in the wrapper` + ); + + for (let index = 0; index < groupsInDbButNotInWrapper.length; index++) { + const toRemove = groupsInDbButNotInWrapper[index]; + await handleSingleGroupUpdateToLeave(toRemove); + } +} + +async function handleUserGroupsUpdate(result: IncomingUserResult) { const toHandle = SessionUtilUserGroups.getUserGroupTypes(); for (let index = 0; index < toHandle.length; index++) { const typeToHandle = toHandle[index]; @@ -695,13 +814,14 @@ async function handleUserGroupsUpdate(result: IncomingConfResult): Promise { +async function handleConvoInfoVolatileUpdate() { const types = SessionUtilConvoInfoVolatile.getConvoInfoVolatileTypes(); for (let typeIndex = 0; typeIndex < types.length; typeIndex++) { const type = types[typeIndex]; @@ -781,9 +899,9 @@ async function handleConvoInfoVolatileUpdate( break; case 'Community': try { - const wrapperComms = await ConvoInfoVolatileWrapperActions.getAllCommunities(); - for (let index = 0; index < wrapperComms.length; index++) { - const fromWrapper = wrapperComms[index]; + const wrapperCommunities = await ConvoInfoVolatileWrapperActions.getAllCommunities(); + for (let index = 0; index < wrapperCommunities.length; index++) { + const fromWrapper = wrapperCommunities[index]; const convoId = getOpenGroupV2ConversationId( fromWrapper.baseUrl, @@ -824,15 +942,37 @@ async function handleConvoInfoVolatileUpdate( } break; + case 'Group': + try { + const groupsV2 = await ConvoInfoVolatileWrapperActions.getAllGroups(); + for (let index = 0; index < groupsV2.length; index++) { + const fromWrapper = groupsV2[index]; + + try { + await applyConvoVolatileUpdateFromWrapper( + fromWrapper.pubkeyHex, + fromWrapper.unread, + fromWrapper.lastRead + ); + } catch (e) { + window.log.warn( + 'handleConvoInfoVolatileUpdate of "Group" failed with error: ', + e.message + ); + } + } + } catch (e) { + window.log.warn('getAllGroups of "Group" failed with error: ', e.message); + } + break; + default: - assertUnreachable(type, `handleConvoInfoVolatileUpdate: unhandeld switch case: ${type}`); + assertUnreachable(type, `handleConvoInfoVolatileUpdate: unhandled switch case: ${type}`); } } - - return result; } -async function processMergingResults(results: Map) { +async function 
processUserMergingResults(results: Map) { if (!results || !results.size) { return; } @@ -847,30 +987,32 @@ async function processMergingResults(results: Map ) { - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - if (!userConfigLibsession) { - return; - } - if (isEmpty(configMessages)) { return; } @@ -922,33 +1057,35 @@ async function handleConfigMessagesViaLibSession( window?.log?.debug( `Handling our sharedConfig message via libsession_util ${JSON.stringify( configMessages.map(m => ({ - namespace: m.namespace, hash: m.hash, + namespace: m.namespace, })) )}` ); - const incomingMergeResult = await mergeConfigsWithIncomingUpdates(configMessages); - - await processMergingResults(incomingMergeResult); + const incomingMergeResult = await mergeUserConfigsWithIncomingUpdates(configMessages); + await processUserMergingResults(incomingMergeResult); } -async function updateOurProfileLegacyOrViaLibSession({ - sentAt, - displayName, - profileUrl, - profileKey, - priority, -}: Profile & { sentAt: number }) { - await ProfileManager.updateOurProfileSync({ +async function updateOurProfileViaLibSession( + { displayName, - profileUrl, - profileKey, priority, - }); + profileKey, + profileUrl, + sentAt, + }: { + sentAt: number; + displayName: string; + profileUrl: string | null; + profileKey: Uint8Array | null; + priority: number | null; + } // passing null means to not update the priority at all (used for legacy config message for now) +) { + await ProfileManager.updateOurProfileSync({ displayName, profileUrl, profileKey, priority }); await setLastProfileUpdateTimestamp(toNumber(sentAt)); - // do not trigger a signin by linking if the display name is empty + // do not trigger a sign in by linking if the display name is empty if (!isEmpty(displayName)) { trigger(configurationMessageReceived, displayName); } else { @@ -956,180 +1093,6 @@ async function updateOurProfileLegacyOrViaLibSession({ } } -async function handleGroupsAndContactsFromConfigMessageLegacy( - envelope: EnvelopePlus, - configMessage: SignalService.ConfigurationMessage -) { - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - if (userConfigLibsession && Registration.isDone()) { - return; - } - const envelopeTimestamp = toNumber(envelope.timestamp); - - // at some point, we made the hasSyncedInitialConfigurationItem item to have a value=true and a timestamp set. - // we can actually just use the timestamp as a boolean, as if it is set, we know we have synced the initial config - // but we still need to handle the case where the timestamp was set when the value is true (for backwards compatiblity, until we get rid of the config message legacy) - const lastConfigUpdate = await Data.getItemById(SettingsKey.hasSyncedInitialConfigurationItem); - - let lastConfigTimestamp: number | undefined; - if (isNumber(lastConfigUpdate?.value)) { - lastConfigTimestamp = lastConfigUpdate?.value; - } else if (isNumber((lastConfigUpdate as any)?.timestamp)) { - lastConfigTimestamp = (lastConfigUpdate as any)?.timestamp; // ugly, but we can remove it once we dropped support for legacy config message, see comment above - } - - const isNewerConfig = - !lastConfigTimestamp || (lastConfigTimestamp && lastConfigTimestamp < envelopeTimestamp); - - if (!isNewerConfig) { - window?.log?.info('Received outdated configuration message... 
Dropping message.'); - return; - } - - await Storage.put(SettingsKey.hasSyncedInitialConfigurationItem, envelopeTimestamp); - - void handleOpenGroupsFromConfigLegacy(configMessage.openGroups); - - if (configMessage.contacts?.length) { - await Promise.all( - configMessage.contacts.map(async c => handleContactFromConfigLegacy(c, envelope)) - ); - } -} - -/** - * Trigger a join for all open groups we are not already in. - * @param openGroups string array of open group urls - */ -const handleOpenGroupsFromConfigLegacy = async (openGroups: Array) => { - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - if (userConfigLibsession && Registration.isDone()) { - return; - } - const numberOpenGroup = openGroups?.length || 0; - for (let i = 0; i < numberOpenGroup; i++) { - const currentOpenGroupUrl = openGroups[i]; - const parsedRoom = parseOpenGroupV2(currentOpenGroupUrl); - if (!parsedRoom) { - continue; - } - const roomConvoId = getOpenGroupV2ConversationId(parsedRoom.serverUrl, parsedRoom.roomId); - if (!getConversationController().get(roomConvoId)) { - window?.log?.info( - `triggering join of public chat '${currentOpenGroupUrl}' from ConfigurationMessage` - ); - void joinOpenGroupV2WithUIEvents(currentOpenGroupUrl, false, true); - } - } -}; - -/** - * Handles adding of a contact and setting approval/block status - * @param contactReceived Contact to sync - */ -const handleContactFromConfigLegacy = async ( - contactReceived: SignalService.ConfigurationMessage.IContact, - envelope: EnvelopePlus -) => { - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - if (userConfigLibsession && Registration.isDone()) { - return; - } - try { - if (!contactReceived.publicKey?.length) { - return; - } - const contactConvo = await getConversationController().getOrCreateAndWait( - toHex(contactReceived.publicKey), - ConversationTypeEnum.PRIVATE - ); - const profileInDataMessage: SignalService.DataMessage.ILokiProfile = { - displayName: contactReceived.name, - profilePicture: contactReceived.profilePicture, - }; - - const existingActiveAt = contactConvo.get('active_at'); - if (!existingActiveAt || existingActiveAt === 0) { - contactConvo.set('active_at', toNumber(envelope.timestamp)); - } - - // checking for existence of field on protobuf - if (contactReceived.isApproved === true) { - if (!contactConvo.isApproved()) { - await contactConvo.setIsApproved(Boolean(contactReceived.isApproved)); - await contactConvo.addOutgoingApprovalMessage(toNumber(envelope.timestamp)); - } - - if (contactReceived.didApproveMe === true) { - // checking for existence of field on message - await contactConvo.setDidApproveMe(Boolean(contactReceived.didApproveMe)); - } - } - - // only set for explicit true/false values in case outdated sender doesn't have the fields - if (contactReceived.isBlocked === true) { - if (contactConvo.isIncomingRequest()) { - // handling case where restored device's declined message requests were getting restored - await ConversationInteraction.deleteAllMessagesByConvoIdNoConfirmation(contactConvo.id); - } - await BlockedNumberController.block(contactConvo.id); - } else if (contactReceived.isBlocked === false) { - await BlockedNumberController.unblockAll([contactConvo.id]); - } - - await ProfileManager.updateProfileOfContact( - contactConvo.id, - profileInDataMessage.displayName || undefined, - profileInDataMessage.profilePicture || null, - contactReceived.profileKey || null - ); - } catch (e) { - window?.log?.warn('failed to handle a 
new closed group from configuration message'); - } -}; - -/** - * This is the legacy way of handling incoming configuration message. - * Should not be used at all soon. - */ -async function handleConfigurationMessageLegacy( - envelope: EnvelopePlus, - configurationMessage: SignalService.ConfigurationMessage -): Promise { - // when the useSharedUtilForUserConfig flag is ON, we want only allow a legacy config message if we are registering a new user. - // this is to allow users linking a device to find their config message if they do not have a shared config message yet. - // the process of those messages is always done after the process of the shared config messages, so that's only a fallback. - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - if (userConfigLibsession && !isSignInByLinking()) { - window?.log?.info( - 'useSharedUtilForUserConfig is set, not handling config messages with "handleConfigurationMessageLegacy()"' - ); - await window.setSettingValue(SettingsKey.someDeviceOutdatedSyncing, true); - await removeFromCache(envelope); - return; - } - - window?.log?.info('Handling legacy configuration message'); - const ourPubkey = UserUtils.getOurPubKeyStrFromCache(); - if (!ourPubkey) { - return; - } - - if (envelope.source !== ourPubkey) { - window?.log?.info('Dropping configuration change from someone else than us.'); - await removeFromCache(envelope); - return; - } - - await handleGroupsAndContactsFromConfigMessageLegacy(envelope, configurationMessage); - await removeFromCache(envelope); -} - export const ConfigMessageHandler = { - handleConfigurationMessageLegacy, - handleConfigMessagesViaLibSession, + handleUserConfigMessagesViaLibSession, }; diff --git a/ts/receiver/contentMessage.ts b/ts/receiver/contentMessage.ts index 022dee13a6..8e227327bc 100644 --- a/ts/receiver/contentMessage.ts +++ b/ts/receiver/contentMessage.ts @@ -1,11 +1,11 @@ -import { compact, flatten, identity, isEmpty, isFinite, pickBy, toNumber } from 'lodash'; +import { compact, flatten, isEmpty, isFinite, toNumber } from 'lodash'; import { handleSwarmDataMessage } from './dataMessage'; import { EnvelopePlus } from './types'; import { SignalService } from '../protobuf'; import { KeyPrefixType, PubKey } from '../session/types'; -import { removeFromCache, updateCacheWithDecryptedContent } from './cache'; +import { IncomingMessageCache } from './cache'; import { Data } from '../data/data'; import { SettingsKey } from '../data/settings-key'; @@ -14,7 +14,7 @@ import { deleteMessagesFromSwarmAndMarkAsDeletedLocally, } from '../interactions/conversations/unsendingInteractions'; import { findCachedBlindedMatchOrLookupOnAllServers } from '../session/apis/open_group_api/sogsv3/knownBlindedkeys'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { concatUInt8Array, getSodiumRenderer } from '../session/crypto'; import { removeMessagePadding } from '../session/crypto/BufferPadding'; import { DisappearingMessages } from '../session/disappearing_messages'; @@ -22,16 +22,18 @@ import { ReadyToDisappearMsgUpdate } from '../session/disappearing_messages/type import { ProfileManager } from '../session/profile_manager/ProfileManager'; import { UserUtils } from '../session/utils'; import { perfEnd, perfStart } from '../session/utils/Performance'; -import { fromHexToArray, toHex } from '../session/utils/String'; +import { ed25519Str, fromHexToArray, toHex } from '../session/utils/String'; import { isUsFromCache 
} from '../session/utils/User'; import { assertUnreachable } from '../types/sqlSharedTypes'; import { BlockedNumberController } from '../util'; import { ReadReceipts } from '../util/readReceipts'; import { Storage } from '../util/storage'; -import { ContactsWrapperActions } from '../webworker/workers/browser/libsession_worker_interface'; +import { + ContactsWrapperActions, + MetaGroupWrapperActions, +} from '../webworker/workers/browser/libsession_worker_interface'; import { handleCallMessage } from './callMessage'; import { getAllCachedECKeyPair, sentAtMoreRecentThanWrapper } from './closedGroups'; -import { ConfigMessageHandler } from './configMessage'; import { ECKeyPair } from './keypairs'; import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../models/types'; @@ -41,21 +43,20 @@ export async function handleSwarmContentMessage( messageExpirationFromRetrieve: number | null ) { try { - const plaintext = await decrypt(envelope); + const decryptedForAll = await decrypt(envelope); - if (!plaintext) { - return; - } - if (plaintext instanceof ArrayBuffer && plaintext.byteLength === 0) { + if (!decryptedForAll || !decryptedForAll.decryptedContent || isEmpty(decryptedForAll)) { return; } + const sentAtTimestamp = toNumber(envelope.timestamp); + // swarm messages already comes with a timestamp in milliseconds, so this sentAtTimestamp is correct. // the sogs messages do not come as milliseconds but just seconds, so we override it await innerHandleSwarmContentMessage({ envelope, sentAtTimestamp, - plaintext, + contentDecrypted: decryptedForAll.decryptedContent, messageHash, messageExpirationFromRetrieve, }); @@ -64,8 +65,9 @@ export async function handleSwarmContentMessage( } } -async function decryptForClosedGroup(envelope: EnvelopePlus) { - // case .closedGroupCiphertext: for ios +async function decryptForClosedGroup( + envelope: EnvelopePlus +): Promise<{ decryptedContent: ArrayBuffer }> { window?.log?.info('received closed group message'); try { const hexEncodedGroupPublicKey = envelope.source; @@ -94,15 +96,19 @@ async function decryptForClosedGroup(envelope: EnvelopePlus) { const encryptionKeyPair = ECKeyPair.fromHexKeyPair(hexEncryptionKeyPair); // eslint-disable-next-line no-await-in-loop - decryptedContent = await decryptWithSessionProtocol( + const res = await decryptWithSessionProtocol( envelope, envelope.content, encryptionKeyPair, true ); - if (decryptedContent?.byteLength) { + if (res?.decryptedContent.byteLength) { + decryptedContent = res.decryptedContent; + break; } + decryptedContent = res.decryptedContent; + keyIndex++; } catch (e) { window?.log?.info( @@ -123,7 +129,8 @@ async function decryptForClosedGroup(envelope: EnvelopePlus) { } window?.log?.info('ClosedGroup Message decrypted successfully with keyIndex:', keyIndex); - return removeMessagePadding(decryptedContent); + const withoutPadding = removeMessagePadding(decryptedContent); + return { decryptedContent: withoutPadding }; } catch (e) { /** * If an error happened during the decoding, @@ -156,7 +163,7 @@ export async function decryptWithSessionProtocol( ciphertextObj: ArrayBuffer, x25519KeyPair: ECKeyPair, isClosedGroup?: boolean -): Promise { +): Promise<{ decryptedContent: ArrayBuffer }> { perfStart(`decryptWithSessionProtocol-${envelope.id}`); const recipientX25519PrivateKey = x25519KeyPair.privateKeyData; const hex = toHex(new Uint8Array(x25519KeyPair.publicKeyData)); @@ -218,7 +225,7 @@ export async function decryptWithSessionProtocol( } perfEnd(`decryptWithSessionProtocol-${envelope.id}`, 
'decryptWithSessionProtocol'); - return plaintext; + return { decryptedContent: plaintext }; } /** @@ -242,17 +249,13 @@ export async function decryptEnvelopeWithOurKey( userX25519KeyPair.privKey ); - // keep the await so the try catch works as expected - perfStart(`decryptUnidentifiedSender-${envelope.id}`); - - const retSessionProtocol = await decryptWithSessionProtocol( + const { decryptedContent } = await decryptWithSessionProtocol( envelope, envelope.content, ecKeyPair ); - const ret = removeMessagePadding(retSessionProtocol); - perfEnd(`decryptUnidentifiedSender-${envelope.id}`, 'decryptUnidentifiedSender'); + const ret = removeMessagePadding(decryptedContent); return ret; } catch (e) { @@ -261,41 +264,50 @@ export async function decryptEnvelopeWithOurKey( } } -async function decrypt(envelope: EnvelopePlus): Promise { +async function decrypt(envelope: EnvelopePlus): Promise<{ decryptedContent: ArrayBuffer } | null> { if (envelope.content.byteLength === 0) { throw new Error('Received an empty envelope.'); } - let plaintext: ArrayBuffer | null = null; + let decryptedContent: ArrayBuffer | null = null; switch (envelope.type) { // Only SESSION_MESSAGE and CLOSED_GROUP_MESSAGE are supported case SignalService.Envelope.Type.SESSION_MESSAGE: - plaintext = await decryptEnvelopeWithOurKey(envelope); + decryptedContent = await decryptEnvelopeWithOurKey(envelope); break; case SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE: - plaintext = await decryptForClosedGroup(envelope); + if (PubKey.is03Pubkey(envelope.source)) { + // groupv2 messages are decrypted way earlier than this via libsession, and what we get here is already decrypted + return { decryptedContent: envelope.content }; + } + // eslint-disable-next-line no-case-declarations + const res = await decryptForClosedGroup(envelope); + decryptedContent = res.decryptedContent; + break; default: assertUnreachable(envelope.type, `Unknown message type:${envelope.type}`); } - if (!plaintext) { + if (!decryptedContent) { // content could not be decrypted. - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return null; } perfStart(`updateCacheWithDecryptedContent-${envelope.id}`); - await updateCacheWithDecryptedContent(envelope, plaintext).catch((error: any) => { - window?.log?.error( - 'decrypt failed to save decrypted message contents to cache:', - error && error.stack ? error.stack : error - ); - }); + await IncomingMessageCache.updateCacheWithDecryptedContent({ envelope, decryptedContent }).catch( + error => { + window?.log?.error( + 'decrypt failed to save decrypted message contents to cache:', + error && error.stack ? error.stack : error + ); + } + ); perfEnd(`updateCacheWithDecryptedContent-${envelope.id}`, 'updateCacheWithDecryptedContent'); - return plaintext; + return { decryptedContent }; } async function shouldDropIncomingPrivateMessage( @@ -322,7 +334,7 @@ async function shouldDropIncomingPrivateMessage( // handle the `us` case first, as we will never find ourselves in the contacts wrapper. The NTS details are in the UserProfile wrapper. if (isUs) { - const us = getConversationController().get(envelope.source); + const us = ConvoHub.use().get(envelope.source); const ourPriority = us?.get('priority') || CONVERSATION_PRIORITIES.default; if (us && ourPriority <= CONVERSATION_PRIORITIES.hidden) { // if the wrapper data is more recent than this message and the NTS conversation is hidden, just drop this incoming message to avoid showing the NTS conversation again. 
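/* [Editor's note — illustrative sketch, not part of the diff above.] The decrypt helpers in contentMessage.ts now resolve to a small wrapper object ({ decryptedContent }) instead of a bare ArrayBuffer, so callers can tell "nothing to process" (null) apart from an empty buffer. The sketch below only restates that calling convention; the DecryptFn type and function name are hypothetical and used for illustration. */
type DecryptResult = { decryptedContent: ArrayBuffer } | null;
type DecryptFn<E> = (envelope: E) => Promise<DecryptResult>;

async function sketchHandleDecrypted<E>(decrypt: DecryptFn<E>, envelope: E): Promise<ArrayBuffer | null> {
  const result = await decrypt(envelope);
  if (!result || !result.decryptedContent.byteLength) {
    // nothing usable was decrypted: the envelope is expected to have been removed
    // from the incoming cache by the decrypt step itself
    return null;
  }
  // hand the raw bytes on, mirroring how innerHandleSwarmContentMessage receives contentDecrypted
  return result.decryptedContent;
}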
@@ -371,69 +383,103 @@ function shouldDropBlockedUserMessage( content: SignalService.Content, - groupPubkey: string + fromSwarmOf: string ): boolean { - // Even if the user is blocked, we should allow the message if: + // Even if the user is blocked, we should allow a group control message if: // - it is a group message AND // - the group exists already on the db (to not join a closed group created by a blocked user) AND // - the group is not blocked AND - // - the message is only control (no body/attachments/quote/groupInvitation/contact/preview) + // - the message is a LegacyControlMessage or GroupUpdateMessage + // In addition to the above, we also want to allow a groupUpdatePromote message (sent as a 1o1 message) - if (!groupPubkey) { + if (!fromSwarmOf) { return true; } - const groupConvo = getConversationController().get(groupPubkey); - if (!groupConvo || !groupConvo.isClosedGroup()) { + const convo = ConvoHub.use().get(fromSwarmOf); + if (!convo || !content.dataMessage || isEmpty(content.dataMessage)) { + // returning true means that we drop that message return true; } - if (groupConvo.isBlocked()) { + if (convo.isClosedGroup() && convo.isBlocked()) { + // when we especially blocked a group, we don't want to process anything from it return true; } - // first check that dataMessage is the only field set in the Content - let msgWithoutDataMessage = pickBy( - content, - (_value, key) => key !== 'dataMessage' && key !== 'toJSON' - ); - msgWithoutDataMessage = pickBy(msgWithoutDataMessage, identity); + const data = content.dataMessage as SignalService.DataMessage; // forcing it as we do know this field is set based on last line - const isMessageDataMessageOnly = isEmpty(msgWithoutDataMessage); - if (!isMessageDataMessageOnly) { + if (convo.isPrivate()) { + const isGroupV2PromoteMessage = !isEmpty( + content.dataMessage?.groupUpdateMessage?.promoteMessage + ); + if (isGroupV2PromoteMessage) { + // we want to allow a group v2 promote message sent by a blocked user (because that user is an admin of a group) + return false; + } + } + + if (!convo.isClosedGroup()) { + // 1o1 messages are handled above. + // if we get here and it's not part of a closed group, we should drop that message. + // it might be a message sent to a community from a user we've blocked return true; } - const data = content.dataMessage as SignalService.DataMessage; // forcing it as we do know this field is set based on last line - const isControlDataMessageOnly = - !data.body && - !data.preview?.length && - !data.attachments?.length && - !data.openGroupInvitation && - !data.quote; - - return !isControlDataMessageOnly; + const isLegacyGroupUpdateMessage = !isEmpty(data.closedGroupControlMessage); + + const isGroupV2UpdateMessage = !isEmpty(data.groupUpdateMessage); + + return !isLegacyGroupUpdateMessage && !isGroupV2UpdateMessage; +} + +async function dropIncomingGroupMessage(envelope: EnvelopePlus, sentAtTimestamp: number) { + try { + if (PubKey.is03Pubkey(envelope.source)) { + const infos = await MetaGroupWrapperActions.infoGet(envelope.source); + + if (!infos) { + return false; + } + + if ( + sentAtTimestamp && + ((infos.deleteAttachBeforeSeconds && + sentAtTimestamp <= infos.deleteAttachBeforeSeconds * 1000) || + (infos.deleteBeforeSeconds && sentAtTimestamp <= infos.deleteBeforeSeconds * 1000)) + ) { + window?.log?.info( + `Incoming message sent before the group ${ed25519Str(envelope.source)} deleteBeforeSeconds or deleteAttachBeforeSeconds.
Dropping it.` + ); + await IncomingMessageCache.removeFromCache(envelope); + return true; + } + } + } catch (e) { + window?.log?.warn( + `dropIncomingGroupMessage failed for group ${ed25519Str(envelope.source)} with `, + e.message + ); + } + return false; } export async function innerHandleSwarmContentMessage({ + contentDecrypted, envelope, messageHash, - plaintext, sentAtTimestamp, messageExpirationFromRetrieve, }: { envelope: EnvelopePlus; sentAtTimestamp: number; - plaintext: ArrayBuffer; + contentDecrypted: ArrayBuffer; messageHash: string; messageExpirationFromRetrieve: number | null; }): Promise { try { - perfStart(`SignalService.Content.decode-${envelope.id}`); window.log.info('innerHandleSwarmContentMessage'); - perfStart(`isBlocked-${envelope.id}`); - const content = SignalService.Content.decode(new Uint8Array(plaintext)); - perfEnd(`SignalService.Content.decode-${envelope.id}`, 'SignalService.Content.decode'); + const content = SignalService.Content.decode(new Uint8Array(contentDecrypted)); /** * senderIdentity is set ONLY if that message is a closed group message. @@ -445,15 +491,23 @@ export async function innerHandleSwarmContentMessage({ */ const blocked = BlockedNumberController.isBlocked(envelope.senderIdentity || envelope.source); - perfEnd(`isBlocked-${envelope.id}`, 'isBlocked'); if (blocked) { const envelopeSource = envelope.source; // We want to allow a blocked user message if that's a control message for a known group and the group is not blocked if (shouldDropBlockedUserMessage(content, envelopeSource)) { - window?.log?.info('Dropping blocked user message'); + window?.log?.info( + `Dropping blocked user message ${ed25519Str(envelope.senderIdentity || envelope.source)}` + ); return; } - window?.log?.info('Allowing group-control message only from blocked user'); + window?.log?.info( + `Allowing control/update message only from blocked user ${ed25519Str(envelope.senderIdentity)} in group: ${ed25519Str(envelope.source)}` + ); + } + + if (await dropIncomingGroupMessage(envelope, sentAtTimestamp)) { + // message removed from cache in `dropIncomingGroupMessage` already + return; } // if this is a direct message, envelope.senderIdentity is undefined @@ -462,7 +516,7 @@ export async function innerHandleSwarmContentMessage({ if (isPrivateConversationMessage) { if (await shouldDropIncomingPrivateMessage(sentAtTimestamp, envelope, content)) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } } @@ -471,7 +525,7 @@ export async function innerHandleSwarmContentMessage({ * For a closed group message, this holds the conversation with that specific user outside of the closed group. * For a private conversation message, this is just the conversation with that user */ - const senderConversationModel = await getConversationController().getOrCreateAndWait( + const senderConversationModel = await ConvoHub.use().getOrCreateAndWait( isPrivateConversationMessage ? 
envelope.source : envelope.senderIdentity, ConversationTypeEnum.PRIVATE ); @@ -481,7 +535,7 @@ export async function innerHandleSwarmContentMessage({ // For a private synced message, we need to make sure we have the conversation with the syncTarget if (isPrivateConversationMessage && content.dataMessage?.syncTarget) { - conversationModelForUIUpdate = await getConversationController().getOrCreateAndWait( + conversationModelForUIUpdate = await ConvoHub.use().getOrCreateAndWait( content.dataMessage.syncTarget, ConversationTypeEnum.PRIVATE ); @@ -492,11 +546,11 @@ export async function innerHandleSwarmContentMessage({ * For a private conversation message, this is just the conversation with that user */ if (!isPrivateConversationMessage) { - // this is a closed group message, we have a second conversation to make sure exists - conversationModelForUIUpdate = await getConversationController().getOrCreateAndWait( - envelope.source, - ConversationTypeEnum.GROUP - ); + // this is a group message, + // we have a second conversation to make sure exists: the group conversation + conversationModelForUIUpdate = PubKey.is03Pubkey(envelope.source) + ? await ConvoHub.use().getOrCreateAndWait(envelope.source, ConversationTypeEnum.GROUPV2) + : await ConvoHub.use().getOrCreateAndWait(envelope.source, ConversationTypeEnum.GROUP); } const expireUpdate = await DisappearingMessages.checkForExpireUpdateInContentMessage( @@ -509,7 +563,6 @@ export async function innerHandleSwarmContentMessage({ if (isEmpty(content.dataMessage.profileKey)) { content.dataMessage.profileKey = null; } - // TODO legacy messages support will be removed in a future release if (expireUpdate?.isDisappearingMessagesV2Released) { await DisappearingMessages.checkHasOutdatedDisappearingMessageClient( @@ -518,21 +571,19 @@ export async function innerHandleSwarmContentMessage({ expireUpdate ); if (expireUpdate.isLegacyConversationSettingMessage) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } } - - perfStart(`handleSwarmDataMessage-${envelope.id}`); await handleSwarmDataMessage({ envelope, sentAtTimestamp, rawDataMessage: content.dataMessage as SignalService.DataMessage, messageHash, senderConversationModel, - expireUpdate, + expireUpdate: expireUpdate || null, }); - perfEnd(`handleSwarmDataMessage-${envelope.id}`, 'handleSwarmDataMessage'); + return; } @@ -550,21 +601,7 @@ export async function innerHandleSwarmContentMessage({ perfEnd(`handleTypingMessage-${envelope.id}`, 'handleTypingMessage'); return; } - if (content.configurationMessage) { - // this one can be quite long (downloads profilePictures and everything), - // so do not await it - void ConfigMessageHandler.handleConfigurationMessageLegacy( - envelope, - content.configurationMessage as SignalService.ConfigurationMessage - ); - return; - } - if (content.sharedConfigMessage) { - window.log.warn('content.sharedConfigMessage are handled outside of the receiving pipeline'); - // this should never happen, but remove it from cache just in case something is messed up - await removeFromCache(envelope); - return; - } + if (content.dataExtractionNotification) { perfStart(`handleDataExtractionNotification-${envelope.id}`); @@ -583,19 +620,26 @@ export async function innerHandleSwarmContentMessage({ } if (content.unsendMessage) { await handleUnsendMessage(envelope, content.unsendMessage as SignalService.Unsend); + return; } if (content.callMessage) { await handleCallMessage(envelope, content.callMessage as SignalService.CallMessage, { 
expireDetails: expireUpdate, messageHash, }); + return; } if (content.messageRequestResponse) { await handleMessageRequestResponse( envelope, content.messageRequestResponse as SignalService.MessageRequestResponse ); + return; } + + // If we get here, we don't know how to handle that envelope. Probably a very old type of message, or something we don't support. + // There is not much we can do except drop it + await IncomingMessageCache.removeFromCache(envelope); } catch (e) { window?.log?.warn(e.message); } } @@ -634,7 +678,7 @@ async function handleReceiptMessage( } await Promise.all(results); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } async function handleTypingMessage( @@ -644,7 +688,7 @@ const { timestamp, action } = typingMessage; const { source } = envelope; - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); // We don't do anything with incoming typing messages if the setting is disabled if (!Storage.get(SettingsKey.settingsTypingIndicator)) { @@ -652,19 +696,19 @@ } if (envelope.timestamp && timestamp) { - const envelopeTimestamp = toNumber(envelope.timestamp); + const sentAtTimestamp = toNumber(envelope.timestamp); const typingTimestamp = toNumber(timestamp); - if (typingTimestamp !== envelopeTimestamp) { + if (typingTimestamp !== sentAtTimestamp) { window?.log?.warn( - `Typing message envelope timestamp (${envelopeTimestamp}) did not match typing timestamp (${typingTimestamp})` + `Typing message envelope timestamp (${sentAtTimestamp}) did not match typing timestamp (${typingTimestamp})` ); return; } } // typing message are only working with direct chats/ not groups - const conversation = getConversationController().get(source); + const conversation = ConvoHub.use().get(source); const started = action === SignalService.TypingMessage.Action.STARTED; @@ -689,19 +733,19 @@ async function handleUnsendMessage(envelope: EnvelopePlus, unsendMessage: Signal window?.log?.error( 'handleUnsendMessage: Dropping request as the author and the sender differs.'
); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (!unsendMessage) { window?.log?.error('handleUnsendMessage: Invalid parameters -- dropping message.'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } if (!timestamp) { window?.log?.error('handleUnsendMessage: Invalid timestamp -- dropping message'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -719,9 +763,9 @@ async function handleUnsendMessage(envelope: EnvelopePlus, unsendMessage: Signal // #region executing deletion if (messageHash && messageToDelete) { window.log.info('handleUnsendMessage: got a request to delete ', messageHash); - const conversation = getConversationController().get(messageToDelete.get('conversationId')); + const conversation = ConvoHub.use().get(messageToDelete.get('conversationId')); if (!conversation) { - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -739,7 +783,7 @@ async function handleUnsendMessage(envelope: EnvelopePlus, unsendMessage: Signal messageToDelete?.id ); } - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } /** @@ -749,14 +793,9 @@ async function handleMessageRequestResponse( envelope: EnvelopePlus, messageRequestResponse: SignalService.MessageRequestResponse ) { - const { isApproved } = messageRequestResponse; - if (!isApproved) { - await removeFromCache(envelope); - return; - } - if (!messageRequestResponse) { + if (!messageRequestResponse || !messageRequestResponse.isApproved) { window?.log?.error('handleMessageRequestResponse: Invalid parameters -- dropping message.'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -765,30 +804,40 @@ async function handleMessageRequestResponse( const convosToMerge = findCachedBlindedMatchOrLookupOnAllServers(envelope.source, sodium); const unblindedConvoId = envelope.source; - const conversationToApprove = await getConversationController().getOrCreateAndWait( + if (!PubKey.is05Pubkey(unblindedConvoId)) { + window?.log?.warn( + 'handleMessageRequestResponse: Invalid unblindedConvoId -- dropping message.' 
+ ); + await IncomingMessageCache.removeFromCache(envelope); + return; + } + + const conversationToApprove = await ConvoHub.use().getOrCreateAndWait( unblindedConvoId, ConversationTypeEnum.PRIVATE ); - let mostRecentActiveAt = Math.max(...compact(convosToMerge.map(m => m.get('active_at')))); + let mostRecentActiveAt = Math.max(...compact(convosToMerge.map(m => m.getActiveAt()))); if (!isFinite(mostRecentActiveAt) || mostRecentActiveAt <= 0) { mostRecentActiveAt = toNumber(envelope.timestamp); } + const previousApprovedMe = conversationToApprove.didApproveMe(); + await conversationToApprove.setDidApproveMe(true, false); + conversationToApprove.set({ active_at: mostRecentActiveAt, - isApproved: true, - didApproveMe: true, }); await conversationToApprove.unhideIfNeeded(false); + await conversationToApprove.commit(); if (convosToMerge.length) { // merge fields we care by hand conversationToApprove.set({ - profileKey: convosToMerge[0].get('profileKey'), - displayNameInProfile: convosToMerge[0].get('displayNameInProfile'), + profileKey: convosToMerge[0].getProfileKey(), + displayNameInProfile: convosToMerge[0].getRealSessionUsername(), avatarInProfile: convosToMerge[0].get('avatarInProfile'), - avatarPointer: convosToMerge[0].get('avatarPointer'), // don't set the avatar pointer + avatarPointer: convosToMerge[0].getAvatarPointer(), // don't set the avatar pointer // nickname might be set already in conversationToApprove, so don't overwrite it }); @@ -823,7 +872,7 @@ async function handleMessageRequestResponse( for (let index = 0; index < convosToMerge.length; index++) { const element = convosToMerge[index]; // eslint-disable-next-line no-await-in-loop - await getConversationController().deleteBlindedContact(element.id); + await ConvoHub.use().deleteBlindedContact(element.id); } } @@ -836,24 +885,22 @@ async function handleMessageRequestResponse( ); } - if (!conversationToApprove || conversationToApprove.didApproveMe()) { - await conversationToApprove?.commit(); - window?.log?.info( - 'Conversation already contains the correct value for the didApproveMe field.' - ); - await removeFromCache(envelope); + if (previousApprovedMe) { + await conversationToApprove.commit(); + window.log.info( + `convo ${ed25519Str(conversationToApprove.id)} previousApprovedMe is already true. 
Nothing to do ` + ); + await IncomingMessageCache.removeFromCache(envelope); return; } - await conversationToApprove.setDidApproveMe(true, true); // Conversation was not approved before so a sync is needed await conversationToApprove.addIncomingApprovalMessage( toNumber(envelope.timestamp), unblindedConvoId ); - - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); } /** @@ -877,9 +924,9 @@ export async function handleDataExtractionNotification({ const { type, timestamp: referencedAttachment } = dataExtractionNotification; const { source, timestamp } = envelope; - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); - const convo = getConversationController().get(source); + const convo = ConvoHub.use().get(source); if (!convo || !convo.isPrivate()) { window?.log?.info('Got DataNotification for unknown or non-private convo'); @@ -892,13 +939,13 @@ export async function handleDataExtractionNotification({ return; } - const envelopeTimestamp = toNumber(timestamp); + const sentAtTimestamp = toNumber(timestamp); const referencedAttachmentTimestamp = toNumber(referencedAttachment); let created = await convo.addSingleIncomingMessage({ source, messageHash, - sent_at: envelopeTimestamp, + sent_at: sentAtTimestamp, dataExtractionNotification: { type, referencedAttachmentTimestamp, // currently unused diff --git a/ts/receiver/dataMessage.ts b/ts/receiver/dataMessage.ts index 28dc48bb20..bc6c253dd3 100644 --- a/ts/receiver/dataMessage.ts +++ b/ts/receiver/dataMessage.ts @@ -1,16 +1,17 @@ /* eslint-disable no-param-reassign */ -import { isEmpty, isFinite, noop, omit, toNumber } from 'lodash'; +import { isEmpty, noop, omit, toNumber } from 'lodash'; import { SignalService } from '../protobuf'; -import { removeFromCache } from './cache'; +import { IncomingMessageCache } from './cache'; +import { getEnvelopeId } from './common'; import { EnvelopePlus } from './types'; import { Data } from '../data/data'; import { ConversationModel } from '../models/conversation'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { PubKey } from '../session/types'; import { StringUtils, UserUtils } from '../session/utils'; -import { handleClosedGroupControlMessage } from './closedGroups'; +import { handleLegacyClosedGroupControlMessage } from './closedGroups'; import { handleMessageJob, toRegularMessage } from './queuedJob'; import { MessageModel } from '../models/message'; @@ -19,12 +20,13 @@ import { createSwarmMessageSentFromUs, } from '../models/messageFactory'; import { DisappearingMessages } from '../session/disappearing_messages'; -import { DisappearingMessageUpdate } from '../session/disappearing_messages/types'; +import { WithDisappearingMessageUpdate } from '../session/disappearing_messages/types'; import { ProfileManager } from '../session/profile_manager/ProfileManager'; import { isUsFromCache } from '../session/utils/User'; import { Action, Reaction } from '../types/Reaction'; import { toLogFormat } from '../types/attachments/Errors'; import { Reactions } from '../util/reactions'; +import { GroupV2Receiver } from './groupv2/handleGroupV2Message'; import { ConversationTypeEnum } from '../models/types'; function cleanAttachment(attachment: any) { @@ -39,19 +41,21 @@ function cleanAttachment(attachment: any) { }; } -function cleanAttachments(decrypted: SignalService.DataMessage) { - const { quote } = decrypted; +function cleanAttachments(decryptedDataMessage: 
SignalService.DataMessage) { + const { quote } = decryptedDataMessage; // Here we go from binary to string/base64 in all AttachmentPointer digest/key fields - // we do not care about group on Session + // we do not care about group on Session Desktop - decrypted.group = null; + decryptedDataMessage.group = null; // when receiving a message we get keys of attachment as buffer, but we override the data with the decrypted string instead. // TODO it would be nice to get rid of that as any here, but not in this PR - decrypted.attachments = (decrypted.attachments || []).map(cleanAttachment) as any; - decrypted.preview = (decrypted.preview || []).map((item: any) => { + decryptedDataMessage.attachments = (decryptedDataMessage.attachments || []).map( + cleanAttachment + ) as any; + decryptedDataMessage.preview = (decryptedDataMessage.preview || []).map((item: any) => { const { image } = item; if (!image) { @@ -98,10 +102,7 @@ export function messageHasVisibleContent(message: SignalService.DataMessage) { ); } -export function cleanIncomingDataMessage( - rawDataMessage: SignalService.DataMessage, - envelope?: EnvelopePlus -) { +export function cleanIncomingDataMessage(rawDataMessage: SignalService.DataMessage) { const FLAGS = SignalService.DataMessage.Flags; // Now that its decrypted, validate the message and clean it up for consumer @@ -133,11 +134,6 @@ export function cleanIncomingDataMessage( } cleanAttachments(rawDataMessage); - // if the decrypted dataMessage timestamp is not set, copy the one from the envelope - if (!isFinite(rawDataMessage?.timestamp) && envelope) { - rawDataMessage.timestamp = envelope.timestamp; - } - return rawDataMessage; } @@ -152,7 +148,6 @@ export function cleanIncomingDataMessage( * * envelope.source is our pubkey (our other device has the same pubkey as us) * * dataMessage.syncTarget is either the group public key OR the private conversation this message is about. */ - export async function handleSwarmDataMessage({ envelope, messageHash, @@ -160,20 +155,35 @@ export async function handleSwarmDataMessage({ senderConversationModel, sentAtTimestamp, expireUpdate, -}: { +}: WithDisappearingMessageUpdate & { envelope: EnvelopePlus; sentAtTimestamp: number; rawDataMessage: SignalService.DataMessage; messageHash: string; senderConversationModel: ConversationModel; - expireUpdate?: DisappearingMessageUpdate; }): Promise { window.log.info('handleSwarmDataMessage'); - const cleanDataMessage = cleanIncomingDataMessage(rawDataMessage, envelope); - // we handle group updates from our other devices in handleClosedGroupControlMessage() + const cleanDataMessage = cleanIncomingDataMessage(rawDataMessage); + + if (cleanDataMessage.groupUpdateMessage) { + await GroupV2Receiver.handleGroupUpdateMessage({ + signatureTimestamp: sentAtTimestamp, + updateMessage: rawDataMessage.groupUpdateMessage as SignalService.GroupUpdateMessage, + source: envelope.source, + senderIdentity: envelope.senderIdentity, + expireUpdate, + messageHash, + }); + // Groups update should always be able to be decrypted as we get the keys before trying to decrypt them. + // If decryption failed once, it will keep failing, so no need to keep it in the cache. 
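+      // (the group update above has been awaited, so it is fine to drop the cached envelope at this point)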
+ await IncomingMessageCache.removeFromCache({ id: envelope.id }); + return; + } + // we handle legacy group updates from our other devices in handleLegacyClosedGroupControlMessage() if (cleanDataMessage.closedGroupControlMessage) { - await handleClosedGroupControlMessage( + // TODO DEPRECATED + await handleLegacyClosedGroupControlMessage( envelope, cleanDataMessage.closedGroupControlMessage as SignalService.DataMessage.ClosedGroupControlMessage, expireUpdate || null @@ -198,7 +208,7 @@ export async function handleSwarmDataMessage({ if (isSyncedMessage && !isMe) { window?.log?.warn('Got a sync message from someone else than me. Dropping it.'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } const convoIdToAddTheMessageTo = PubKey.removeTextSecurePrefixIfNeeded( @@ -206,10 +216,10 @@ export async function handleSwarmDataMessage({ ); const isGroupMessage = !!envelope.senderIdentity; - const isGroupV3Message = isGroupMessage && PubKey.isClosedGroupV3(envelope.source); + const isGroupV2Message = isGroupMessage && PubKey.is03Pubkey(envelope.source); let typeOfConvo = ConversationTypeEnum.PRIVATE; - if (isGroupV3Message) { - typeOfConvo = ConversationTypeEnum.GROUPV3; + if (isGroupV2Message) { + typeOfConvo = ConversationTypeEnum.GROUPV2; } else if (isGroupMessage) { typeOfConvo = ConversationTypeEnum.GROUP; } @@ -219,7 +229,7 @@ export async function handleSwarmDataMessage({ ); // remove the prefix from the source object so this is correct for all other - const convoToAddMessageTo = await getConversationController().getOrCreateAndWait( + const convoToAddMessageTo = await ConvoHub.use().getOrCreateAndWait( convoIdToAddTheMessageTo, typeOfConvo ); @@ -240,13 +250,14 @@ export async function handleSwarmDataMessage({ } if (!messageHasVisibleContent(cleanDataMessage)) { - await removeFromCache(envelope); + window?.log?.warn(`Message ${getEnvelopeId(envelope)} ignored; it was empty`); + await IncomingMessageCache.removeFromCache(envelope); return; } if (!convoIdToAddTheMessageTo) { window?.log?.error('We cannot handle a message without a conversationId'); - await removeFromCache(envelope); + await IncomingMessageCache.removeFromCache(envelope); return; } @@ -282,7 +293,7 @@ export async function handleSwarmDataMessage({ cleanDataMessage, convoToAddMessageTo, // eslint-disable-next-line @typescript-eslint/no-misused-promises - () => removeFromCache(envelope) + () => IncomingMessageCache.removeFromCache(envelope) ); } diff --git a/ts/receiver/groupv2/handleGroupV2Message.ts b/ts/receiver/groupv2/handleGroupV2Message.ts new file mode 100644 index 0000000000..1ea912b581 --- /dev/null +++ b/ts/receiver/groupv2/handleGroupV2Message.ts @@ -0,0 +1,754 @@ +import { GroupPubkeyType, PubkeyType, WithGroupPubkey } from 'libsession_util_nodejs'; +import { isEmpty, isFinite, isNumber } from 'lodash'; +import { Data } from '../../data/data'; +import { deleteAllMessagesByConvoIdNoConfirmation } from '../../interactions/conversationInteractions'; +import { deleteMessagesFromSwarmOnly } from '../../interactions/conversations/unsendingInteractions'; +import { ConversationTypeEnum } from '../../models/types'; +import { HexString } from '../../node/hexStrings'; +import { SignalService } from '../../protobuf'; +import { getSwarmPollingInstance } from '../../session/apis/snode_api'; +import { ConvoHub } from '../../session/conversations'; +import { getSodiumRenderer } from '../../session/crypto'; +import { WithDisappearingMessageUpdate } from 
'../../session/disappearing_messages/types'; +import { ClosedGroup } from '../../session/group/closed-group'; +import { PubKey } from '../../session/types'; +import { WithMessageHash } from '../../session/types/with'; +import { UserUtils } from '../../session/utils'; +import { sleepFor } from '../../session/utils/Promise'; +import { ed25519Str, stringToUint8Array } from '../../session/utils/String'; +import { PreConditionFailed } from '../../session/utils/errors'; +import { UserSync } from '../../session/utils/job_runners/jobs/UserSyncJob'; +import { LibSessionUtil } from '../../session/utils/libsession/libsession_utils'; +import { SessionUtilConvoInfoVolatile } from '../../session/utils/libsession/libsession_utils_convo_info_volatile'; +import { groupInfoActions } from '../../state/ducks/metaGroups'; +import { toFixedUint8ArrayOfLength } from '../../types/sqlSharedTypes'; +import { BlockedNumberController } from '../../util'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../webworker/workers/browser/libsession_worker_interface'; +import { sendInviteResponseToGroup } from '../../session/sending/group/GroupInviteResponse'; + +type WithSignatureTimestamp = { signatureTimestamp: number }; +type WithAuthor = { author: PubkeyType }; + +type WithUncheckedSource = { source: string }; +type WithUncheckedSenderIdentity = { senderIdentity: string }; + +type GroupInviteDetails = { + inviteMessage: SignalService.GroupUpdateInviteMessage; +} & WithSignatureTimestamp & + WithAuthor; + +type GroupUpdateGeneric = { + change: Omit; +} & WithSignatureTimestamp & + WithGroupPubkey & + WithAuthor & + WithDisappearingMessageUpdate; + +type GroupUpdateDetails = { + updateMessage: SignalService.GroupUpdateMessage; +} & WithSignatureTimestamp; + +async function getInitializedGroupObject({ + groupPk, + groupName, + inviterIsApproved, + groupSecretKey, +}: { + groupPk: GroupPubkeyType; + groupName: string; + inviterIsApproved: boolean; + groupSecretKey: Uint8Array | null; +}) { + let found = await UserGroupsWrapperActions.getGroup(groupPk); + const wasKicked = found?.kicked || false; + + if (!found) { + found = { + authData: null, + joinedAtSeconds: Date.now(), + name: groupName, + priority: 0, + pubkeyHex: groupPk, + secretKey: null, + kicked: false, + invitePending: true, + destroyed: false, + }; + } + + found.name = groupName; + if (groupSecretKey && !isEmpty(groupSecretKey)) { + found.secretKey = groupSecretKey; + } + + if (inviterIsApproved) { + // pre approve invite to groups when we've already approved the person who invited us + found.invitePending = false; + } else if (wasKicked) { + // when we were kicked and reinvited by someone we do not trust, this conversation should go in the message request. + found.invitePending = true; + } + + if (found.invitePending) { + // we also need to update the DB model, because we like duplicating things + await ConvoHub.use().get(groupPk)?.setIsApproved(false, true); + } + + return { found, wasKicked }; +} + +async function handleGroupUpdateInviteMessage({ + inviteMessage, + author, + signatureTimestamp, +}: GroupInviteDetails) { + const groupPk = inviteMessage.groupSessionId; + if (!PubKey.is03Pubkey(groupPk)) { + return; + } + + if (BlockedNumberController.isBlocked(author)) { + window.log.info( + `received invite to group ${ed25519Str(groupPk)} by blocked user:${ed25519Str( + author + )}... 
dropping it` + ); + return; + } + + const authorIsApproved = ConvoHub.use().get(author)?.isApproved() || false; + window.log.info( + `handleGroupInviteMessage for ${ed25519Str(groupPk)}, authorIsApproved:${authorIsApproved}` + ); + + const sigValid = await verifySig({ + pubKey: HexString.fromHexStringNoPrefix(groupPk), + signature: inviteMessage.adminSignature, + data: stringToUint8Array(`INVITE${UserUtils.getOurPubKeyStrFromCache()}${signatureTimestamp}`), + }); + + if (!sigValid) { + window.log.warn('received group invite with invalid signature. dropping'); + return; + } + + window.log.debug(`received invite to group ${ed25519Str(groupPk)} by user:${ed25519Str(author)}`); + + const convo = await ConvoHub.use().getOrCreateAndWait(groupPk, ConversationTypeEnum.GROUPV2); + convo.set({ + active_at: signatureTimestamp, + didApproveMe: true, + conversationIdOrigin: author, + }); + + if (inviteMessage.name && isEmpty(convo.getRealSessionUsername())) { + convo.set({ + displayNameInProfile: inviteMessage.name, + }); + } + const userEd25519Secretkey = (await UserUtils.getUserED25519KeyPairBytes()).privKeyBytes; + + const { found, wasKicked } = await getInitializedGroupObject({ + groupPk, + groupName: inviteMessage.name, + groupSecretKey: null, + inviterIsApproved: authorIsApproved, + }); + + // not sure if we should drop it, or set it again? They should be the same anyway + found.authData = inviteMessage.memberAuthData; + + await UserGroupsWrapperActions.setGroup(found); + await UserGroupsWrapperActions.markGroupInvited(groupPk); + // force markedAsUnread to be true so it shows the unread banner (we only show the banner if there are unread messages on at least one msg/group request) + await convo.markAsUnread(true, false); + await convo.commit(); + + await SessionUtilConvoInfoVolatile.insertConvoFromDBIntoWrapperAndRefresh(convo.id); + + if (wasKicked && !found.kicked) { + // we have been reinvited to a group which we had been kicked from. + // Let's empty the conversation again to remove any "you were removed from the group" control message + await deleteAllMessagesByConvoIdNoConfirmation(groupPk); + } + + await MetaGroupWrapperActions.init(groupPk, { + metaDumped: null, + groupEd25519Secretkey: null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(userEd25519Secretkey, 64).buffer, + groupEd25519Pubkey: toFixedUint8ArrayOfLength(HexString.fromHexStringNoPrefix(groupPk), 32) + .buffer, + }); + try { + const verified = await MetaGroupWrapperActions.swarmVerifySubAccount( + groupPk, + inviteMessage.memberAuthData + ); + if (!verified) { + throw new Error('subaccount failed to verify'); + } + } catch (e) { + window.log.warn(`swarmVerifySubAccount failed with: ${e.message}`); + } + + await LibSessionUtil.saveDumpsToDb(UserUtils.getOurPubKeyStrFromCache()); + await UserSync.queueNewJobIfNeeded(); + if (!found.invitePending) { + // if this group should already be polling based on if that author is pre-approved or we've already approved that group from another device. 
+ getSwarmPollingInstance().addGroupId(groupPk, async () => { + // we need to do a first poll to fetch the keys etc before we can send our invite response + // this is pretty hacky, but also an admin seeing a message from that user in the group will mark it as not pending anymore + await sleepFor(2000); + await sendInviteResponseToGroup({ groupPk }); + }); + } +} + +async function verifySig({ + data, + pubKey, + signature, +}: { + data: Uint8Array; + signature: Uint8Array; + pubKey: Uint8Array; +}) { + const sodium = await getSodiumRenderer(); + return sodium.crypto_sign_verify_detached(signature, data, pubKey); +} + +async function handleGroupInfoChangeMessage({ + change, + groupPk, + signatureTimestamp, + author, + expireUpdate, +}: GroupUpdateGeneric) { + const sigValid = await verifySig({ + pubKey: HexString.fromHexStringNoPrefix(groupPk), + signature: change.adminSignature, + data: stringToUint8Array(`INFO_CHANGE${change.type}${signatureTimestamp}`), + }); + window.log.info(`handleGroupInfoChangeMessage for ${ed25519Str(groupPk)}`); + + if (!sigValid) { + window.log.warn('received group info change with invalid signature. dropping'); + return; + } + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + return; + } + + switch (change.type) { + case SignalService.GroupUpdateInfoChangeMessage.Type.NAME: { + await ClosedGroup.addUpdateMessage({ + convo, + diff: { type: 'name', newName: change.updatedName }, + sender: author, + sentAt: signatureTimestamp, + expireUpdate, + markAlreadySent: true, + }); + + break; + } + case SignalService.GroupUpdateInfoChangeMessage.Type.AVATAR: { + await ClosedGroup.addUpdateMessage({ + convo, + diff: { type: 'avatarChange' }, + sender: author, + sentAt: signatureTimestamp, + expireUpdate, + markAlreadySent: true, + }); + break; + } + case SignalService.GroupUpdateInfoChangeMessage.Type.DISAPPEARING_MESSAGES: { + const newTimerSeconds = change.updatedExpiration; + if (isNumber(newTimerSeconds) && isFinite(newTimerSeconds) && newTimerSeconds >= 0) { + await convo.updateExpireTimer({ + providedExpireTimer: newTimerSeconds, + providedSource: author, + providedDisappearingMode: newTimerSeconds > 0 ? 'deleteAfterSend' : 'off', + sentAt: signatureTimestamp, + fromCurrentDevice: false, + fromSync: false, + fromConfigMessage: false, + }); + } + break; + } + default: + return; + } + + convo.set({ + active_at: signatureTimestamp, + }); +} + +async function handleGroupMemberChangeMessage({ + change, + groupPk, + signatureTimestamp, + author, + expireUpdate, +}: GroupUpdateGeneric) { + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + return; + } + window.log.info(`handleGroupMemberChangeMessage for ${ed25519Str(groupPk)}`); + + const sigValid = await verifySig({ + pubKey: HexString.fromHexStringNoPrefix(groupPk), + signature: change.adminSignature, + data: stringToUint8Array(`MEMBER_CHANGE${change.type}${signatureTimestamp}`), + }); + if (!sigValid) { + window.log.warn('received group member change with invalid signature. 
dropping'); + return; + } + const filteredMemberChange = change.memberSessionIds.filter(PubKey.is05Pubkey); + + if (!filteredMemberChange) { + window.log.info('returning groupUpdate of member change without associated members...'); + + return; + } + const sharedDetails = { + convo, + sender: author, + sentAt: signatureTimestamp, + expireUpdate, + markAlreadySent: true, + }; + + switch (change.type) { + case SignalService.GroupUpdateMemberChangeMessage.Type.ADDED: { + await ClosedGroup.addUpdateMessage({ + diff: { type: 'add', added: filteredMemberChange, withHistory: change.historyShared }, + ...sharedDetails, + }); + + break; + } + case SignalService.GroupUpdateMemberChangeMessage.Type.REMOVED: { + await ClosedGroup.addUpdateMessage({ + diff: { type: 'kicked', kicked: filteredMemberChange }, + ...sharedDetails, + }); + break; + } + case SignalService.GroupUpdateMemberChangeMessage.Type.PROMOTED: { + await ClosedGroup.addUpdateMessage({ + diff: { type: 'promoted', promoted: filteredMemberChange }, + ...sharedDetails, + }); + break; + } + default: + return; + } + + convo.set({ + active_at: signatureTimestamp, + }); +} + +async function handleGroupMemberLeftMessage({ + groupPk, + author, +}: GroupUpdateGeneric) { + // No need to verify sig, the author is already verified with the libsession.decrypt() + const convo = ConvoHub.use().get(groupPk); + if (!convo || !PubKey.is05Pubkey(author)) { + return; + } + window.log.info(`handleGroupMemberLeftMessage for ${ed25519Str(groupPk)}`); + + // this does nothing if we are not an admin + window.inboxStore?.dispatch( + groupInfoActions.handleMemberLeftMessage({ + groupPk, + memberLeft: author, + }) as any + ); +} + +async function handleGroupUpdateMemberLeftNotificationMessage({ + groupPk, + signatureTimestamp, + author, + expireUpdate, +}: GroupUpdateGeneric) { + // No need to verify sig, the author is already verified with the libsession.decrypt() + const convo = ConvoHub.use().get(groupPk); + if (!convo || !PubKey.is05Pubkey(author)) { + return; + } + window.log.info(`handleGroupUpdateMemberLeftNotificationMessage for ${ed25519Str(groupPk)}`); + + await ClosedGroup.addUpdateMessage({ + convo, + diff: { type: 'left', left: [author] }, + sender: author, + sentAt: signatureTimestamp, + expireUpdate, + markAlreadySent: true, + }); + + convo.set({ + active_at: signatureTimestamp, + }); +} + +async function handleGroupDeleteMemberContentMessage({ + groupPk, + signatureTimestamp, + change, + author, +}: GroupUpdateGeneric) { + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + return; + } + window.log.info(`handleGroupDeleteMemberContentMessage for ${ed25519Str(groupPk)}`); + + /** + * When handling a GroupUpdateDeleteMemberContentMessage we need to do a few things. + * When `adminSignature` is empty, + * 1. we only delete the messageHashes which are in the change.messageHashes AND sent by that same author. + * When `adminSignature` is not empty and valid, + * 2. we delete all the messages in the group sent by any of change.memberSessionIds AND + * 3. we delete all the messageHashes in the conversation matching the change.messageHashes (even if not from the right sender) + * + * Note: we never fully delete those messages locally, but only empty them and mark them as deleted with the + * "This message was deleted" placeholder. + * Eventually, we will be able to delete those "deleted but kept locally" messages with placeholders.
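+   * For example: a request with no admin signature can only clear the listed hashes sent by that same author, while a valid admin request clears messages from any of the listed members as well as any of the listed hashes.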
+   */ + + // no adminSignature: this was sent by a non-admin user + if (!change.adminSignature || isEmpty(change.adminSignature)) { + // this is step 1. + const messageModels = await Data.findAllMessageHashesInConversationMatchingAuthor({ + author, + groupPk, + messageHashes: change.messageHashes, + signatureTimestamp, + }); + + // we don't want to hang for too long here + // while processing the handleGroupDeleteMemberContentMessage itself + // (we are running on the receiving pipeline here) + // so network calls are not allowed. + for (let index = 0; index < messageModels.length; index++) { + const messageModel = messageModels[index]; + try { + // eslint-disable-next-line no-await-in-loop + await messageModel.markAsDeleted(); + } catch (e) { + window.log.warn( + `handleGroupDeleteMemberContentMessage markAsDeleted non-admin of ${messageModel.getMessageHash()} failed with`, + e.message + ); + } + } + convo.updateLastMessage(); + + return; + } + + // else case: we have an admin signature to verify + + const sigValid = await verifySig({ + pubKey: HexString.fromHexStringNoPrefix(groupPk), + signature: change.adminSignature, + data: stringToUint8Array( + `DELETE_CONTENT${signatureTimestamp}${change.memberSessionIds.join('')}${change.messageHashes.join('')}` + ), + }); + + if (!sigValid) { + window.log.warn('received group member delete content with invalid signature. dropping'); + return; + } + + const toRemove = change.memberSessionIds.filter(PubKey.is05Pubkey); + + const modelsBySenders = await Data.findAllMessageFromSendersInConversation({ + groupPk, + toRemove, + signatureTimestamp, + }); // this is step 2. + const modelsByHashes = await Data.findAllMessageHashesInConversation({ + groupPk, + messageHashes: change.messageHashes, + signatureTimestamp, + }); // this is step 3. + + // we don't want to hang for too long here + // while processing the handleGroupDeleteMemberContentMessage itself + // (we are running on the receiving pipeline here) + // so network calls are not allowed. + const mergedModels = modelsByHashes.concat(modelsBySenders); + for (let index = 0; index < mergedModels.length; index++) { + const messageModel = mergedModels[index]; + try { + // eslint-disable-next-line no-await-in-loop + await messageModel.markAsDeleted(); + } catch (e) { + window.log.warn( + `handleGroupDeleteMemberContentMessage markAsDeleted non-admin of ${messageModel.getMessageHash()} failed with`, + e.message + ); + } + } + convo.updateLastMessage(); +} + +async function handleGroupUpdateInviteResponseMessage({ + groupPk, + change, + author, +}: Omit< + GroupUpdateGeneric, + 'signatureTimestamp' | 'expireUpdate' +>) { + // no sig verify for this type of message + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + return; + } + window.log.info(`handleGroupUpdateInviteResponseMessage for ${ed25519Str(groupPk)}`); + + if (!change.isApproved) { + window.log.info('got inviteResponse but isApproved is false.
Dropping'); + return; + } + + window.inboxStore?.dispatch( + groupInfoActions.inviteResponseReceived({ groupPk, member: author }) as any + ); +} + +async function handleGroupUpdatePromoteMessage({ + change, + author, + signatureTimestamp, +}: Omit, 'groupPk'>) { + const seed = change.groupIdentitySeed; + const sodium = await getSodiumRenderer(); + const groupKeypair = sodium.crypto_sign_seed_keypair(seed); + + const groupPk = `03${HexString.toHexString(groupKeypair.publicKey)}` as GroupPubkeyType; + // we can be invited via a GroupUpdatePromoteMessage as an admin right away, + // so we potentially need to deal with part of the invite process here too. + + if (BlockedNumberController.isBlocked(author)) { + window.log.info( + `received promote to group ${ed25519Str(groupPk)} by blocked user:${ed25519Str( + author + )}... dropping it` + ); + return; + } + + const authorIsApproved = ConvoHub.use().get(author)?.isApproved() || false; + window.log.info( + `received promote to group ${ed25519Str(groupPk)} by author:${ed25519Str(author)}. authorIsApproved:${authorIsApproved} ` + ); + + const convo = await ConvoHub.use().getOrCreateAndWait(groupPk, ConversationTypeEnum.GROUPV2); + convo.set({ + active_at: signatureTimestamp, + didApproveMe: true, + conversationIdOrigin: author, + }); + + if (change.name && isEmpty(convo.getRealSessionUsername())) { + convo.set({ + displayNameInProfile: change.name, + }); + } + const userEd25519Secretkey = (await UserUtils.getUserED25519KeyPairBytes()).privKeyBytes; + + const { found, wasKicked } = await getInitializedGroupObject({ + groupPk, + groupName: change.name, + groupSecretKey: groupKeypair.privateKey, + inviterIsApproved: authorIsApproved, + }); + + await UserGroupsWrapperActions.setGroup(found); + // force markedAsUnread to be true so it shows the unread banner (we only show the banner if there are unread messages on at least one msg/group request) + await convo.markAsUnread(true, false); + await convo.commit(); + + await SessionUtilConvoInfoVolatile.insertConvoFromDBIntoWrapperAndRefresh(convo.id); + + if (wasKicked) { + // we have been reinvited to a group which we had been kicked from. + // Let's empty the conversation again to remove any "you were removed from the group" control message + await deleteAllMessagesByConvoIdNoConfirmation(groupPk); + } + try { + await MetaGroupWrapperActions.init(groupPk, { + metaDumped: null, + groupEd25519Secretkey: groupKeypair.privateKey, + userEd25519Secretkey: toFixedUint8ArrayOfLength(userEd25519Secretkey, 64).buffer, + groupEd25519Pubkey: toFixedUint8ArrayOfLength(HexString.fromHexStringNoPrefix(groupPk), 32) + .buffer, + }); + } catch (e) { + window.log.warn( + `handleGroupUpdatePromoteMessage: init of ${ed25519Str(groupPk)} failed with ${e.message}. Trying to just load admin keys` + ); + try { + await MetaGroupWrapperActions.loadAdminKeys(groupPk, groupKeypair.privateKey); + } catch (e2) { + window.log.warn( + `handleGroupUpdatePromoteMessage: loadAdminKeys of ${ed25519Str(groupPk)} failed with ${e.message}` + ); + } + } + + await LibSessionUtil.saveDumpsToDb(UserUtils.getOurPubKeyStrFromCache()); + await UserSync.queueNewJobIfNeeded(); + if (!found.invitePending) { + // This group should already be polling based on if that author is pre-approved or we've already approved that group from another device. + // Start polling from it, we will mark ourselves as admin once we get the first merge result, if needed. 
+ getSwarmPollingInstance().addGroupId(groupPk); + } +} + +async function handle1o1GroupUpdateMessage( + details: GroupUpdateDetails & + WithUncheckedSource & + WithUncheckedSenderIdentity & + WithDisappearingMessageUpdate & + WithMessageHash +) { + // the message types below are received from our own swarm, so source is the sender, and senderIdentity is empty + + if (details.updateMessage.inviteMessage || details.updateMessage.promoteMessage) { + if (!PubKey.is05Pubkey(details.source)) { + window.log.warn('received group invite/promote with invalid author'); + throw new PreConditionFailed('received group invite/promote with invalid author'); + } + if (details.updateMessage.inviteMessage) { + await handleGroupUpdateInviteMessage({ + inviteMessage: details.updateMessage + .inviteMessage as SignalService.GroupUpdateInviteMessage, + ...details, + author: details.source, + }); + } else if (details.updateMessage.promoteMessage) { + await handleGroupUpdatePromoteMessage({ + change: details.updateMessage.promoteMessage as SignalService.GroupUpdatePromoteMessage, + ...details, + author: details.source, + }); + } + if (details.messageHash && !isEmpty(details.messageHash)) { + const deleted = await deleteMessagesFromSwarmOnly( + [details.messageHash], + UserUtils.getOurPubKeyStrFromCache() + ); + if (!deleted) { + window.log.warn( + `failed to delete invite/promote while processing it in handle1o1GroupUpdateMessage. hash:${details.messageHash}` + ); + } + } + + // returns true for all cases where this message was expected to be a 1o1 message, even if not processed + return true; + } + + return false; +} + +async function handleGroupUpdateMessage( + details: GroupUpdateDetails & + WithUncheckedSource & + WithUncheckedSenderIdentity & + WithDisappearingMessageUpdate & + WithMessageHash +) { + const was1o1Message = await handle1o1GroupUpdateMessage(details); + if (was1o1Message) { + return; + } + + // other messages are received from the groups swarm, so source is the groupPk, and senderIdentity is the author + const author = details.senderIdentity; + const groupPk = details.source; + if (!PubKey.is05Pubkey(author) || !PubKey.is03Pubkey(groupPk)) { + window.log.warn('received group update message with invalid author or groupPk'); + return; + } + const detailsWithContext = { ...details, author, groupPk }; + + if (details.updateMessage.memberChangeMessage) { + await handleGroupMemberChangeMessage({ + change: details.updateMessage + .memberChangeMessage as SignalService.GroupUpdateMemberChangeMessage, + ...detailsWithContext, + }); + return; + } + + if (details.updateMessage.infoChangeMessage) { + await handleGroupInfoChangeMessage({ + change: details.updateMessage.infoChangeMessage as SignalService.GroupUpdateInfoChangeMessage, + ...detailsWithContext, + }); + return; + } + + if (details.updateMessage.memberLeftMessage) { + await handleGroupMemberLeftMessage({ + change: details.updateMessage.memberLeftMessage as SignalService.GroupUpdateMemberLeftMessage, + ...detailsWithContext, + }); + return; + } + + if (details.updateMessage.memberLeftNotificationMessage) { + await handleGroupUpdateMemberLeftNotificationMessage({ + change: details.updateMessage + .memberLeftNotificationMessage as SignalService.GroupUpdateMemberLeftNotificationMessage, + ...detailsWithContext, + }); + return; + } + if (details.updateMessage.deleteMemberContent) { + await handleGroupDeleteMemberContentMessage({ + change: details.updateMessage + .deleteMemberContent as SignalService.GroupUpdateDeleteMemberContentMessage, + 
...detailsWithContext, + }); + return; + } + + if (details.updateMessage.inviteResponse) { + await handleGroupUpdateInviteResponseMessage({ + change: details.updateMessage + .inviteResponse as SignalService.GroupUpdateInviteResponseMessage, + ...detailsWithContext, + }); + return; + } + + window.log.warn('received group update of unknown type. Discarding...'); +} + +export const GroupV2Receiver = { + handleGroupUpdateMessage, + handleGroupUpdateInviteResponseMessage, +}; diff --git a/ts/receiver/libsession/handleLibSessionMessage.ts b/ts/receiver/libsession/handleLibSessionMessage.ts new file mode 100644 index 0000000000..e7975c5f3b --- /dev/null +++ b/ts/receiver/libsession/handleLibSessionMessage.ts @@ -0,0 +1,87 @@ +import { EncryptionDomain, GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { isNumber, toNumber } from 'lodash'; +import { ConvoHub } from '../../session/conversations'; +import { LibSodiumWrappers, WithLibSodiumWrappers } from '../../session/crypto'; +import { PubKey } from '../../session/types'; +import { DecryptionFailed, InvalidMessage } from '../../session/utils/errors'; +import { assertUnreachable } from '../../types/sqlSharedTypes'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../webworker/workers/browser/libsession_worker_interface'; + +/** + * Logic for handling the `groupKicked` `LibSessionMessage`, this message should only be processed if it was + * sent after the user joined the group (while unlikely, it's possible to receive this message when re-joining a group after + * previously being kicked in which case we don't want to delete the data). + */ +async function handleLibSessionKickedMessage({ + decrypted, + sodium, + ourPk, + groupPk, +}: { + decrypted: Uint8Array; + sodium: LibSodiumWrappers; + ourPk: PubkeyType; + groupPk: GroupPubkeyType; +}) { + const pubkeyBytesCount = PubKey.PUBKEY_BYTE_COUNT_NO_PREFIX; + if (decrypted.length <= pubkeyBytesCount) { + throw new DecryptionFailed('DecryptionFailed for handleLibSessionKickedMessage'); + } + // pubkey without prefix should be at the start, and current_gen as a string the rest of the content. + const pubkeyEmbedded = decrypted.slice(0, pubkeyBytesCount); + const currentGenStr = sodium.to_string(decrypted.slice(pubkeyBytesCount)); + const currentGenEmbedded = toNumber(currentGenStr); + + if (!isNumber(currentGenEmbedded)) { + throw new InvalidMessage('currentGenEmbedded not a number'); + } + const pubkeyEmbeddedHex = sodium.to_hex(pubkeyEmbedded); + if (ourPk.slice(2) !== pubkeyEmbeddedHex) { + throw new InvalidMessage('embedded pubkey does not match current user pubkey'); + } + + const currentGenFromWrapper = await MetaGroupWrapperActions.keyGetCurrentGen(groupPk); + if (currentGenEmbedded < currentGenFromWrapper) { + throw new InvalidMessage('currentgen in wrapper is higher than the one in the message '); + } + + const groupInUserGroup = await UserGroupsWrapperActions.getGroup(groupPk); + const inviteWasPending = groupInUserGroup?.invitePending || false; + + await ConvoHub.use().deleteGroup(groupPk, { + sendLeaveMessage: false, + fromSyncMessage: false, + deletionType: inviteWasPending ? 
'doNotKeep' : 'keepAsKicked', + deleteAllMessagesOnSwarm: false, + forceDestroyForAllMembers: false, + }); +} + +async function handleLibSessionMessage( + opts: { + decrypted: Uint8Array; + domain: EncryptionDomain; + ourPk: PubkeyType; + groupPk: GroupPubkeyType; + } & WithLibSodiumWrappers +) { + switch (opts.domain) { + case 'SessionGroupKickedMessage': + await handleLibSessionKickedMessage(opts); + return; + + default: + assertUnreachable( + opts.domain, + `handleLibSessionMessage unhandled case for domain: ${opts.domain}` + ); + break; + } +} + +export const LibsessionMessageHandler = { + handleLibSessionMessage, +}; diff --git a/ts/receiver/opengroup.ts b/ts/receiver/opengroup.ts index c11ea93de0..6ff24ee406 100644 --- a/ts/receiver/opengroup.ts +++ b/ts/receiver/opengroup.ts @@ -7,7 +7,7 @@ import { SignalService } from '../protobuf'; import { OpenGroupMessageV4 } from '../session/apis/open_group_api/opengroupV2/OpenGroupServerPoller'; import { isUsAnySogsFromCache } from '../session/apis/open_group_api/sogsv3/knownBlindedkeys'; import { getOpenGroupV2ConversationId } from '../session/apis/open_group_api/utils/OpenGroupUtils'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { removeMessagePadding } from '../session/crypto/BufferPadding'; import { perfEnd, perfStart } from '../session/utils/Performance'; import { fromBase64ToArray } from '../session/utils/String'; @@ -68,12 +68,12 @@ const handleOpenGroupMessage = async ( return; } - if (!getConversationController().get(conversationId)?.isOpenGroupV2()) { + if (!ConvoHub.use().get(conversationId)?.isOpenGroupV2()) { window?.log?.error('Received a message for an unknown convo or not an v2. Skipping'); return; } - const groupConvo = getConversationController().get(conversationId); + const groupConvo = ConvoHub.use().get(conversationId); if (!groupConvo) { window?.log?.warn('Skipping handleJob for unknown convo: ', conversationId); diff --git a/ts/receiver/queuedJob.ts b/ts/receiver/queuedJob.ts index 5f5d272550..fe38120303 100644 --- a/ts/receiver/queuedJob.ts +++ b/ts/receiver/queuedJob.ts @@ -1,13 +1,14 @@ -import _, { isEmpty, isNumber } from 'lodash'; +import _, { isEmpty, isNumber, toNumber } from 'lodash'; import { queueAttachmentDownloads } from './attachments'; import { Data } from '../data/data'; import { ConversationModel } from '../models/conversation'; import { MessageModel } from '../models/message'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { Quote } from './types'; import { MessageDirection } from '../models/messageType'; +import { ConversationTypeEnum } from '../models/types'; import { SignalService } from '../protobuf'; import { DisappearingMessages } from '../session/disappearing_messages'; import { ProfileManager } from '../session/profile_manager/ProfileManager'; @@ -19,10 +20,11 @@ import { pushQuotedMessageDetails, } from '../state/ducks/conversations'; import { showMessageRequestBannerOutsideRedux } from '../state/ducks/userConfig'; +import { getMemberInviteSentOutsideRedux } from '../state/selectors/groups'; import { getHideMessageRequestBannerOutsideRedux } from '../state/selectors/userConfig'; import { GoogleChrome } from '../util'; import { LinkPreviews } from '../util/linkPreviews'; -import { ConversationTypeEnum } from '../models/types'; +import { GroupV2Receiver } from './groupv2/handleGroupV2Message'; function 
contentTypeSupported(type: string): boolean { const Chrome = GoogleChrome; @@ -226,6 +228,66 @@ export function toRegularMessage(rawDataMessage: SignalService.DataMessage): Reg }; } +async function toggleMsgRequestBannerIfNeeded( + conversation: ConversationModel, + message: MessageModel, + source: string +) { + if (!conversation.isPrivate() || !message.isIncoming()) { + return; + } + + const incomingMessageCount = await Data.getMessageCountByType( + conversation.id, + MessageDirection.incoming + ); + const isFirstRequestMessage = incomingMessageCount < 2; + if ( + conversation.isIncomingRequest() && + isFirstRequestMessage && + getHideMessageRequestBannerOutsideRedux() + ) { + showMessageRequestBannerOutsideRedux(); + } + + // For the edge case when messaging a client that's unable to explicitly send request approvals + if (conversation.isOutgoingRequest()) { + // Conversation was not approved before so a sync is needed + await conversation.addIncomingApprovalMessage(toNumber(message.get('sent_at')) - 1, source); + } + // should only occur after the isOutgoingRequest check, as it relies on didApproveMe being false. + await conversation.setDidApproveMe(true); +} + +async function handleMessageFromPendingMember( + conversation: ConversationModel, + message: MessageModel, + source: string +) { + const convoId = conversation.id; + if ( + !conversation.isClosedGroupV2() || + !message.isIncoming() || + !conversation.weAreAdminUnblinded() || // this checks in libsession whether we are an admin of that group + !conversation.getGroupMembers().includes(source) || // this checks that the sender of that message is indeed a member of the group + !PubKey.is03Pubkey(convoId) || + !PubKey.is05Pubkey(source) + ) { + return; + } + + const isMemberInviteSent = getMemberInviteSentOutsideRedux(source, convoId); + if (!isMemberInviteSent) { + return; // nothing else to do + } + // we are an admin and we received a message from a member whose invite is `pending`. Update that member state now and push a change.
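+  // note: handleGroupUpdateInviteResponseMessage does not verify a signature for invite responses, so it is safe to call it here with this locally-built change object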
+ await GroupV2Receiver.handleGroupUpdateInviteResponseMessage({ + groupPk: convoId, + author: source, + change: { isApproved: true }, + }); +} + async function handleRegularMessage( conversation: ConversationModel, sendingDeviceConversation: ConversationModel, @@ -234,7 +296,6 @@ async function handleRegularMessage( source: string, messageHash: string ): Promise { - const type = message.get('type'); // this does not trigger a UI update nor write to the db await copyFromQuotedMessage(message, rawDataMessage.quote); @@ -265,34 +326,10 @@ async function handleRegularMessage( await sendingDeviceConversation.updateBlocksSogsMsgReqsTimestamp(updateBlockTimestamp, false); } - if (type === 'incoming') { - if (conversation.isPrivate()) { - const incomingMessageCount = await Data.getMessageCountByType( - conversation.id, - MessageDirection.incoming - ); - const isFirstRequestMessage = incomingMessageCount < 2; - if ( - conversation.isIncomingRequest() && - isFirstRequestMessage && - getHideMessageRequestBannerOutsideRedux() - ) { - showMessageRequestBannerOutsideRedux(); - } + await toggleMsgRequestBannerIfNeeded(conversation, message, source); + await handleMessageFromPendingMember(conversation, message, source); - // For edge case when messaging a client that's unable to explicitly send request approvals - if (conversation.isOutgoingRequest()) { - // Conversation was not approved before so a sync is needed - await conversation.addIncomingApprovalMessage( - _.toNumber(message.get('sent_at')) - 1, - source - ); - } - // should only occur after isOutgoing request as it relies on didApproveMe being false. - await conversation.setDidApproveMe(true); - } - } - const conversationActiveAt = conversation.get('active_at'); + const conversationActiveAt = conversation.getActiveAt(); if ( !conversationActiveAt || conversation.isHidden() || @@ -381,10 +418,11 @@ export async function handleMessageJob( } in conversation ${conversation.idForLogging()}, messageHash:${messageHash}` ); - const sendingDeviceConversation = await getConversationController().getOrCreateAndWait( + const sendingDeviceConversation = await ConvoHub.use().getOrCreateAndWait( source, ConversationTypeEnum.PRIVATE ); + try { messageModel.set({ flags: regularDataMessage.flags }); @@ -445,7 +483,7 @@ export async function handleMessageJob( providedExpireTimer: expireTimerUpdate, providedSource: source, fromSync: source === UserUtils.getOurPubKeyStrFromCache(), - receivedAt: messageModel.get('received_at'), + sentAt: messageModel.get('received_at'), existingMessage: messageModel, shouldCommitConvo: false, fromCurrentDevice: false, @@ -473,7 +511,7 @@ export async function handleMessageJob( // to their source message. 
conversation.set({ - active_at: Math.max(conversation.get('active_at'), messageModel.get('sent_at') || 0), + active_at: Math.max(conversation.getActiveAt() || 0, messageModel.get('sent_at') || 0), }); // this is a throttled call and will only run once every 1 sec at most conversation.updateLastMessage(); @@ -486,7 +524,7 @@ export async function handleMessageJob( void queueAttachmentDownloads(messageModel, conversation); // Check if we need to update any profile names // the only profile we don't update with what is coming here is ours, - // as our profile is shared across our devices with a ConfigurationMessage + // as our profile is shared across our devices with libsession if (messageModel.isIncoming() && regularDataMessage.profile) { await ProfileManager.updateProfileOfContact( sendingDeviceConversation.id, diff --git a/ts/receiver/receiver.ts b/ts/receiver/receiver.ts index 563368188f..8816d8d061 100644 --- a/ts/receiver/receiver.ts +++ b/ts/receiver/receiver.ts @@ -1,18 +1,21 @@ /* eslint-disable more/no-then */ -import _ from 'lodash'; +import { isEmpty, last, toNumber } from 'lodash'; import { v4 as uuidv4 } from 'uuid'; import { EnvelopePlus } from './types'; -import { addToCache, getAllFromCache, getAllFromCacheForSource, removeFromCache } from './cache'; +import { IncomingMessageCache } from './cache'; // innerHandleSwarmContentMessage is only needed because of code duplication in handleDecryptedEnvelope... import { handleSwarmContentMessage, innerHandleSwarmContentMessage } from './contentMessage'; import { Data } from '../data/data'; import { SignalService } from '../protobuf'; +import { DURATION } from '../session/constants'; +import { PubKey } from '../session/types'; import { StringUtils, UserUtils } from '../session/utils'; import { perfEnd, perfStart } from '../session/utils/Performance'; +import { sleepFor } from '../session/utils/Promise'; import { createTaskWithTimeout } from '../session/utils/TaskWithTimeout'; import { UnprocessedParameter } from '../types/sqlSharedTypes'; import { getEnvelopeId } from './common'; @@ -21,17 +24,65 @@ export { downloadAttachment } from './attachments'; const incomingMessagePromises: Array> = []; +export async function handleSwarmContentDecryptedWithTimeout({ + envelope, + messageHash, + sentAtTimestamp, + contentDecrypted, + messageExpirationFromRetrieve, +}: { + envelope: EnvelopePlus; + messageHash: string; + sentAtTimestamp: number; + contentDecrypted: ArrayBuffer; + messageExpirationFromRetrieve: number | null; +}) { + let taskDone = false; + return Promise.race([ + (async () => { + await sleepFor(1 * DURATION.MINUTES); // 1 minute expiry per message seems more than enough + if (taskDone) { + return; + } + window.log.error( + 'handleSwarmContentDecryptedWithTimeout timer expired for envelope ', + envelope.id + ); + await IncomingMessageCache.removeFromCache(envelope); + })(), + (async () => { + try { + await innerHandleSwarmContentMessage({ + envelope, + messageHash, + contentDecrypted, + sentAtTimestamp, + messageExpirationFromRetrieve, + }); + await IncomingMessageCache.removeFromCache(envelope); + } catch (e) { + window.log.error( + 'handleSwarmContentDecryptedWithTimeout task failed with ', + e.message, + envelope.id + ); + } finally { + taskDone = true; + } + })(), + ]); +} + async function handleSwarmEnvelope( envelope: EnvelopePlus, messageHash: string, messageExpiration: number | null ) { - if (envelope.content && envelope.content.length > 0) { - return handleSwarmContentMessage(envelope, messageHash, messageExpiration); + 
if (isEmpty(envelope.content)) { + await IncomingMessageCache.removeFromCache(envelope); + throw new Error('Received message with no content'); } - - await removeFromCache(envelope); - throw new Error('Received message with no content'); + return handleSwarmContentMessage(envelope, messageHash, messageExpiration); } class EnvelopeQueue { @@ -78,19 +129,20 @@ function queueSwarmEnvelope( } } +function contentIsEnvelope(content: Uint8Array | EnvelopePlus): content is EnvelopePlus { + return !isEmpty((content as EnvelopePlus).content); +} + async function handleRequestDetail( - plaintext: Uint8Array, + data: Uint8Array | EnvelopePlus, inConversation: string | null, lastPromise: Promise, messageHash: string, messageExpiration: number ): Promise { - const envelope: any = SignalService.Envelope.decode(plaintext); + const envelope: any = contentIsEnvelope(data) ? data : SignalService.Envelope.decode(data); - // After this point, decoding errors are not the server's - // fault, and we should handle them gracefully and tell the - // user they received an invalid message - // The message is for a medium size group + // The message is for a group if (inConversation) { const ourNumber = UserUtils.getOurPubKeyStrFromCache(); const senderIdentity = envelope.source; @@ -104,8 +156,10 @@ async function handleRequestDetail( envelope.source = inConversation; // eslint-disable-next-line no-param-reassign - plaintext = SignalService.Envelope.encode(envelope).finish(); - envelope.senderIdentity = senderIdentity; + data = SignalService.Envelope.encode(envelope).finish(); + if (!PubKey.is03Pubkey(senderIdentity)) { + envelope.senderIdentity = senderIdentity; + } } envelope.id = uuidv4(); @@ -118,7 +172,11 @@ async function handleRequestDetail( // need to handle senderIdentity separately)... perfStart(`addToCache-${envelope.id}`); - await addToCache(envelope, plaintext, messageHash); + await IncomingMessageCache.addToCache( + envelope, + contentIsEnvelope(data) ? data.content : data, + messageHash + ); perfEnd(`addToCache-${envelope.id}`, 'addToCache'); // To ensure that we queue in the same order we receive messages @@ -138,12 +196,12 @@ async function handleRequestDetail( * @param inConversation if the request is related to a group, this will be set to the group pubkey. Otherwise, it is set to null */ export function handleRequest( - plaintext: Uint8Array, + plaintext: EnvelopePlus | Uint8Array, inConversation: string | null, messageHash: string, messageExpiration: number ): void { - const lastPromise = _.last(incomingMessagePromises) || Promise.resolve(); + const lastPromise = last(incomingMessagePromises) || Promise.resolve(); const promise = handleRequestDetail( plaintext, @@ -162,7 +220,7 @@ export function handleRequest( * Used in main_renderer.js */ export async function queueAllCached() { - const items = await getAllFromCache(); + const items = await IncomingMessageCache.getAllFromCache(); await items.reduce(async (promise, item) => { await promise; @@ -171,7 +229,7 @@ export async function queueAllCached() { } export async function queueAllCachedFromSource(source: string) { - const items = await getAllFromCacheForSource(source); + const items = await IncomingMessageCache.getAllFromCacheForSource(source); // queue all cached for this source, but keep the order await items.reduce(async (promise, item) => { @@ -192,14 +250,19 @@ async function queueCached(item: UnprocessedParameter) { // Why do we need to do this??? 
envelope.senderIdentity = envelope.senderIdentity || item.senderIdentity; - const { decrypted } = item; + // decrypted must be a decryptedContent here (SignalService.Content.parse will be called with it in the pipeline) + const { decrypted: decryptedContentB64 } = item; - if (decrypted) { - const payloadPlaintext = StringUtils.encode(decrypted, 'base64'); + if (decryptedContentB64) { + const contentDecrypted = StringUtils.encode(decryptedContentB64, 'base64'); // TODO we don't store the expiration in the cache, but we want to get rid of the cache at some point - queueDecryptedEnvelope(envelope, payloadPlaintext, envelope.messageHash, null); + queueDecryptedEnvelope({ + envelope, + contentDecrypted, + messageHash: envelope.messageHash, + messageExpirationFromRetrieve: null, + }); } else { - // TODO we don't store the expiration in the cache, but we want to get rid of the cache at some point queueSwarmEnvelope(envelope, envelope.messageHash, null); } } catch (error) { @@ -223,22 +286,27 @@ async function queueCached(item: UnprocessedParameter) { } } -function queueDecryptedEnvelope( - envelope: any, - plaintext: ArrayBuffer, - messageHash: string, - messageExpiration: number | null -) { +function queueDecryptedEnvelope({ + contentDecrypted, + envelope, + messageHash, + messageExpirationFromRetrieve, +}: { + envelope: any; + contentDecrypted: ArrayBuffer; + messageHash: string; + messageExpirationFromRetrieve: number | null; +}) { const id = getEnvelopeId(envelope); window?.log?.info('queueing decrypted envelope', id); - const task = handleDecryptedEnvelope.bind( - null, + const task = handleDecryptedEnvelope.bind(null, { envelope, - plaintext, + contentDecrypted, messageHash, - messageExpiration - ); + messageExpirationFromRetrieve, + }); + const taskWithTimeout = createTaskWithTimeout(task, `queueEncryptedEnvelope ${id}`); try { envelopeQueue.add(taskWithTimeout); @@ -250,23 +318,27 @@ function queueDecryptedEnvelope( } } -async function handleDecryptedEnvelope( - envelope: EnvelopePlus, - plaintext: ArrayBuffer, - messageHash: string, - messageExpirationFromRetrieve: number | null -) { - if (envelope.content) { - const sentAtTimestamp = _.toNumber(envelope.timestamp); - - await innerHandleSwarmContentMessage({ - envelope, - sentAtTimestamp, - plaintext, - messageHash, - messageExpirationFromRetrieve, - }); - } else { - await removeFromCache(envelope); +async function handleDecryptedEnvelope({ + envelope, + messageHash, + contentDecrypted, + messageExpirationFromRetrieve, +}: { + envelope: EnvelopePlus; + contentDecrypted: ArrayBuffer; + messageHash: string; + messageExpirationFromRetrieve: number | null; +}) { + if (!envelope.content) { + await IncomingMessageCache.removeFromCache(envelope); } + const sentAtTimestamp = toNumber(envelope.timestamp); + + await innerHandleSwarmContentMessage({ + envelope, + sentAtTimestamp, + contentDecrypted, + messageHash, + messageExpirationFromRetrieve, + }); } diff --git a/ts/session/apis/file_server_api/FileServerApi.ts b/ts/session/apis/file_server_api/FileServerApi.ts index f9dee5bf96..b2af3b8d75 100644 --- a/ts/session/apis/file_server_api/FileServerApi.ts +++ b/ts/session/apis/file_server_api/FileServerApi.ts @@ -5,8 +5,8 @@ import { batchGlobalIsSuccess, parseBatchGlobalStatusCode, } from '../open_group_api/sogsv3/sogsV3BatchPoll'; -import { GetNetworkTime } from '../snode_api/getNetworkTime'; import { fromUInt8ArrayToBase64 } from '../../utils/String'; +import { NetworkTime } from '../../../util/NetworkTime'; export const fileServerHost = 
'filev2.getsession.org'; export const fileServerURL = `http://${fileServerHost}`; @@ -129,7 +129,7 @@ const parseStatusCodeFromOnionRequestV4 = ( export const getLatestReleaseFromFileServer = async ( userEd25519SecretKey: Uint8Array ): Promise => { - const sigTimestampSeconds = GetNetworkTime.getNowWithNetworkOffsetSeconds(); + const sigTimestampSeconds = NetworkTime.getNowWithNetworkOffsetSeconds(); const blindedPkHex = await BlindingActions.blindVersionPubkey({ ed25519SecretKey: userEd25519SecretKey, }); diff --git a/ts/session/apis/open_group_api/opengroupV2/ApiUtil.ts b/ts/session/apis/open_group_api/opengroupV2/ApiUtil.ts index c329f313aa..53857f4e46 100644 --- a/ts/session/apis/open_group_api/opengroupV2/ApiUtil.ts +++ b/ts/session/apis/open_group_api/opengroupV2/ApiUtil.ts @@ -5,7 +5,7 @@ import { updateDefaultRoomsInProgress, } from '../../../../state/ducks/defaultRooms'; import { UserGroupsWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { allowOnlyOneAtATime } from '../../../utils/Promise'; import { getAllRoomInfos } from '../sogsv3/sogsV3RoomInfos'; import { parseOpenGroupV2 } from './JoinOpenGroupV2'; @@ -125,9 +125,7 @@ export function hasExistingOpenGroup(server: string, roomId: string) { const matchingRoom = rooms.find(r => r.roomId === roomId); return Boolean( - matchingRoom && - matchingRoom.conversationId && - getConversationController().get(matchingRoom.conversationId) + matchingRoom && matchingRoom.conversationId && ConvoHub.use().get(matchingRoom.conversationId) ); } diff --git a/ts/session/apis/open_group_api/opengroupV2/JoinOpenGroupV2.ts b/ts/session/apis/open_group_api/opengroupV2/JoinOpenGroupV2.ts index f22844801f..20d36ded57 100644 --- a/ts/session/apis/open_group_api/opengroupV2/JoinOpenGroupV2.ts +++ b/ts/session/apis/open_group_api/opengroupV2/JoinOpenGroupV2.ts @@ -1,6 +1,6 @@ import { OpenGroupV2Room } from '../../../../data/types'; import { ConversationModel } from '../../../../models/conversation'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { PromiseUtils, ToastUtils } from '../../../utils'; import { forceSyncConfigurationNowIfNeeded } from '../../../utils/sync/syncUtils'; @@ -71,7 +71,7 @@ async function joinOpenGroupV2( const alreadyExist = hasExistingOpenGroup(serverUrl, roomId); const conversationId = getOpenGroupV2ConversationId(serverUrl, roomId); - const existingConvo = getConversationController().get(conversationId); + const existingConvo = ConvoHub.use().get(conversationId); if (alreadyExist) { window?.log?.warn('Skipping join opengroupv2: already exists'); @@ -81,7 +81,7 @@ async function joinOpenGroupV2( // we already have a convo associated with it. 
Remove everything related to it so we start fresh window?.log?.warn('leaving before rejoining open group v2 room', conversationId); - await getConversationController().deleteCommunity(conversationId, { + await ConvoHub.use().deleteCommunity(conversationId, { fromSyncMessage: true, }); } @@ -152,8 +152,8 @@ export async function joinOpenGroupV2WithUIEvents( } const alreadyExist = hasExistingOpenGroup(parsedRoom.serverUrl, parsedRoom.roomId); const conversationID = getOpenGroupV2ConversationId(parsedRoom.serverUrl, parsedRoom.roomId); - if (alreadyExist || getConversationController().get(conversationID)) { - const existingConvo = getConversationController().get(conversationID); + if (alreadyExist || ConvoHub.use().get(conversationID)) { + const existingConvo = ConvoHub.use().get(conversationID); await existingConvo.setDidApproveMe(true, false); await existingConvo.setIsApproved(true, false); await existingConvo.commit(); diff --git a/ts/session/apis/open_group_api/opengroupV2/OpenGroupManagerV2.ts b/ts/session/apis/open_group_api/opengroupV2/OpenGroupManagerV2.ts index 9822fc1b54..a8cfc2c892 100644 --- a/ts/session/apis/open_group_api/opengroupV2/OpenGroupManagerV2.ts +++ b/ts/session/apis/open_group_api/opengroupV2/OpenGroupManagerV2.ts @@ -1,12 +1,12 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable no-restricted-syntax */ -import { clone, groupBy, isEqual, uniqBy } from 'lodash'; import autoBind from 'auto-bind'; +import { clone, groupBy, isEqual, uniqBy } from 'lodash'; import { OpenGroupData } from '../../../../data/opengroups'; import { ConversationModel } from '../../../../models/conversation'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { allowOnlyOneAtATime } from '../../../utils/Promise'; import { getAllValidOpenGroupV2ConversationRoomInfos, @@ -57,15 +57,15 @@ export class OpenGroupManagerV2 { publicKey: string ): Promise { // make sure to use the https version of our official sogs - const overridenUrl = + const overriddenUrl = (serverUrl.includes(`://${ourSogsDomainName}`) && !serverUrl.startsWith('https')) || serverUrl.includes(`://${ourSogsLegacyIp}`) ? 
ourSogsUrl : serverUrl; - const oneAtaTimeStr = `oneAtaTimeOpenGroupV2Join:${overridenUrl}${roomId}`; + const oneAtaTimeStr = `oneAtaTimeOpenGroupV2Join:${overriddenUrl}${roomId}`; return allowOnlyOneAtATime(oneAtaTimeStr, async () => { - return this.attemptConnectionV2(overridenUrl, roomId, publicKey); + return this.attemptConnectionV2(overriddenUrl, roomId, publicKey); }); } @@ -129,6 +129,7 @@ export class OpenGroupManagerV2 { if (this.isPolling) { return; } + const allRoomInfos = await getAllValidOpenGroupV2ConversationRoomInfos(); if (allRoomInfos?.size) { this.addRoomToPolledRooms([...allRoomInfos.values()]); @@ -148,7 +149,7 @@ export class OpenGroupManagerV2 { ): Promise { let conversationId = getOpenGroupV2ConversationId(serverUrl, roomId); - if (getConversationController().get(conversationId)) { + if (ConvoHub.use().get(conversationId)) { // Url incorrect or server not compatible throw new Error(window.i18n('communityJoinedAlready')); } @@ -196,7 +197,7 @@ export class OpenGroupManagerV2 { await OpenGroupData.saveV2OpenGroupRoom(updatedRoom); } - const conversation = await getConversationController().getOrCreateAndWait( + const conversation = await ConvoHub.use().getOrCreateAndWait( conversationId, ConversationTypeEnum.GROUP ); diff --git a/ts/session/apis/open_group_api/opengroupV2/OpenGroupPollingUtils.ts b/ts/session/apis/open_group_api/opengroupV2/OpenGroupPollingUtils.ts index f3695c5864..66a9c0c7d3 100644 --- a/ts/session/apis/open_group_api/opengroupV2/OpenGroupPollingUtils.ts +++ b/ts/session/apis/open_group_api/opengroupV2/OpenGroupPollingUtils.ts @@ -1,13 +1,13 @@ import { compact } from 'lodash'; +import { getSodiumRenderer } from '../../../crypto'; import { OpenGroupData } from '../../../../data/opengroups'; -import { OpenGroupMessageV2 } from './OpenGroupMessageV2'; import { UserUtils } from '../../../utils'; import { fromHexToArray } from '../../../utils/String'; -import { getSodiumRenderer } from '../../../crypto'; import { SogsBlinding } from '../sogsv3/sogsBlinding'; -import { GetNetworkTime } from '../../snode_api/getNetworkTime'; +import { OpenGroupMessageV2 } from './OpenGroupMessageV2'; import { OpenGroupV2Room } from '../../../../data/types'; +import { NetworkTime } from '../../../../util/NetworkTime'; export type OpenGroupRequestHeaders = { 'X-SOGS-Pubkey': string; @@ -43,7 +43,7 @@ const getOurOpenGroupHeaders = async ( const nonce = (await getSodiumRenderer()).randombytes_buf(16); - const timestamp = Math.floor(GetNetworkTime.getNowWithNetworkOffset() / 1000); + const timestamp = Math.floor(NetworkTime.now() / 1000); return SogsBlinding.getOpenGroupHeaders({ signingKeys, serverPK: fromHexToArray(serverPublicKey), @@ -83,7 +83,7 @@ const getAllValidRoomInfos = ( return fetchedInfo; } catch (e) { - window?.log?.warn('failed to fetch roominfos for room', roomId); + window?.log?.warn('failed to fetch room infos for room', roomId); return null; } }) diff --git a/ts/session/apis/open_group_api/sogsv3/knownBlindedkeys.ts b/ts/session/apis/open_group_api/sogsv3/knownBlindedkeys.ts index 491adb440e..1caaa68ae5 100644 --- a/ts/session/apis/open_group_api/sogsv3/knownBlindedkeys.ts +++ b/ts/session/apis/open_group_api/sogsv3/knownBlindedkeys.ts @@ -2,13 +2,13 @@ import { crypto_sign_curve25519_pk_to_ed25519 } from 'curve25519-js'; import { from_hex, to_hex } from 'libsodium-wrappers-sumo'; import { cloneDeep, flatten, isEmpty, isEqual, isString, uniqBy } from 'lodash'; +import { ConvoHub } from '../../../conversations'; import { Data } from '../../../../data/data'; 
import { OpenGroupData } from '../../../../data/opengroups'; import { KNOWN_BLINDED_KEYS_ITEM } from '../../../../data/settings-key'; import { ConversationModel } from '../../../../models/conversation'; import { roomHasBlindEnabled } from '../../../../types/sqlSharedTypes'; import { Storage } from '../../../../util/storage'; -import { getConversationController } from '../../../conversations'; import { LibSodiumWrappers } from '../../../crypto'; import { KeyPrefixType, PubKey } from '../../../types'; import { UserUtils } from '../../../utils'; @@ -160,10 +160,10 @@ export function tryMatchBlindWithStandardKey( // From the account id (ignoring 05 prefix) we have two possible ed25519 pubkeys; the first is // the positive(which is what Signal's XEd25519 conversion always uses) - const inbin = from_hex(sessionIdNoPrefix); + const inBin = from_hex(sessionIdNoPrefix); // Note: The below method is code we have exposed from the method within the Curve25519-js library // rather than custom code we have written - const xEd25519Key = crypto_sign_curve25519_pk_to_ed25519(inbin); + const xEd25519Key = crypto_sign_curve25519_pk_to_ed25519(inBin); // Blind it: const pk1 = combineKeys(kBytes, xEd25519Key, sodium); @@ -202,7 +202,7 @@ function findNotCachedBlindingMatch( } // we iterate only over the convos private, approved, and which have an unblinded id. - const foundConvoMatchingBlindedPubkey = getConversationController() + const foundConvoMatchingBlindedPubkey = ConvoHub.use() .getConversations() .filter(m => m.isPrivate() && m.isApproved() && !PubKey.isBlinded(m.id)) .find(m => { @@ -238,7 +238,7 @@ export function getUsBlindedInThatServer(convo: ConversationModel | string): str } const convoId = isString(convo) ? convo : convo.id; - if (!getConversationController().get(convoId)?.isOpenGroupV2()) { + if (!ConvoHub.use().get(convoId)?.isOpenGroupV2()) { return undefined; } const room = OpenGroupData.getV2OpenGroupRoom(isString(convo) ? convo : convo.id); @@ -273,14 +273,14 @@ function findNotCachedBlindedConvoFromUnblindedKey( // we iterate only over the convos private, with a blindedId, and active, // so the one to which we sent a message already or received one from outside sogs. 
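// Illustrative sketch (not part of the patch): why a blinded id is specific to one server,
// which is what the per-server matching below relies on. As I understand the SOGS blinding
// scheme, the blinding factor k is derived from the *server* pubkey, so the same ed25519
// key A maps to a different blinded key k*A on every server, but to a single one (up to
// sign, which the code above handles and this sketch ignores) on a given server. The exact
// derivation details here are an assumption for illustration, not a reference implementation.
function sketchBlindKeyForServer(
  ed25519PubkeyHex: string,
  serverPkHex: string,
  sodium: LibSodiumWrappers
): string {
  // k = reduce(BLAKE2b-512(serverPk)) -- a per-server scalar
  const k = sodium.crypto_core_ed25519_scalar_reduce(
    sodium.crypto_generichash(64, from_hex(serverPkHex))
  );
  // blinded pubkey = k*A (sign handling omitted)
  return to_hex(sodium.crypto_scalarmult_ed25519_noclamp(k, from_hex(ed25519PubkeyHex)));
}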
const foundConvosForThisServerPk = - getConversationController() + ConvoHub.use() .getConversations() .filter(m => m.isPrivate() && PubKey.isBlinded(m.id) && m.isActive()) .filter(m => { return tryMatchBlindWithStandardKey(unblindedID, m.id, serverPublicKey, sodium); }) || []; - // we should have only one per server, as we gave the serverpubkey and a blindedId is uniq for a serverPk + // we should have only one per server, as we gave the serverPubkey and a blindedId is uniq for a serverPk return foundConvosForThisServerPk; } diff --git a/ts/session/apis/open_group_api/sogsv3/sogsApiV3.ts b/ts/session/apis/open_group_api/sogsv3/sogsApiV3.ts index 93ec7afefb..9b63cd47a3 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsApiV3.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsApiV3.ts @@ -8,7 +8,7 @@ import { OpenGroupData } from '../../../../data/opengroups'; import { ConversationModel } from '../../../../models/conversation'; import { handleOpenGroupV4Message } from '../../../../receiver/opengroup'; import { callUtilsWorker } from '../../../../webworker/workers/browser/util_worker_interface'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { PubKey } from '../../../types'; import { OpenGroupMessageV4, @@ -25,7 +25,7 @@ import { } from './knownBlindedkeys'; import { SogsBlinding } from './sogsBlinding'; import { handleCapabilities } from './sogsCapabilities'; -import { BatchSogsReponse, OpenGroupBatchRow, SubRequestMessagesType } from './sogsV3BatchPoll'; +import { BatchSogsResponse, OpenGroupBatchRow, SubRequestMessagesType } from './sogsV3BatchPoll'; import { Data } from '../../../../data/data'; import { createSwarmMessageSentFromUs } from '../../../../models/messageFactory'; @@ -54,7 +54,7 @@ function getSogsConvoOrReturnEarly(serverUrl: string, roomId: string): Conversat return null; } - const foundConvo = getConversationController().get(convoId); + const foundConvo = ConvoHub.use().get(convoId); if (!foundConvo) { window.log.info('getSogsConvoOrReturnEarly: convo not found: ', convoId); return null; @@ -70,8 +70,8 @@ function getSogsConvoOrReturnEarly(serverUrl: string, roomId: string): Conversat /** * - * Handle the pollinfo from the response of a pysogs. - * Pollinfos contains the subscriberCount (active users), the read, upload and write things we as a user can do. + * Handle the poll info from the response of a pysogs. + * Poll infos contains the subscriberCount (active users), the read, upload and write things we as a user can do. 
*/ async function handlePollInfoResponse( statusCode: number, @@ -174,7 +174,7 @@ const handleSogsV3DeletedMessages = async ( try { const convoId = getOpenGroupV2ConversationId(serverUrl, roomId); - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); const messageIds = await Data.getMessageIdsFromServerIds(allIdsRemoved, convo.id); allIdsRemoved.forEach(removedId => { @@ -198,15 +198,15 @@ const handleSogsV3DeletedMessages = async ( const handleMessagesResponseV4 = async ( messages: Array, serverUrl: string, - subrequestOption: SubRequestMessagesType + subRequestOption: SubRequestMessagesType ) => { - if (!subrequestOption || !subrequestOption.messages) { - window?.log?.error('handleBatchPollResults - missing fields required for message subresponse'); + if (!subRequestOption || !subRequestOption.messages) { + window?.log?.error('handleBatchPollResults - missing fields required for message subResponse'); return; } try { - const { roomId } = subrequestOption.messages; + const { roomId } = subRequestOption.messages; const stillPolledRooms = OpenGroupData.getV2OpenGroupRoomsByServerUrl(serverUrl); @@ -219,7 +219,7 @@ const handleMessagesResponseV4 = async ( const roomInfos = await getRoomAndUpdateLastFetchTimestamp( convoId, messages, - subrequestOption.messages + subRequestOption.messages ); if (!roomInfos || !roomInfos.conversationId) { return; @@ -243,7 +243,7 @@ const handleMessagesResponseV4 = async ( return true; }); - // Incoming messages from sogvs v3 are returned in descending order from the latest seqno, we need to sort it chronologically + // Incoming messages from sogs v3 are returned in descending order from the latest seqno, we need to sort it chronologically // Incoming messages for sogs v3 have a timestamp in seconds and not ms. // Session works with timestamp in ms, for a lot of things, so first, lets fix this. 
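// Illustrative sketch (not part of the patch) of the conversion the code below performs:
// SOGS reports the post time in seconds (with a fractional part), while the rest of the app
// works in whole milliseconds, so e.g. 1700000000.123456 s becomes 1700000000123 ms.
// The field name and the use of Math.floor are assumptions made for illustration only.
const toMsTimestamp = (postedSeconds: number): number => Math.floor(postedSeconds * 1000);
// toMsTimestamp(1700000000.123456) === 1700000000123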
const messagesWithMsTimestamp = messagesWithoutReactionOnlyUpdates @@ -256,7 +256,7 @@ const handleMessagesResponseV4 = async ( const messagesWithoutDeleted = await handleSogsV3DeletedMessages( messagesWithMsTimestamp, serverUrl, - subrequestOption.messages.roomId + subRequestOption.messages.roomId ); const messagesWithValidSignature = @@ -319,7 +319,7 @@ const handleMessagesResponseV4 = async ( if (messagesWithReactions.length > 0) { const conversationId = getOpenGroupV2ConversationId(serverUrl, roomId); - const groupConvo = getConversationController().get(conversationId); + const groupConvo = ConvoHub.use().get(conversationId); if (groupConvo && groupConvo.isOpenGroupV2()) { for (const messageWithReaction of messagesWithReactions) { if (isEmpty(messageWithReaction.reactions)) { @@ -347,7 +347,7 @@ type InboxOutboxResponseObject = { id: number; // that specific inbox message id sender: string; // blindedPubkey of the sender, the unblinded one is inside message content, encrypted only for our blinded pubkey recipient: string; // blindedPubkey of the recipient, used for outbox messages only - posted_at: number; // timestamp as seconds.microsec + posted_at: number; // timestamp as seconds.microseconds message: string; // base64 data }; @@ -374,7 +374,7 @@ async function handleInboxOutboxMessages( const serverPubkey = roomInfos[0].serverPublicKey; const sodium = await getSodiumRenderer(); - // make sure to add our blindedpubkey to this server in the cache, if it's not already there + // make sure to add our blinded pubkey to this server in the cache, if it's not already there await findCachedOurBlindedPubkeyOrLookItUp(serverPubkey, sodium); for (let index = 0; index < inboxOutboxResponse.length; index++) { @@ -414,15 +414,15 @@ async function handleInboxOutboxMessages( * We will need this to send new message to that user from our second device. 
*/ const recipient = inboxOutboxItem.recipient; - const contentDecoded = SignalService.Content.decode(content); + const contentDecrypted = SignalService.Content.decode(content); // if we already know this user's unblinded pubkey, store the blinded message we sent to that blinded recipient under // the unblinded conversation instead (as we would have merge the blinded one with the other ) const unblindedIDOrBlinded = (await findCachedBlindedMatchOrLookItUp(recipient, serverPubkey, sodium)) || recipient; - if (contentDecoded.dataMessage) { - const outboxConversationModel = await getConversationController().getOrCreateAndWait( + if (contentDecrypted.dataMessage) { + const outboxConversationModel = await ConvoHub.use().getOrCreateAndWait( unblindedIDOrBlinded, ConversationTypeEnum.PRIVATE ); @@ -436,13 +436,13 @@ async function handleInboxOutboxMessages( messageHash: '', sentAt: postedAtInMs, }); - await outboxConversationModel.setOriginConversationID(serverConversationId); + await outboxConversationModel.setOriginConversationID(serverConversationId, true); await handleOutboxMessageModel( msgModel, '', postedAtInMs, - contentDecoded.dataMessage as SignalService.DataMessage, + contentDecrypted.dataMessage as SignalService.DataMessage, outboxConversationModel ); } @@ -470,15 +470,15 @@ async function handleInboxOutboxMessages( }); await findCachedBlindedMatchOrLookItUp(sender, serverPubkey, sodium); } catch (e) { - window.log.warn('tryMatchBlindWithStandardKey could not veriyfy'); + window.log.warn('tryMatchBlindWithStandardKey could not verify'); } await innerHandleSwarmContentMessage({ envelope: builtEnvelope, sentAtTimestamp: postedAtInMs, - plaintext: builtEnvelope.content, + contentDecrypted: builtEnvelope.content, messageHash: '', - messageExpirationFromRetrieve: null, // sogs message do not expire + messageExpirationFromRetrieve: null, // sogs message cannot expire }); } } catch (e) { @@ -495,7 +495,7 @@ async function handleInboxOutboxMessages( const maxInboxOutboxId = inboxOutboxResponse.length ? Math.max(...inboxOutboxResponse.map(inboxOutboxItem => inboxOutboxItem.id)) - : undefined || undefined; + : undefined; // we should probably extract the inboxId & outboxId fetched to another table, as it is server wide and not room specific if (isNumber(maxInboxOutboxId)) { @@ -509,9 +509,9 @@ async function handleInboxOutboxMessages( export const handleBatchPollResults = async ( serverUrl: string, - batchPollResults: BatchSogsReponse, + batchPollResults: BatchSogsResponse, /** using this as explicit way to ensure order */ - subrequestOptionsLookup: Array + subRequestOptionsLookup: Array ) => { // @@: Might not need the explicit type field. // pro: prevents cases where accidentally two fields for the opt. e.g. capability and message fields truthy. @@ -519,7 +519,7 @@ export const handleBatchPollResults = async ( // note: handling capabilities first before handling anything else as it affects how things are handled. 
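// Illustrative sketch (not part of the patch): the batch body is interpreted positionally,
// i.e. the i-th sub-response is assumed to answer the i-th sub-request, which is why
// subRequestOptionsLookup must stay in exactly the order the sub-requests were sent.
// Types are simplified for illustration.
function pairBatchResults<Req, Res>(requests: Array<Req>, responses: Array<Res>) {
  return requests.map((request, i) => ({ request, response: responses[i] as Res | undefined }));
}
// usage: pairBatchResults(subRequestOptionsLookup, batchPollResults.body ?? []),
// then dispatch on each request's `type` field, as the switch further down does.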
- await handleCapabilities(subrequestOptionsLookup, batchPollResults, serverUrl); + await handleCapabilities(subRequestOptionsLookup, batchPollResults, serverUrl); if (batchPollResults && isArray(batchPollResults.body)) { /** @@ -529,10 +529,10 @@ export const handleBatchPollResults = async ( */ for (let index = 0; index < batchPollResults.body.length; index++) { const subResponse = batchPollResults.body[index] as any; - // using subreqOptions as request type lookup, - // assumes batch subresponse order matches the subrequest order - const subrequestOption = subrequestOptionsLookup[index]; - const responseType = subrequestOption.type; + // using subReqOptions as request type lookup, + // assumes batch subResponse order matches the subRequest order + const subRequestOption = subRequestOptionsLookup[index]; + const responseType = subRequestOption.type; switch (responseType) { case 'capabilities': @@ -540,7 +540,7 @@ export const handleBatchPollResults = async ( break; case 'messages': // this will also include deleted messages explicitly with `data: null` & edited messages with a new data field & react changes with data not existing - await handleMessagesResponseV4(subResponse.body, serverUrl, subrequestOption); + await handleMessagesResponseV4(subResponse.body, serverUrl, subRequestOption); break; case 'pollInfo': await handlePollInfoResponse(subResponse.code, subResponse.body, serverUrl); @@ -565,7 +565,7 @@ export const handleBatchPollResults = async ( default: assertUnreachable( responseType, - `No matching subrequest response body for type: "${responseType}"` + `No matching subRequest response body for type: "${responseType}"` ); } } diff --git a/ts/session/apis/open_group_api/sogsv3/sogsCapabilities.ts b/ts/session/apis/open_group_api/sogsv3/sogsCapabilities.ts index 030b0148da..8831cdbe29 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsCapabilities.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsCapabilities.ts @@ -1,21 +1,21 @@ import { findIndex } from 'lodash'; import { OpenGroupData } from '../../../../data/opengroups'; import { DecodedResponseBodiesV4 } from '../../../onions/onionv4'; -import { BatchSogsReponse, OpenGroupBatchRow } from './sogsV3BatchPoll'; +import { BatchSogsResponse, OpenGroupBatchRow } from './sogsV3BatchPoll'; import { parseCapabilities } from './sogsV3Capabilities'; /** - * @param subrequestOptionsLookup list of subrequests used for the batch request (order sensitive) + * @param subRequestOptionsLookup list of subRequests used for the batch request (order sensitive) * @param batchPollResults The result from the batch request (order sensitive) */ export const getCapabilitiesFromBatch = ( - subrequestOptionsLookup: Array, + subRequestOptionsLookup: Array, bodies: DecodedResponseBodiesV4 ): Array | null => { const capabilitiesBatchIndex = findIndex( - subrequestOptionsLookup, - (subrequest: OpenGroupBatchRow) => { - return subrequest.type === 'capabilities'; + subRequestOptionsLookup, + (subRequest: OpenGroupBatchRow) => { + return subRequest.type === 'capabilities'; } ); const capabilities: Array | null = @@ -25,25 +25,25 @@ export const getCapabilitiesFromBatch = ( /** using this as explicit way to ensure order */ export const handleCapabilities = async ( - subrequestOptionsLookup: Array, - batchPollResults: BatchSogsReponse, + subRequestOptionsLookup: Array, + batchPollResults: BatchSogsResponse, serverUrl: string // roomId: string ): Promise> => { if (!batchPollResults.body) { return null; } - const capabilities = 
getCapabilitiesFromBatch(subrequestOptionsLookup, batchPollResults.body); + const capabilities = getCapabilitiesFromBatch(subRequestOptionsLookup, batchPollResults.body); if (!capabilities) { window?.log?.error( - 'Failed capabilities subrequest - cancelling capabilities response handling' + 'Failed capabilities subRequest - cancelling capabilities response handling' ); return null; } // get all v2OpenGroup rooms with the matching serverUrl and set the capabilities. - // TODOLATER: capabilities are shared accross a server, not a room. We should probably move this to the server but we do not a server level currently, just rooms + // TODOLATER: capabilities are shared across a server, not a room. We should probably move this to the server but we do not a server level currently, just rooms const rooms = OpenGroupData.getV2OpenGroupRoomsByServerUrl(serverUrl); diff --git a/ts/session/apis/open_group_api/sogsv3/sogsV3BatchPoll.ts b/ts/session/apis/open_group_api/sogsv3/sogsV3BatchPoll.ts index 90cdf60675..45ade16e39 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsV3BatchPoll.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsV3BatchPoll.ts @@ -5,6 +5,7 @@ import { OpenGroupData } from '../../../../data/opengroups'; import { assertUnreachable, roomHasBlindEnabled } from '../../../../types/sqlSharedTypes'; import { Reactions } from '../../../../util/reactions'; import { OnionSending, OnionV4JSONSnodeResponse } from '../../../onions/onionSend'; +import { MethodBatchType } from '../../snode_api/SnodeRequestTypes'; import { OpenGroupPollingUtils, OpenGroupRequestHeaders, @@ -26,11 +27,11 @@ type BatchBodyRequestSharedOptions = { headers?: any; }; -interface BatchJsonSubrequestOptions extends BatchBodyRequestSharedOptions { +interface BatchJsonSubRequestOptions extends BatchBodyRequestSharedOptions { json: object; } -type BatchBodyRequest = BatchJsonSubrequestOptions; +type BatchBodyRequest = BatchJsonSubRequestOptions; type BatchSubRequest = BatchBodyRequest | BatchFetchRequestOptions; @@ -45,7 +46,7 @@ type BatchRequest = { headers: OpenGroupRequestHeaders; }; -export type BatchSogsReponse = { +export type BatchSogsResponse = { status_code: number; body?: Array<{ body: object; code: number; headers?: Record }>; }; @@ -55,8 +56,8 @@ export const sogsBatchSend = async ( roomInfos: Set, abortSignal: AbortSignal, batchRequestOptions: Array, - batchType: 'batch' | 'sequence' -): Promise => { + batchType: MethodBatchType +): Promise => { // getting server pk for room const [roomId] = roomInfos; const fetchedRoomInfo = OpenGroupData.getV2OpenGroupRoomByRoomId({ @@ -64,7 +65,7 @@ export const sogsBatchSend = async ( roomId, }); if (!fetchedRoomInfo || !fetchedRoomInfo?.serverPublicKey) { - window?.log?.warn('Couldnt get fetched info or server public key -- aborting batch request'); + window?.log?.warn("Couldn't get fetched info or server public key -- aborting batch request"); return null; } const { serverPublicKey } = fetchedRoomInfo; @@ -98,29 +99,27 @@ export const sogsBatchSend = async ( }; export function parseBatchGlobalStatusCode( - response?: BatchSogsReponse | OnionV4JSONSnodeResponse | null + response?: BatchSogsResponse | OnionV4JSONSnodeResponse | null ): number | undefined { return response?.status_code; } export function batchGlobalIsSuccess( - response?: BatchSogsReponse | OnionV4JSONSnodeResponse | null + response?: BatchSogsResponse | OnionV4JSONSnodeResponse | null ): boolean { const status = parseBatchGlobalStatusCode(response); return Boolean(status && isNumber(status) && status 
>= 200 && status <= 300); } -function parseBatchFirstSubStatusCode(response?: BatchSogsReponse | null): number | undefined { +function parseBatchFirstSubStatusCode(response?: BatchSogsResponse | null): number | undefined { return response?.body?.[0].code; } -export function batchFirstSubIsSuccess(response?: BatchSogsReponse | null): boolean { +export function batchFirstSubIsSuccess(response?: BatchSogsResponse | null): boolean { const status = parseBatchFirstSubStatusCode(response); return Boolean(status && isNumber(status) && status >= 200 && status <= 300); } -export type SubrequestOptionType = 'capabilities' | 'messages' | 'pollInfo' | 'inbox'; - export type SubRequestCapabilitiesType = { type: 'capabilities' }; export type SubRequestMessagesObjectType = @@ -232,7 +231,7 @@ export type OpenGroupBatchRow = /** * - * @param options Array of subrequest options to be made. + * @param options Array of subRequest options to be made. */ const makeBatchRequestPayload = ( options: OpenGroupBatchRow @@ -356,9 +355,9 @@ const getBatchRequest = async ( serverPublicKey: string, batchOptions: Array, requireBlinding: boolean, - batchType: 'batch' | 'sequence' + batchType: MethodBatchType ): Promise => { - const batchEndpoint = batchType === 'sequence' ? '/sequence' : '/batch'; + const batchEndpoint = `/${batchType}` as const; const batchMethod = 'POST'; if (!batchOptions || isEmpty(batchOptions)) { return undefined; @@ -398,7 +397,7 @@ const sendSogsBatchRequestOnionV4 = async ( serverPubkey: string, request: BatchRequest, abortSignal: AbortSignal -): Promise => { +): Promise => { const { endpoint, headers, method, body } = request; if (!endpoint.startsWith('/')) { throw new Error('endpoint needs a leading /'); @@ -428,7 +427,7 @@ const sendSogsBatchRequestOnionV4 = async ( return null; } if (isObject(batchResponse.body)) { - return batchResponse as BatchSogsReponse; + return batchResponse as BatchSogsResponse; } window?.log?.warn('sogsbatch: batch response decoded body is not object. 
Returning null'); diff --git a/ts/session/apis/open_group_api/sogsv3/sogsV3ClearInbox.ts b/ts/session/apis/open_group_api/sogsv3/sogsV3ClearInbox.ts index e4e213179b..e097009d18 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsV3ClearInbox.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsV3ClearInbox.ts @@ -1,5 +1,5 @@ import AbortController from 'abort-controller'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { getOpenGroupV2ConversationId } from '../utils/OpenGroupUtils'; import { batchFirstSubIsSuccess, @@ -7,8 +7,8 @@ import { OpenGroupBatchRow, sogsBatchSend, } from './sogsV3BatchPoll'; -import { OpenGroupRequestCommonType } from '../../../../data/types'; import { PromiseUtils } from '../../../utils'; +import { OpenGroupRequestCommonType } from '../../../../data/types'; type OpenGroupClearInboxResponse = { deleted: number; @@ -18,7 +18,7 @@ export const clearInbox = async (roomInfos: OpenGroupRequestCommonType): Promise let success = false; const conversationId = getOpenGroupV2ConversationId(roomInfos.serverUrl, roomInfos.roomId); - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); if (!conversation) { throw new Error(`clearInbox Matching conversation not found in db ${conversationId}`); diff --git a/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts b/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts index 55f6e59a12..ec9d354ebd 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts @@ -5,7 +5,7 @@ import { MIME } from '../../../../types'; import { processNewAttachment } from '../../../../types/MessageAttachment'; import { roomHasBlindEnabled } from '../../../../types/sqlSharedTypes'; import { callUtilsWorker } from '../../../../webworker/workers/browser/util_worker_interface'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { OnionSending } from '../../../onions/onionSend'; import { allowOnlyOneAtATime } from '../../../utils/Promise'; import { OpenGroupPollingUtils } from '../opengroupV2/OpenGroupPollingUtils'; @@ -85,11 +85,11 @@ export async function sogsV3FetchPreviewAndSaveIt(roomInfos: OpenGroupV2RoomWith const imageIdNumber = toNumber(imageID); const convoId = getOpenGroupV2ConversationId(roomInfos.serverUrl, roomInfos.roomId); - let convo = getConversationController().get(convoId); + let convo = ConvoHub.use().get(convoId); if (!convo) { return; } - let existingImageId = convo.get('avatarImageId'); + let existingImageId = convo.getAvatarImageId(); if (existingImageId === imageIdNumber) { // return early as the imageID about to be downloaded the one already set as avatar is the same. 
return; @@ -110,11 +110,11 @@ export async function sogsV3FetchPreviewAndSaveIt(roomInfos: OpenGroupV2RoomWith return; } // refresh to make sure the convo was not deleted during the fetch above - convo = getConversationController().get(convoId); + convo = ConvoHub.use().get(convoId); if (!convo) { return; } - existingImageId = convo.get('avatarImageId'); + existingImageId = convo.getAvatarImageId(); if (existingImageId !== imageIdNumber && isFinite(imageIdNumber)) { // we have to trigger an update // write the file to the disk (automatically encrypted), diff --git a/ts/session/apis/open_group_api/sogsv3/sogsV3SendFile.ts b/ts/session/apis/open_group_api/sogsv3/sogsV3SendFile.ts index 6311e308a3..0e07210c5d 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsV3SendFile.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsV3SendFile.ts @@ -18,7 +18,7 @@ export const uploadFileToRoomSogs3 = async ( const roomDetails = OpenGroupData.getV2OpenGroupRoomByRoomId(roomInfos); if (!roomDetails || !roomDetails.serverPublicKey) { - window.log.warn('uploadFileOpenGroupV3: roomDetails is invalid'); + window.log.warn('uploadFileOpenGroup: roomDetails is invalid'); return null; } diff --git a/ts/session/apis/open_group_api/utils/OpenGroupUtils.ts b/ts/session/apis/open_group_api/utils/OpenGroupUtils.ts index 7505b98b6d..62d95baf42 100644 --- a/ts/session/apis/open_group_api/utils/OpenGroupUtils.ts +++ b/ts/session/apis/open_group_api/utils/OpenGroupUtils.ts @@ -1,8 +1,8 @@ import { isEmpty } from 'lodash'; import { OpenGroupData } from '../../../../data/opengroups'; +import { ConvoHub } from '../../../conversations'; import { getOpenGroupManager } from '../opengroupV2/OpenGroupManagerV2'; import { SessionUtilUserGroups } from '../../../utils/libsession/libsession_utils_user_groups'; -import { getConversationController } from '../../../conversations'; import { OpenGroupV2Room, OpenGroupRequestCommonType } from '../../../../data/types'; // eslint-disable-next-line prefer-regex-literals @@ -51,7 +51,7 @@ export function getCompleteUrlFromRoom(roomInfos: OpenGroupV2Room) { isEmpty(roomInfos.roomId) || isEmpty(roomInfos.serverPublicKey) ) { - throw new Error('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + throw new Error('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); } // serverUrl has the port and protocol already return `${roomInfos.serverUrl}/${roomInfos.roomId}?${publicKeyParam}${roomInfos.serverPublicKey}`; @@ -148,7 +148,7 @@ export async function getAllValidOpenGroupV2ConversationRoomInfos() { /* eslint-disable no-await-in-loop */ await OpenGroupData.removeV2OpenGroupRoom(roomConvoId); getOpenGroupManager().removeRoomFromPolledRooms(infos); - await getConversationController().deleteCommunity(roomConvoId, { + await ConvoHub.use().deleteCommunity(roomConvoId, { fromSyncMessage: false, }); /* eslint-enable no-await-in-loop */ diff --git a/ts/session/apis/seed_node_api/SeedNodeAPI.ts b/ts/session/apis/seed_node_api/SeedNodeAPI.ts index 28dbdabaae..5d788430ca 100644 --- a/ts/session/apis/seed_node_api/SeedNodeAPI.ts +++ b/ts/session/apis/seed_node_api/SeedNodeAPI.ts @@ -1,6 +1,7 @@ import https from 'https'; -import _ from 'lodash'; import tls from 'tls'; + +import _ from 'lodash'; // eslint-disable-next-line import/no-named-default import { default as insecureNodeFetch } from 'node-fetch'; import pRetry from 'p-retry'; diff --git a/ts/session/apis/snode_api/SNodeAPI.ts b/ts/session/apis/snode_api/SNodeAPI.ts index 0af2ca0392..fc40de1ef2 
100644 --- a/ts/session/apis/snode_api/SNodeAPI.ts +++ b/ts/session/apis/snode_api/SNodeAPI.ts @@ -1,30 +1,27 @@ /* eslint-disable no-prototype-builtins */ /* eslint-disable no-restricted-syntax */ -import { compact, sample } from 'lodash'; +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { compact, isEmpty } from 'lodash'; import pRetry from 'p-retry'; -import { Snode } from '../../../data/types'; +import { UserGroupsWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; import { getSodiumRenderer } from '../../crypto'; +import { PubKey } from '../../types'; import { StringUtils, UserUtils } from '../../utils'; import { ed25519Str, fromBase64ToArray, fromHexToArray } from '../../utils/String'; -import { doSnodeBatchRequest } from './batchRequest'; -import { getSwarmFor } from './snodePool'; -import { SnodeSignature } from './snodeSignatures'; +import { DeleteAllFromUserNodeSubRequest } from './SnodeRequestTypes'; +import { BatchRequests } from './batchRequest'; +import { DeleteGroupHashesFactory } from './factories/DeleteGroupHashesRequestFactory'; +import { DeleteUserHashesFactory } from './factories/DeleteUserHashesRequestFactory'; +import { SnodePool } from './snodePool'; export const ERROR_CODE_NO_CONNECT = 'ENETUNREACH: No network connection.'; // TODOLATER we should merge those two functions together as they are almost exactly the same const forceNetworkDeletion = async (): Promise | null> => { const sodium = await getSodiumRenderer(); - const userX25519PublicKey = UserUtils.getOurPubKeyStrFromCache(); + const usPk = UserUtils.getOurPubKeyStrFromCache(); - const userED25519KeyPair = await UserUtils.getUserED25519KeyPair(); - - if (!userED25519KeyPair) { - window?.log?.warn('Cannot forceNetworkDeletion, did not find user ed25519 key.'); - return null; - } - const method = 'delete_all' as const; - const namespace = 'all' as const; + const request = new DeleteAllFromUserNodeSubRequest(); try { const maliciousSnodes = await pRetry( @@ -33,144 +30,117 @@ const forceNetworkDeletion = async (): Promise | null> => { window?.log?.warn('forceNetworkDeletion: we are offline.'); return null; } + const snodeToMakeRequestTo = await SnodePool.getNodeFromSwarmOrThrow(usPk); + const builtRequest = await request.build(); + const ret = await BatchRequests.doSnodeBatchRequestNoRetries( + [builtRequest], + snodeToMakeRequestTo, + 10000, + usPk, + false + ); - const userSwarm = await getSwarmFor(userX25519PublicKey); - const snodeToMakeRequestTo: Snode | undefined = sample(userSwarm); - - if (!snodeToMakeRequestTo) { - window?.log?.warn('Cannot forceNetworkDeletion, without a valid swarm node.'); - return null; + if (!ret || !ret?.[0].body || ret[0].code !== 200) { + throw new Error( + `Empty response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}` + ); } - return pRetry( - async () => { - const signOpts = await SnodeSignature.getSnodeSignatureParams({ - method, - namespace, - pubkey: userX25519PublicKey, - }); - - const ret = await doSnodeBatchRequest( - [{ method, params: { ...signOpts, namespace } }], - snodeToMakeRequestTo, - 10000, - userX25519PublicKey - ); + try { + const firstResultParsedBody = ret[0].body; + const { swarm } = firstResultParsedBody; - if (!ret || !ret?.[0].body || ret[0].code !== 200) { - throw new Error( - `Empty response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}` - ); - } - - try { - const firstResultParsedBody = ret[0].body; - 
const { swarm } = firstResultParsedBody; - - if (!swarm) { - throw new Error( - `Invalid JSON swarm response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}, ${firstResultParsedBody}` - ); - } - const swarmAsArray = Object.entries(swarm) as Array>; - if (!swarmAsArray.length) { - throw new Error( - `Invalid JSON swarmAsArray response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}, ${firstResultParsedBody}` - ); - } - // results will only contains the snode pubkeys which returned invalid/empty results - const results: Array = compact( - swarmAsArray.map(snode => { - const snodePubkey = snode[0]; - const snodeJson = snode[1]; - - const isFailed = snodeJson.failed || false; - - if (isFailed) { - const reason = snodeJson.reason; - const statusCode = snodeJson.code; - if (reason && statusCode) { - window?.log?.warn( - `Could not ${method} from ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )} due to error: ${reason}: ${statusCode}` - ); - // if we tried to make the delete on a snode not in our swarm, just trigger a pRetry error so the outer block here finds new snodes to make the request to. - if (statusCode === 421) { - throw new pRetry.AbortError( - `421 error on network ${method}. Retrying with a new snode` - ); - } - } else { - window?.log?.warn( - `Could not ${method} from ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}` - ); - } - return snodePubkey; + if (!swarm) { + throw new Error( + `Invalid JSON swarm response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}, ${firstResultParsedBody}` + ); + } + const swarmAsArray = Object.entries(swarm) as Array>; + if (!swarmAsArray.length) { + throw new Error( + `Invalid JSON swarmAsArray response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}, ${firstResultParsedBody}` + ); + } + // results will only contains the snode pubkeys which returned invalid/empty results + const results: Array = compact( + swarmAsArray.map(snode => { + const snodePubkey = snode[0]; + const snodeJson = snode[1]; + + const isFailed = snodeJson.failed || false; + + if (isFailed) { + const reason = snodeJson.reason; + const statusCode = snodeJson.code; + if (reason && statusCode) { + window?.log?.warn( + `Could not ${request.method} from ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )} due to error: ${reason}: ${statusCode}` + ); + // if we tried to make the delete on a snode not in our swarm, just trigger a pRetry error so the outer block here finds new snodes to make the request to. + if (statusCode === 421) { + throw new pRetry.AbortError( + `421 error on network ${request.method}. Retrying with a new snode` + ); } + } else { + window?.log?.warn( + `Could not ${request.method} from ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}` + ); + } + return snodePubkey; + } - const deletedObj = snodeJson.deleted as Record>; - const hashes: Array = []; + const deletedObj = snodeJson.deleted as Record>; + const hashes: Array = []; - for (const key in deletedObj) { - if (deletedObj.hasOwnProperty(key)) { - hashes.push(...deletedObj[key]); - } - } - const sortedHashes = hashes.sort(); - const signatureSnode = snodeJson.signature as string; - // The signature format is (with sortedHashes accross all namespaces) ( PUBKEY_HEX || TIMESTAMP || DELETEDHASH[0] || ... 
|| DELETEDHASH[N] ) - const dataToVerify = `${userX25519PublicKey}${ - signOpts.timestamp - }${sortedHashes.join('')}`; - - const dataToVerifyUtf8 = StringUtils.encode(dataToVerify, 'utf8'); - const isValid = sodium.crypto_sign_verify_detached( - fromBase64ToArray(signatureSnode), - new Uint8Array(dataToVerifyUtf8), - fromHexToArray(snodePubkey) - ); - if (!isValid) { - return snodePubkey; - } - return null; - }) + for (const key in deletedObj) { + if (deletedObj.hasOwnProperty(key)) { + hashes.push(...deletedObj[key]); + } + } + const sortedHashes = hashes.sort(); + const signatureSnode = snodeJson.signature as string; + // The signature format is (with sortedHashes across all namespaces) ( PUBKEY_HEX || TIMESTAMP || DELETEDHASH[0] || ... || DELETEDHASH[N] ) + const dataToVerify = `${usPk}${builtRequest.params.timestamp}${sortedHashes.join('')}`; + + const dataToVerifyUtf8 = StringUtils.encode(dataToVerify, 'utf8'); + const isValid = sodium.crypto_sign_verify_detached( + fromBase64ToArray(signatureSnode), + new Uint8Array(dataToVerifyUtf8), + fromHexToArray(snodePubkey) ); + if (!isValid) { + return snodePubkey; + } + return null; + }) + ); - return results; - } catch (e) { - throw new Error( - `Invalid JSON response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}, ${ret}` - ); - } - }, - { - retries: 1, - minTimeout: SnodeAPI.TEST_getMinTimeout(), - onFailedAttempt: e => { - window?.log?.warn( - `${method} INNER request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left...` - ); - }, - } - ); + return results; + } catch (e) { + throw new Error( + `Invalid JSON response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}, ${ret}` + ); + } }, { - retries: 3, + retries: 5, minTimeout: SnodeAPI.TEST_getMinTimeout(), onFailedAttempt: e => { window?.log?.warn( - `${method} OUTER request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left... ${e.message}` + `${request.method} OUTER request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left... ${e.message}` ); }, } @@ -178,7 +148,7 @@ const forceNetworkDeletion = async (): Promise | null> => { return maliciousSnodes; } catch (e) { - window?.log?.warn(`failed to ${method} everything on network:`, e); + window?.log?.warn(`failed to ${request.method} everything on network:`, e); return null; } }; @@ -186,165 +156,218 @@ const forceNetworkDeletion = async (): Promise | null> => { const TEST_getMinTimeout = () => 500; /** - * Locally deletes message and deletes message on the network (all nodes that contain the message) + * Delete the specified message hashes from the our own swarm only. + * Note: legacy group did not support removing messages from the swarm. 
*/ -const networkDeleteMessages = async (hashes: Array): Promise | null> => { +const networkDeleteMessageOurSwarm = async ( + messagesHashes: Set, + pubkey: PubkeyType +): Promise => { const sodium = await getSodiumRenderer(); - const userX25519PublicKey = UserUtils.getOurPubKeyStrFromCache(); - - const userED25519KeyPair = await UserUtils.getUserED25519KeyPair(); - - if (!userED25519KeyPair) { - window?.log?.warn('Cannot networkDeleteMessages, did not find user ed25519 key.'); - return null; + if (!PubKey.is05Pubkey(pubkey) || pubkey !== UserUtils.getOurPubKeyStrFromCache()) { + throw new Error('networkDeleteMessageOurSwarm with 05 pk can only for our own swarm'); + } + if (isEmpty(messagesHashes)) { + window.log.info('networkDeleteMessageOurSwarm: messageHashes is empty'); + return true; + } + const messageHashesArr = [...messagesHashes]; + const request = DeleteUserHashesFactory.makeUserHashesToDeleteSubRequest({ messagesHashes }); + if (!request) { + throw new Error('makeUserHashesToDeleteSubRequest returned invalid sub request'); } - const method = 'delete' as const; try { - const maliciousSnodes = await pRetry( + const success = await pRetry( async () => { - const userSwarm = await getSwarmFor(userX25519PublicKey); - const snodeToMakeRequestTo: Snode | undefined = sample(userSwarm); + const snodeToMakeRequestTo = await SnodePool.getNodeFromSwarmOrThrow(request.destination); + + const ret = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + [request], + snodeToMakeRequestTo, + 10000, + request.destination, + false + ); - if (!snodeToMakeRequestTo) { - window?.log?.warn('Cannot networkDeleteMessages, without a valid swarm node.'); - return null; + if (!ret || !ret?.[0].body || ret[0].code !== 200) { + throw new Error( + `networkDeleteMessageOurSwarm: Empty response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )} about pk: ${ed25519Str(request.destination)}` + ); } - return pRetry( - async () => { - const signOpts = await SnodeSignature.getSnodeSignatureByHashesParams({ - messages: hashes, - method, - pubkey: userX25519PublicKey, - }); - - const ret = await doSnodeBatchRequest( - [{ method, params: signOpts }], - snodeToMakeRequestTo, - 10000, - userX25519PublicKey + try { + const firstResultParsedBody = ret[0].body; + const { swarm } = firstResultParsedBody; + + if (!swarm) { + throw new Error( + `networkDeleteMessageOurSwarm: Invalid JSON swarm response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}, ${firstResultParsedBody}` + ); + } + const swarmAsArray = Object.entries(swarm) as Array>; + if (!swarmAsArray.length) { + throw new Error( + `networkDeleteMessageOurSwarm: Invalid JSON swarmAsArray response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}, ${firstResultParsedBody}` ); + } + // results will only contains the snode pubkeys which returned invalid/empty results + const results: Array = compact( + swarmAsArray.map(snode => { + const snodePubkey = snode[0]; + const snodeJson = snode[1]; + + const isFailed = snodeJson.failed || false; + + if (isFailed) { + const reason = snodeJson.reason; + const statusCode = snodeJson.code; + if (reason && statusCode) { + window?.log?.warn( + `networkDeleteMessageOurSwarm: Could not ${request.method} from ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )} due to error: ${reason}: ${statusCode}` + ); + } else { + window?.log?.warn( + `networkDeleteMessageOurSwarm: Could not ${request.method} from 
${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}` + ); + } + return snodePubkey; + } - if (!ret || !ret?.[0].body || ret[0].code !== 200) { - throw new Error( - `Empty response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}` + const responseHashes = snodeJson.deleted as Array; + const signatureSnode = snodeJson.signature as string; + // The signature looks like ( PUBKEY_HEX || RMSG[0] || ... || RMSG[N] || DMSG[0] || ... || DMSG[M] ) + const dataToVerify = `${request.destination}${messageHashesArr.join( + '' + )}${responseHashes.join('')}`; + const dataToVerifyUtf8 = StringUtils.encode(dataToVerify, 'utf8'); + const isValid = sodium.crypto_sign_verify_detached( + fromBase64ToArray(signatureSnode), + new Uint8Array(dataToVerifyUtf8), + fromHexToArray(snodePubkey) ); - } - - try { - const firstResultParsedBody = ret[0].body; - const { swarm } = firstResultParsedBody; - - if (!swarm) { - throw new Error( - `Invalid JSON swarm response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}, ${firstResultParsedBody}` - ); - } - const swarmAsArray = Object.entries(swarm) as Array>; - if (!swarmAsArray.length) { - throw new Error( - `Invalid JSON swarmAsArray response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}, ${firstResultParsedBody}` - ); + if (!isValid) { + return snodePubkey; } - // results will only contains the snode pubkeys which returned invalid/empty results - const results: Array = compact( - swarmAsArray.map(snode => { - const snodePubkey = snode[0]; - const snodeJson = snode[1]; - - const isFailed = snodeJson.failed || false; - - if (isFailed) { - const reason = snodeJson.reason; - const statusCode = snodeJson.code; - if (reason && statusCode) { - window?.log?.warn( - `Could not ${method} from ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )} due to error: ${reason}: ${statusCode}` - ); - // if we tried to make the delete on a snode not in our swarm, just trigger a pRetry error so the outer block here finds new snodes to make the request to. - if (statusCode === 421) { - throw new pRetry.AbortError( - `421 error on network ${method}. Retrying with a new snode` - ); - } - } else { - window?.log?.warn( - `Could not ${method} from ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}` - ); - } - return snodePubkey; - } + return null; + }) + ); - const responseHashes = snodeJson.deleted as Array; - const signatureSnode = snodeJson.signature as string; - // The signature looks like ( PUBKEY_HEX || RMSG[0] || ... || RMSG[N] || DMSG[0] || ... || DMSG[M] ) - const dataToVerify = `${userX25519PublicKey}${hashes.join( - '' - )}${responseHashes.join('')}`; - const dataToVerifyUtf8 = StringUtils.encode(dataToVerify, 'utf8'); - const isValid = sodium.crypto_sign_verify_detached( - fromBase64ToArray(signatureSnode), - new Uint8Array(dataToVerifyUtf8), - fromHexToArray(snodePubkey) - ); - if (!isValid) { - return snodePubkey; - } - return null; - }) - ); + return isEmpty(results); + } catch (e) { + throw new Error( + `networkDeleteMessageOurSwarm: Invalid JSON response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )}, ${ret}` + ); + } + }, + { + retries: 5, + minTimeout: SnodeAPI.TEST_getMinTimeout(), + onFailedAttempt: e => { + window?.log?.warn( + `networkDeleteMessageOurSwarm: ${request.method} request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left... 
${e.message}` + ); + }, + } + ); - return results; - } catch (e) { - throw new Error( - `Invalid JSON response got for ${method} on snode ${ed25519Str( - snodeToMakeRequestTo.pubkey_ed25519 - )}, ${ret}` - ); - } - }, - { - retries: 3, - minTimeout: SnodeAPI.TEST_getMinTimeout(), - onFailedAttempt: e => { - window?.log?.warn( - `${method} INNER request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left...` - ); - }, - } + return success; + } catch (e) { + window?.log?.warn( + `networkDeleteMessageOurSwarm: failed to ${request.method} message on network:`, + e + ); + return false; + } +}; + +/** + * Delete the specified message hashes from the 03-group's swarm. + * Returns true when the hashes have been removed successfully. + * Returns false when + * - we don't have the secretKey + * - if one of the hash was already not present in the swarm, + * - if the request failed too many times + */ +const networkDeleteMessagesForGroup = async ( + messagesHashes: Set, + groupPk: GroupPubkeyType +): Promise => { + if (!PubKey.is03Pubkey(groupPk)) { + throw new Error('networkDeleteMessagesForGroup with 05 pk can only delete for ourself'); + } + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || isEmpty(group.secretKey)) { + window.log.warn( + `networkDeleteMessagesForGroup: not deleting from swarm of 03-group ${messagesHashes.size} hashes as we do not the adminKey` + ); + return false; + } + + try { + const request = DeleteGroupHashesFactory.makeGroupHashesToDeleteSubRequest({ + messagesHashes, + group, + }); + if (!request) { + throw new Error( + 'DeleteGroupHashesFactory.makeGroupHashesToDeleteSubRequest failed to build a request ' + ); + } + + await pRetry( + async () => { + const snodeToMakeRequestTo = await SnodePool.getNodeFromSwarmOrThrow(request.destination); + + const ret = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + [request], + snodeToMakeRequestTo, + 10000, + request.destination, + false ); + + if (!ret || !ret?.[0].body || ret[0].code !== 200) { + throw new Error( + `networkDeleteMessagesForGroup: Empty response got for ${request.method} on snode ${ed25519Str( + snodeToMakeRequestTo.pubkey_ed25519 + )} about pk: ${ed25519Str(request.destination)}` + ); + } }, { - retries: 3, + retries: 5, minTimeout: SnodeAPI.TEST_getMinTimeout(), onFailedAttempt: e => { window?.log?.warn( - `${method} OUTER request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left... ${e.message}` + `networkDeleteMessagesForGroup: ${request.method} request attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left... 
${e.message}` ); }, } ); - return maliciousSnodes; + return true; } catch (e) { - window?.log?.warn(`failed to ${method} message on network:`, e); - return null; + window?.log?.warn(`networkDeleteMessagesForGroup: failed to delete messages on network:`, e); + return false; } }; export const SnodeAPI = { TEST_getMinTimeout, - networkDeleteMessages, + networkDeleteMessagesForGroup, + networkDeleteMessageOurSwarm, forceNetworkDeletion, }; diff --git a/ts/session/apis/snode_api/SnodeRequestTypes.ts b/ts/session/apis/snode_api/SnodeRequestTypes.ts index 365f975db4..1f849e2c06 100644 --- a/ts/session/apis/snode_api/SnodeRequestTypes.ts +++ b/ts/session/apis/snode_api/SnodeRequestTypes.ts @@ -1,71 +1,90 @@ -import { SharedConfigMessage } from '../../messages/outgoing/controlMessage/SharedConfigMessage'; -import { SnodeNamespaces } from './namespaces'; +import ByteBuffer from 'bytebuffer'; +import { GroupPubkeyType, PubkeyType, WithGroupPubkey } from 'libsession_util_nodejs'; +import { from_hex } from 'libsodium-wrappers-sumo'; +import { isEmpty, isString } from 'lodash'; +import { AwaitedReturn, assertUnreachable } from '../../../types/sqlSharedTypes'; +import { concatUInt8Array } from '../../crypto'; +import { PubKey } from '../../types'; +import { StringUtils, UserUtils } from '../../utils'; +import { ed25519Str } from '../../utils/String'; +import { + SnodeNamespace, + SnodeNamespaces, + SnodeNamespacesGroup, + SnodeNamespacesGroupConfig, + SnodeNamespacesUser, + SnodeNamespacesUserConfig, +} from './namespaces'; +import { GroupDetailsNeededForSignature, SnodeGroupSignature } from './signature/groupSignature'; +import { SnodeSignature } from './signature/snodeSignatures'; +import { ShortenOrExtend, WithMessagesHashes } from './types'; +import { TTL_DEFAULT } from '../../constants'; +import { NetworkTime } from '../../../util/NetworkTime'; +import { WithSecretKey, WithSignature, WithTimestamp } from '../../types/with'; -export type SwarmForSubRequest = { method: 'get_swarm'; params: { pubkey: string } }; - -type RetrieveMaxCountSize = { max_count?: number; max_size?: number }; -type RetrieveAlwaysNeeded = { - pubkey: string; - namespace: number; - last_hash: string; - timestamp?: number; -}; - -export type RetrievePubkeySubRequestType = { - method: 'retrieve'; - params: { - signature: string; - pubkey_ed25519: string; - namespace: number; - } & RetrieveAlwaysNeeded & - RetrieveMaxCountSize; -}; +type WithMaxSize = { max_size?: number }; +export type WithShortenOrExtend = { shortenOrExtend: 'shorten' | 'extend' | '' }; -/** Those namespaces do not require to be authenticated for storing messages. - * -> 0 is used for our swarm, and anyone needs to be able to send message to us. - * -> -10 is used for legacy closed group and we do not have authentication for them yet (but we will with the new closed groups) - * -> others are currently unused - * +/** + * This is the base sub request class that every other type of request has to extend. 
*/ -// type UnauthenticatedStoreNamespaces = -30 | -20 | -10 | 0 | 10 | 20 | 30; - -export type RetrieveLegacyClosedGroupSubRequestType = { - method: 'retrieve'; - params: { - namespace: SnodeNamespaces.ClosedGroupMessage; // legacy closed groups retrieve are not authenticated because the clients do not have a shared key - } & RetrieveAlwaysNeeded & - RetrieveMaxCountSize; -}; +abstract class SnodeAPISubRequest { + public abstract method: string; -export type RetrieveSubKeySubRequestType = { - method: 'retrieve'; - params: { - subkey: string; // 32-byte hex encoded string - signature: string; - namespace: number; - } & RetrieveAlwaysNeeded & - RetrieveMaxCountSize; -}; + public abstract loggingId(): string; + public abstract getDestination(): PubkeyType | GroupPubkeyType | ''; + /** + * When batch sending an array of requests, we will sort them by this number (the smallest will be put in front and the largest at the end). + * This is needed for sending and polling for 03-group keys for instance. + */ -export type RetrieveSubRequestType = - | RetrieveLegacyClosedGroupSubRequestType - | RetrievePubkeySubRequestType - | RetrieveSubKeySubRequestType - | UpdateExpiryOnNodeSubRequest; + public requestOrder() { + return 0; + } +} /** - * OXEND_REQUESTS + * Retrieve for legacy was not authenticated */ -export type OnsResolveSubRequest = { - method: 'oxend_request'; - params: { - endpoint: 'ons_resolve'; - params: { - type: 0; - name_hash: string; // base64EncodedNameHash +export class RetrieveLegacyClosedGroupSubRequest extends SnodeAPISubRequest { + method = 'retrieve' as const; + public readonly legacyGroupPk: PubkeyType; + public readonly last_hash: string; + public readonly max_size: number | undefined; + public readonly namespace = SnodeNamespaces.LegacyClosedGroup; + + constructor({ + last_hash, + legacyGroupPk, + max_size, + }: WithMaxSize & { last_hash: string; legacyGroupPk: PubkeyType }) { + super(); + this.legacyGroupPk = legacyGroupPk; + this.last_hash = last_hash; + this.max_size = max_size; + } + + public build() { + return { + method: this.method, + params: { + namespace: this.namespace, // legacy closed groups retrieve are not authenticated because the clients do not have a shared key + pubkey: this.legacyGroupPk, + last_hash: this.last_hash, + max_size: this.max_size, + // if we give a timestamp, a signature will be requested by the snode so this request for legacy does not take a timestamp + }, }; - }; -}; + } + + public getDestination() { + return this.legacyGroupPk; + } + + public loggingId(): string { + return `${this.method}-${SnodeNamespace.toRole(this.namespace)}`; + } +} /** * If you are thinking of adding the `limit` field here: don't. 
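These request classes replace the old hand-built parameter objects: each instance owns its namespace, knows its destination, and produces its own `params` through `build()`. A small sketch of the pattern using the legacy-group retrieve defined above (the pubkey and hash values are placeholders, and the import path is indicative):

```ts
import { PubkeyType } from 'libsession_util_nodejs';
import { RetrieveLegacyClosedGroupSubRequest } from './SnodeRequestTypes';

function buildLegacyGroupRetrieveExample(legacyGroupPk: PubkeyType) {
  const subRequest = new RetrieveLegacyClosedGroupSubRequest({
    legacyGroupPk,
    last_hash: '', // no previous hash in this sketch: fetch from the start of the namespace
    max_size: undefined, // let the snode apply its default page size
  });

  // Legacy closed group retrieves are unauthenticated, so build() is synchronous
  // and the built params carry no timestamp or signature.
  const built = subRequest.build();
  window.log.debug(`built ${subRequest.loggingId()} for ${subRequest.getDestination()}`);
  return built;
}
```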
@@ -94,119 +113,1240 @@ export type GetServicesNodesFromSeedRequest = { params: FetchSnodeListParams; }; -export type GetServiceNodesSubRequest = { - method: 'oxend_request'; - params: { - endpoint: 'get_service_nodes'; +export class RetrieveUserSubRequest extends SnodeAPISubRequest { + public method = 'retrieve' as const; + public readonly last_hash: string; + public readonly max_size: number | undefined; + public readonly namespace: SnodeNamespacesUser | SnodeNamespacesUserConfig; + + constructor({ + last_hash, + max_size, + namespace, + }: WithMaxSize & { + last_hash: string; + namespace: SnodeNamespacesUser | SnodeNamespacesUserConfig; + }) { + super(); + this.last_hash = last_hash; + this.max_size = max_size; + this.namespace = namespace; + } + + public async build() { + const { pubkey, pubkey_ed25519, signature, timestamp } = + await SnodeSignature.getSnodeSignatureParamsUs({ + method: this.method, + namespace: this.namespace, + }); + + return { + method: this.method, + params: { + namespace: this.namespace, + pubkey, + pubkey_ed25519, + signature, + timestamp, // we give a timestamp to force verification of the signature provided + last_hash: this.last_hash, + max_size: this.max_size, + }, + }; + } + + public getDestination() { + return UserUtils.getOurPubKeyStrFromCache(); + } + + public loggingId(): string { + return `${this.method}-${SnodeNamespace.toRole(this.namespace)}`; + } +} + +/** + * Build and sign a request with either the admin key if we have it, or with our sub account details + */ +export class RetrieveGroupSubRequest extends SnodeAPISubRequest { + public method = 'retrieve' as const; + public readonly last_hash: string; + public readonly max_size: number | undefined; + public readonly namespace: SnodeNamespacesGroup; + public readonly groupDetailsNeededForSignature: GroupDetailsNeededForSignature; + + constructor({ + last_hash, + max_size, + namespace, + groupDetailsNeededForSignature, + }: WithMaxSize & { + last_hash: string; + namespace: SnodeNamespacesGroup; + groupDetailsNeededForSignature: GroupDetailsNeededForSignature | null; + }) { + super(); + this.last_hash = last_hash; + this.max_size = max_size; + this.namespace = namespace; + if (isEmpty(groupDetailsNeededForSignature)) { + throw new Error('groupDetailsNeededForSignature is required'); + } + this.groupDetailsNeededForSignature = groupDetailsNeededForSignature; + } + + public async build() { /** - * If you are thinking of adding the `limit` field here: don't. - * We fetch the full list because we will remove from every cached swarms the snodes not found in that fresh list. - * If the limit was set, we would remove a lot of valid snodes from the swarms we've already fetched. + * This will return the signature details we can use with the admin secretKey if we have it, + * or with the sub account details if we don't. 
+ * If there is no valid groupDetails, this throws */ - params: FetchSnodeListParams; - }; -}; + const sigResult = await SnodeGroupSignature.getSnodeGroupSignature({ + method: this.method, + namespace: this.namespace, + group: this.groupDetailsNeededForSignature, + }); -export type StoreOnNodeParams = { - pubkey: string; - ttl: number; - timestamp: number; - data: string; - namespace: number; - // sig_timestamp?: number; - signature?: string; - pubkey_ed25519?: string; -}; + return { + method: this.method, + params: { + namespace: this.namespace, + ...sigResult, + last_hash: this.last_hash, + max_size: this.max_size, + }, + }; + } -export type StoreOnNodeParamsNoSig = Pick< - StoreOnNodeParams, - 'pubkey' | 'ttl' | 'timestamp' | 'ttl' | 'namespace' -> & { data64: string }; + public getDestination() { + return this.groupDetailsNeededForSignature.pubkeyHex; + } -export type DeleteFromNodeWithTimestampParams = { - timestamp: string | number; - namespace: number | null | 'all'; -} & DeleteSigParameters; -export type DeleteByHashesFromNodeParams = { messages: Array } & DeleteSigParameters; + public loggingId(): string { + return `${this.method}-${SnodeNamespace.toRole(this.namespace)}`; + } -export type StoreOnNodeMessage = { - pubkey: string; - timestamp: number; - namespace: number; - message: SharedConfigMessage; -}; + public override requestOrder() { + if (this.namespace === SnodeNamespaces.ClosedGroupKeys) { + // we want to retrieve the groups keys last + return 10; + } -export type StoreOnNodeSubRequest = { method: 'store'; params: StoreOnNodeParams }; -export type NetworkTimeSubRequest = { method: 'info'; params: object }; + return super.requestOrder(); + } +} -type DeleteSigParameters = { - pubkey: string; - pubkey_ed25519: string; - signature: string; -}; +export class OnsResolveSubRequest extends SnodeAPISubRequest { + public method = 'oxend_request' as const; + public readonly base64EncodedNameHash: string; -export type DeleteAllFromNodeSubRequest = { - method: 'delete_all'; - params: DeleteFromNodeWithTimestampParams; -}; + constructor(base64EncodedNameHash: string) { + super(); + this.base64EncodedNameHash = base64EncodedNameHash; + } -export type DeleteFromNodeSubRequest = { - method: 'delete'; - params: DeleteByHashesFromNodeParams; -}; + public build() { + return { + method: this.method, + params: { + endpoint: 'ons_resolve', + params: { + type: 0, + name_hash: this.base64EncodedNameHash, + }, + }, + }; + } -export type UpdateExpireNodeParams = { - pubkey: string; - pubkey_ed25519: string; - messages: Array; // Must have at least 2 arguments until the next storage server release (check fakeHash) - expiry: number; - signature: string; - extend?: boolean; - shorten?: boolean; -}; + public loggingId(): string { + return `${this.method}`; + } -export type UpdateExpiryOnNodeSubRequest = { - method: 'expire'; - params: UpdateExpireNodeParams; -}; + public getDestination() { + return '' as const; + } +} + +export class GetServiceNodesSubRequest extends SnodeAPISubRequest { + public method = 'oxend_request' as const; + + public build() { + return { + method: this.method, + params: { + /** + * If you are thinking of adding the `limit` field here: don't. + * We fetch the full list because we will remove from every cached swarms the snodes not found in that fresh list. + * If the limit was set, we would remove a lot of valid snodes from the swarms we've already fetched. 
+ */ + endpoint: 'get_service_nodes' as const, + params: { + active_only: true, + fields: { + public_ip: true, + storage_port: true, + pubkey_x25519: true, + pubkey_ed25519: true, + }, + }, + }, + }; + } + + public loggingId(): string { + return `${this.method}`; + } + + public getDestination() { + return '' as const; + } +} + +export class SwarmForSubRequest extends SnodeAPISubRequest { + public method = 'get_swarm' as const; + public readonly destination; + + constructor(pubkey: PubkeyType | GroupPubkeyType) { + super(); + this.destination = pubkey; + } + + public build() { + return { + method: this.method, + params: { + pubkey: this.destination, + params: { + active_only: true, + fields: { + public_ip: true, + storage_port: true, + pubkey_x25519: true, + pubkey_ed25519: true, + }, + }, + }, + } as const; + } + + public loggingId(): string { + return `${this.method}`; + } + + public getDestination() { + return this.destination; + } +} + +export class NetworkTimeSubRequest extends SnodeAPISubRequest { + public method = 'info' as const; + + public build() { + return { + method: this.method, + params: {}, + } as const; + } + + public loggingId(): string { + return `${this.method}`; + } + + public getDestination() { + return '' as const; + } +} + +abstract class AbstractRevokeSubRequest extends SnodeAPISubRequest { + public readonly destination: GroupPubkeyType; + public readonly timestamp: number; + public readonly revokeTokenHex: Array; + protected readonly adminSecretKey: Uint8Array; + + constructor({ + groupPk, + timestamp, + revokeTokenHex, + secretKey, + }: WithGroupPubkey & WithTimestamp & WithSecretKey & { revokeTokenHex: Array }) { + super(); + this.destination = groupPk; + this.timestamp = timestamp; + this.revokeTokenHex = revokeTokenHex; + this.adminSecretKey = secretKey; + if (this.revokeTokenHex.length === 0) { + throw new Error('AbstractRevokeSubRequest needs at least one token to do a change'); + } + } + + public async signWithAdminSecretKey() { + if (!this.adminSecretKey) { + throw new Error('we need an admin secretKey'); + } + const tokensBytes = from_hex(this.revokeTokenHex.join('')); + + const prefix = new Uint8Array(StringUtils.encode(`${this.method}${this.timestamp}`, 'utf8')); + const sigResult = await SnodeGroupSignature.signDataWithAdminSecret( + concatUInt8Array(prefix, tokensBytes), + { secretKey: this.adminSecretKey } + ); + + return sigResult.signature; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}`; + } + + public getDestination() { + return this.destination; + } +} + +export class SubaccountRevokeSubRequest extends AbstractRevokeSubRequest { + public method = 'revoke_subaccount' as const; + + public async build() { + const signature = await this.signWithAdminSecretKey(); + return { + method: this.method, + params: { + pubkey: this.destination, + signature, + revoke: this.revokeTokenHex, + timestamp: this.timestamp, + }, + }; + } +} + +export class SubaccountUnrevokeSubRequest extends AbstractRevokeSubRequest { + public method = 'unrevoke_subaccount' as const; + + /** + * For Revoke/unrevoke, this needs an admin signature + */ + public async build() { + const signature = await this.signWithAdminSecretKey(); + + return { + method: this.method, + params: { + pubkey: this.destination, + signature, + unrevoke: this.revokeTokenHex, + timestamp: this.timestamp, + }, + }; + } + + public getDestination() { + return this.destination; + } +} + +/** + * The getExpiries request can currently only be used for our own pubkey as 
we use it to fetch + * the expiries updated by another of our devices. + */ +export class GetExpiriesFromNodeSubRequest extends SnodeAPISubRequest { + public method = 'get_expiries' as const; + public readonly messageHashes: Array; + + constructor(args: WithMessagesHashes) { + super(); + this.messageHashes = args.messagesHashes; + if (this.messageHashes.length === 0) { + window.log.warn(`GetExpiriesFromNodeSubRequest given empty list of messageHashes`); + throw new Error('GetExpiriesFromNodeSubRequest given empty list of messageHashes'); + } + } + /** + * For Revoke/unrevoke, this needs an admin signature + */ + public async build() { + const timestamp = NetworkTime.now(); + + const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); + if (!ourPubKey) { + throw new Error('[GetExpiriesFromNodeSubRequest] No pubkey found'); + } + const signResult = await SnodeSignature.generateGetExpiriesOurSignature({ + timestamp, + messageHashes: this.messageHashes, + }); + + if (!signResult) { + throw new Error( + `[GetExpiriesFromNodeSubRequest] SnodeSignature.generateUpdateExpirySignature returned an empty result ${this.messageHashes}` + ); + } + + return { + method: this.method, + params: { + pubkey: ourPubKey, + pubkey_ed25519: signResult.pubkey_ed25519.toUpperCase(), + signature: signResult.signature, + messages: this.messageHashes, + timestamp, + }, + }; + } + + public loggingId(): string { + return `${this.method}-us`; + } + + public getDestination() { + return UserUtils.getOurPubKeyStrFromCache(); + } +} + +// todo: to use where delete_all is currently manually called +export class DeleteAllFromUserNodeSubRequest extends SnodeAPISubRequest { + public method = 'delete_all' as const; + public readonly namespace = 'all'; // we can only delete_all for all namespaces currently, but the backend allows more + + public async build() { + const signResult = await SnodeSignature.getSnodeSignatureParamsUs({ + method: this.method, + namespace: this.namespace, + }); + + if (!signResult) { + throw new Error( + `[DeleteAllFromUserNodeSubRequest] SnodeSignature.getSnodeSignatureParamsUs returned an empty result` + ); + } + + return { + method: this.method, + params: { + pubkey: signResult.pubkey, + pubkey_ed25519: signResult.pubkey_ed25519.toUpperCase(), + signature: signResult.signature, + timestamp: signResult.timestamp, + namespace: this.namespace, + }, + }; + } + + public loggingId(): string { + return `${this.method}-${this.namespace}`; + } + + public getDestination() { + return UserUtils.getOurPubKeyStrFromCache(); + } +} + +/** + * Delete all the messages and not the config messages for that group 03. 
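For comparison with the group-scoped delete_all below, the user-scoped `DeleteAllFromUserNodeSubRequest` above takes no arguments at all: it always targets our own swarm and the 'all' namespace, and signing happens inside `build()`. A minimal sketch (import path indicative):

```ts
import { DeleteAllFromUserNodeSubRequest } from './SnodeRequestTypes';

async function buildDeleteAllForUsExample() {
  // No constructor arguments: destination is our own pubkey, namespace is 'all',
  // and build() signs the request via SnodeSignature.getSnodeSignatureParamsUs.
  const subRequest = new DeleteAllFromUserNodeSubRequest();
  return subRequest.build();
}
```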
+ */ +export class DeleteAllFromGroupMsgNodeSubRequest extends SnodeAPISubRequest { + public method = 'delete_all' as const; + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + public readonly adminSecretKey: Uint8Array; + public readonly destination: GroupPubkeyType; + + constructor(args: WithGroupPubkey & WithSecretKey) { + super(); + this.destination = args.groupPk; + this.adminSecretKey = args.secretKey; + if (isEmpty(this.adminSecretKey)) { + throw new Error('DeleteAllFromGroupMsgNodeSubRequest needs an adminSecretKey'); + } + } + + public async build() { + const signDetails = await SnodeGroupSignature.getSnodeGroupSignature({ + method: this.method, + namespace: this.namespace, + group: { authData: null, pubkeyHex: this.destination, secretKey: this.adminSecretKey }, + }); + + if (!signDetails) { + throw new Error( + `[DeleteAllFromGroupMsgNodeSubRequest] SnodeSignature.getSnodeSignatureParamsUs returned an empty result` + ); + } + return { + method: this.method, + params: { + ...signDetails, + namespace: this.namespace, + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}-${this.namespace}`; + } + + public getDestination() { + return this.destination; + } +} + +export class DeleteHashesFromUserNodeSubRequest extends SnodeAPISubRequest { + public method = 'delete' as const; + public readonly messageHashes: Array; + public readonly destination: PubkeyType; + + constructor(args: WithMessagesHashes) { + super(); + this.messageHashes = args.messagesHashes; + this.destination = UserUtils.getOurPubKeyStrFromCache(); + + if (this.messageHashes.length === 0) { + window.log.warn(`DeleteHashesFromUserNodeSubRequest given empty list of messageHashes`); + throw new Error('DeleteHashesFromUserNodeSubRequest given empty list of messageHashes'); + } + } + + public async build() { + const signResult = await SnodeSignature.getSnodeSignatureByHashesParams({ + method: this.method, + messagesHashes: this.messageHashes, + pubkey: this.destination, + }); + + if (!signResult) { + throw new Error( + `[DeleteHashesFromUserNodeSubRequest] SnodeSignature.getSnodeSignatureParamsUs returned an empty result` + ); + } -export type GetExpiriesNodeParams = { + return { + method: this.method, + params: { + pubkey: signResult.pubkey, + pubkey_ed25519: signResult.pubkey_ed25519, + signature: signResult.signature, + messages: signResult.messages, + // timestamp is not needed for this one as the hashes can be deleted only once + }, + }; + } + + public loggingId(): string { + return `${this.method}-us`; + } + + public getDestination() { + return this.destination; + } +} + +export class DeleteHashesFromGroupNodeSubRequest extends SnodeAPISubRequest { + public method = 'delete' as const; + public readonly messageHashes: Array; + public readonly destination: GroupPubkeyType; + public readonly secretKey: Uint8Array; + + constructor(args: WithMessagesHashes & WithGroupPubkey & WithSecretKey) { + super(); + this.messageHashes = args.messagesHashes; + this.destination = args.groupPk; + this.secretKey = args.secretKey; + if (!this.secretKey || isEmpty(this.secretKey)) { + throw new Error('DeleteHashesFromGroupNodeSubRequest needs a secretKey'); + } + + if (this.messageHashes.length === 0) { + window.log.warn( + `DeleteHashesFromGroupNodeSubRequest given empty list of messageHashes for ${ed25519Str(this.destination)}` + ); + + throw new Error('DeleteHashesFromGroupNodeSubRequest given empty list of messageHashes'); + } + } + + public async build() { + // Note: this 
request can only be made by an admin and will be denied otherwise, so we make the secretKey mandatory in the constructor. + const signResult = await SnodeGroupSignature.getGroupSignatureByHashesParams({ + method: this.method, + messagesHashes: this.messageHashes, + groupPk: this.destination, + group: { authData: null, pubkeyHex: this.destination, secretKey: this.secretKey }, + }); + + return { + method: this.method, + params: { + ...signResult, + // pubkey_ed25519 is forbidden when doing the request for a group + // timestamp is not needed for this one as the hashes can be deleted only once + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}`; + } + + public getDestination() { + return this.destination; + } +} + +export class UpdateExpiryOnNodeUserSubRequest extends SnodeAPISubRequest { + public method = 'expire' as const; + public readonly messageHashes: Array; + public readonly expiryMs: number; + public readonly shortenOrExtend: ShortenOrExtend; + + constructor(args: WithMessagesHashes & WithShortenOrExtend & { expiryMs: number }) { + super(); + this.messageHashes = args.messagesHashes; + this.expiryMs = args.expiryMs; + this.shortenOrExtend = args.shortenOrExtend; + + if (this.messageHashes.length === 0) { + window.log.warn(`UpdateExpiryOnNodeUserSubRequest given empty list of messageHashes`); + + throw new Error('UpdateExpiryOnNodeUserSubRequest given empty list of messageHashes'); + } + } + + public async build() { + const signResult = await SnodeSignature.generateUpdateExpiryOurSignature({ + shortenOrExtend: this.shortenOrExtend, + messagesHashes: this.messageHashes, + timestamp: this.expiryMs, + }); + + if (!signResult) { + throw new Error( + `[UpdateExpiryOnNodeUserSubRequest] SnodeSignature.getSnodeSignatureParamsUs returned an empty result` + ); + } + + const shortenOrExtend = + this.shortenOrExtend === 'extend' + ? { extend: true } + : this.shortenOrExtend === 'shorten' + ? 
{ shorten: true }
+          : {};
+
+    return {
+      method: this.method,
+      params: {
+        pubkey: UserUtils.getOurPubKeyStrFromCache(),
+        pubkey_ed25519: signResult.pubkey,
+        signature: signResult.signature,
+        messages: this.messageHashes,
+        expiry: this.expiryMs,
+        ...shortenOrExtend,
+      },
+    };
+  }
+
+  public loggingId(): string {
+    return `${this.method}-us`;
+  }
+
+  public getDestination() {
+    return UserUtils.getOurPubKeyStrFromCache();
+  }
+}
+
+export class UpdateExpiryOnNodeGroupSubRequest extends SnodeAPISubRequest {
+  public method = 'expire' as const;
+  public readonly messageHashes: Array<string>;
+  public readonly expiryMs: number;
+  public readonly shortenOrExtend: ShortenOrExtend;
+  public readonly groupDetailsNeededForSignature: GroupDetailsNeededForSignature;
+
+  constructor(
+    args: WithMessagesHashes &
+      WithShortenOrExtend & {
+        expiryMs: number;
+        groupDetailsNeededForSignature: GroupDetailsNeededForSignature;
+      }
+  ) {
+    super();
+    this.messageHashes = args.messagesHashes;
+    this.expiryMs = args.expiryMs;
+    this.shortenOrExtend = args.shortenOrExtend;
+    this.groupDetailsNeededForSignature = args.groupDetailsNeededForSignature;
+
+    if (this.messageHashes.length === 0) {
+      window.log.warn(
+        `UpdateExpiryOnNodeGroupSubRequest given empty list of messageHashes for ${ed25519Str(this.groupDetailsNeededForSignature.pubkeyHex)}`
+      );
+
+      throw new Error('UpdateExpiryOnNodeGroupSubRequest given empty list of messageHashes');
+    }
+  }
+
+  public async build() {
+    const signResult = await SnodeGroupSignature.generateUpdateExpiryGroupSignature({
+      shortenOrExtend: this.shortenOrExtend,
+      messagesHashes: this.messageHashes,
+      expiryMs: this.expiryMs,
+      group: this.groupDetailsNeededForSignature,
+    });
+
+    if (!signResult) {
+      throw new Error(
+        `[UpdateExpiryOnNodeGroupSubRequest] SnodeGroupSignature.generateUpdateExpiryGroupSignature returned an empty result`
+      );
+    }
+
+    const shortenOrExtend =
+      this.shortenOrExtend === 'extend'
+        ? { extend: true }
+        : this.shortenOrExtend === 'shorten'
+          ? 

{ shorten: true } + : {}; + + return { + method: this.method, + params: { + messages: this.messageHashes, + ...shortenOrExtend, + ...signResult, + + // pubkey_ed25519 is forbidden for the group one + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.groupDetailsNeededForSignature.pubkeyHex)}`; + } + + public getDestination() { + return this.groupDetailsNeededForSignature.pubkeyHex; + } +} + +type WithCreatedAtNetworkTimestamp = { createdAtNetworkTimestamp: number }; + +export class StoreGroupMessageSubRequest extends SnodeAPISubRequest { + public method = 'store' as const; + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + public readonly destination: GroupPubkeyType; + public readonly ttlMs: number; + public readonly encryptedData: Uint8Array; + public readonly dbMessageIdentifier: string | null; + public readonly secretKey: Uint8Array | null; + public readonly authData: Uint8Array | null; + public readonly createdAtNetworkTimestamp: number; + + constructor( + args: WithGroupPubkey & + WithCreatedAtNetworkTimestamp & { + ttlMs: number; + encryptedData: Uint8Array; + dbMessageIdentifier: string | null; + authData: Uint8Array | null; + secretKey: Uint8Array | null; + } + ) { + super(); + this.destination = args.groupPk; + this.ttlMs = args.ttlMs; + this.encryptedData = args.encryptedData; + this.dbMessageIdentifier = args.dbMessageIdentifier; + this.authData = args.authData; + this.secretKey = args.secretKey; + this.createdAtNetworkTimestamp = args.createdAtNetworkTimestamp; + + if (isEmpty(this.encryptedData)) { + throw new Error('this.encryptedData cannot be empty'); + } + if (!PubKey.is03Pubkey(this.destination)) { + throw new Error('StoreGroupMessageSubRequest: group config namespace required a 03 pubkey'); + } + if (isEmpty(this.secretKey) && isEmpty(this.authData)) { + throw new Error('StoreGroupMessageSubRequest needs either authData or secretKey to be set'); + } + if (SnodeNamespace.isGroupConfigNamespace(this.namespace) && isEmpty(this.secretKey)) { + throw new Error( + `StoreGroupMessageSubRequest: group config namespace [${this.namespace}] requires an adminSecretKey` + ); + } + } + + public async build(): Promise<{ + method: 'store'; + params: StoreOnNodeNormalParams; + }> { + const encryptedDataBase64 = ByteBuffer.wrap(this.encryptedData).toString('base64'); + + // this will either sign with our admin key or with the sub account key if the admin one isn't there + const signDetails = await SnodeGroupSignature.getSnodeGroupSignature({ + method: this.method, + namespace: this.namespace, + group: { authData: this.authData, pubkeyHex: this.destination, secretKey: this.secretKey }, + }); + + if (!signDetails) { + throw new Error(`[${this.loggingId()}] sign details is empty result`); + } + + return { + method: this.method, + params: { + namespace: this.namespace, + ttl: this.ttlMs, + data: encryptedDataBase64, + ...signDetails, + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}-${SnodeNamespace.toRole( + this.namespace + )}`; + } + + public getDestination() { + return this.destination; + } +} + +abstract class StoreGroupConfigSubRequest< + T extends SnodeNamespacesGroupConfig | SnodeNamespaces.ClosedGroupRevokedRetrievableMessages, +> extends SnodeAPISubRequest { + public method = 'store' as const; + public readonly namespace: T; + public readonly destination: GroupPubkeyType; + public readonly ttlMs: number; + public readonly encryptedData: Uint8Array; + // this is mandatory 
for a group config store, if it is null, we throw + public readonly secretKey: Uint8Array | null; + + constructor( + args: WithGroupPubkey & { + namespace: T; + encryptedData: Uint8Array; + secretKey: Uint8Array | null; + } + ) { + super(); + this.namespace = args.namespace; + this.destination = args.groupPk; + this.ttlMs = TTL_DEFAULT.CONFIG_MESSAGE; + this.encryptedData = args.encryptedData; + this.secretKey = args.secretKey; + + if (isEmpty(this.encryptedData)) { + throw new Error('this.encryptedData cannot be empty'); + } + if (!PubKey.is03Pubkey(this.destination)) { + throw new Error('StoreGroupConfigSubRequest: group config namespace required a 03 pubkey'); + } + if (isEmpty(this.secretKey)) { + throw new Error('StoreGroupConfigSubRequest needs secretKey to be set'); + } + } + + public async build(): Promise<{ + method: 'store'; + params: StoreOnNodeNormalParams; + }> { + const encryptedDataBase64 = ByteBuffer.wrap(this.encryptedData).toString('base64'); + + // this will either sign with our admin key or with the sub account key if the admin one isn't there + const signDetails = await SnodeGroupSignature.getSnodeGroupSignature({ + method: this.method, + namespace: this.namespace, + group: { authData: null, pubkeyHex: this.destination, secretKey: this.secretKey }, + }); + + if (!signDetails) { + throw new Error(`[${this.loggingId()}] sign details is empty result`); + } + + return { + method: this.method, + params: { + namespace: this.namespace, + ttl: this.ttlMs, + data: encryptedDataBase64, + ...signDetails, + }, + }; + } + + public getDestination() { + return this.destination; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}-${SnodeNamespace.toRole( + this.namespace + )}`; + } + + public requestOrder(): number { + if (this.namespace === SnodeNamespaces.ClosedGroupKeys) { + // -10 means that we need this request to be sent before something with an order of 0 for instance + return -10; + } + return super.requestOrder(); + } +} + +export class StoreGroupInfoSubRequest extends StoreGroupConfigSubRequest { + constructor( + args: Omit[0], 'namespace'> + ) { + super({ ...args, namespace: SnodeNamespaces.ClosedGroupInfo }); + } +} +export class StoreGroupMembersSubRequest extends StoreGroupConfigSubRequest { + constructor( + args: Omit[0], 'namespace'> + ) { + super({ ...args, namespace: SnodeNamespaces.ClosedGroupMembers }); + } +} +export class StoreGroupKeysSubRequest extends StoreGroupConfigSubRequest { + constructor( + args: Omit[0], 'namespace'> + ) { + super({ ...args, namespace: SnodeNamespaces.ClosedGroupKeys }); + } +} + +export class StoreGroupRevokedRetrievableSubRequest extends StoreGroupConfigSubRequest { + constructor( + args: Omit[0], 'namespace'> + ) { + super({ ...args, namespace: SnodeNamespaces.ClosedGroupRevokedRetrievableMessages }); + } +} + +export class StoreUserConfigSubRequest extends SnodeAPISubRequest { + public method = 'store' as const; + public readonly namespace: SnodeNamespacesUserConfig; + public readonly ttlMs: number; + public readonly encryptedData: Uint8Array; + public readonly destination: PubkeyType; + + constructor(args: { + namespace: SnodeNamespacesUserConfig; + ttlMs: number; + encryptedData: Uint8Array; + }) { + super(); + this.namespace = args.namespace; + this.ttlMs = args.ttlMs; + this.encryptedData = args.encryptedData; + this.destination = UserUtils.getOurPubKeyStrFromCache(); + + if (isEmpty(this.encryptedData)) { + throw new Error('this.encryptedData cannot be empty'); + } + + if 
(isEmpty(this.destination)) { + throw new Error('this.destination cannot be empty'); + } + } + + public async build(): Promise<{ + method: 'store'; + params: StoreOnNodeNormalParams; + }> { + const encryptedDataBase64 = ByteBuffer.wrap(this.encryptedData).toString('base64'); + const ourPrivKey = (await UserUtils.getUserED25519KeyPairBytes())?.privKeyBytes; + if (!ourPrivKey) { + throw new Error('getUserED25519KeyPairBytes is empty'); + } + + const signDetails = await SnodeSignature.getSnodeSignatureParamsUs({ + method: this.method, + namespace: this.namespace, + }); + + if (!signDetails) { + throw new Error(`[StoreUserConfigSubRequest] signing returned an empty result`); + } + + return { + method: this.method, + params: { + namespace: this.namespace, + ttl: this.ttlMs, + data: encryptedDataBase64, + ...signDetails, + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}-${SnodeNamespace.toRole( + this.namespace + )}`; + } + + public getDestination() { + return this.destination; + } +} + +/** + * A request to send a message to the default namespace of another user (namespace 0 is not authenticated) + */ +export class StoreUserMessageSubRequest extends SnodeAPISubRequest { + public method = 'store' as const; + public readonly ttlMs: number; + public readonly encryptedData: Uint8Array; + public readonly namespace = SnodeNamespaces.Default; + public readonly destination: PubkeyType; + public readonly dbMessageIdentifier: string | null; + public readonly createdAtNetworkTimestamp: number; + + public readonly plainTextBuffer: Uint8Array | null; + + constructor( + args: WithCreatedAtNetworkTimestamp & { + ttlMs: number; + encryptedData: Uint8Array; + destination: PubkeyType; + dbMessageIdentifier: string | null; + /** + * When we send a message to a 1o1 recipient, we then need to send the same message to our own swarm as a synced message. + * To forward that message, we need the original message data, which is the plainTextBuffer field here. + */ + plainTextBuffer: Uint8Array | null; + } + ) { + super(); + this.ttlMs = args.ttlMs; + this.destination = args.destination; + this.encryptedData = args.encryptedData; + this.plainTextBuffer = args.plainTextBuffer; + this.dbMessageIdentifier = args.dbMessageIdentifier; + this.createdAtNetworkTimestamp = args.createdAtNetworkTimestamp; + + if (isEmpty(this.encryptedData)) { + throw new Error('this.encryptedData cannot be empty'); + } + if (this.plainTextBuffer && !this.plainTextBuffer.length) { + throw new Error('this.plainTextBuffer can be either null or non-empty'); + } + } + + public async build(): Promise<{ + method: 'store'; + params: StoreOnNodeNormalParams; + }> { + const encryptedDataBase64 = ByteBuffer.wrap(this.encryptedData).toString('base64'); + + return { + method: this.method, + params: { + pubkey: this.destination, + timestamp: NetworkTime.now(), + namespace: this.namespace, + ttl: this.ttlMs, + data: encryptedDataBase64, + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}-${SnodeNamespace.toRole( + this.namespace + )}`; + } + + public getDestination() { + return this.destination; + } +} + +/** + * A request to send a message to the default namespace of another user (namespace 0 is not authenticated) + * + * TODO: this is almost an exact match of `StoreUserMessageSubRequest` due to be removed once we get rid of legacy groups. 
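A sketch of queuing a plain 1-on-1 store with the `StoreUserMessageSubRequest` defined above. The destination, encrypted payload, and TTL are placeholders, and import paths are indicative; `plainTextBuffer` is only kept when the same message must also be pushed to our own swarm as a sync copy:

```ts
import { PubkeyType } from 'libsession_util_nodejs';
import { NetworkTime } from '../../../util/NetworkTime';
import { StoreUserMessageSubRequest } from './SnodeRequestTypes';

function makeUserStoreExample(destination: PubkeyType, encryptedData: Uint8Array) {
  return new StoreUserMessageSubRequest({
    destination,
    encryptedData, // an already-encrypted envelope; the constructor throws if it is empty
    ttlMs: 14 * 24 * 60 * 60 * 1000, // e.g. 14 days; real callers use their own TTL constants
    createdAtNetworkTimestamp: NetworkTime.now(),
    dbMessageIdentifier: null, // not linked to a database message in this sketch
    plainTextBuffer: null, // no synced copy needed here
  });
}
```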
+ */ +export class StoreLegacyGroupMessageSubRequest extends SnodeAPISubRequest { + public method = 'store' as const; + public readonly ttlMs: number; + public readonly encryptedData: Uint8Array; + public readonly namespace = SnodeNamespaces.LegacyClosedGroup; + public readonly destination: PubkeyType; + public readonly dbMessageIdentifier: string | null; + public readonly createdAtNetworkTimestamp: number; + + constructor( + args: WithCreatedAtNetworkTimestamp & { + ttlMs: number; + encryptedData: Uint8Array; + destination: PubkeyType; + dbMessageIdentifier: string | null; + } + ) { + super(); + this.ttlMs = args.ttlMs; + this.destination = args.destination; + this.encryptedData = args.encryptedData; + this.dbMessageIdentifier = args.dbMessageIdentifier; + this.createdAtNetworkTimestamp = args.createdAtNetworkTimestamp; + + if (isEmpty(this.encryptedData)) { + throw new Error('this.encryptedData cannot be empty'); + } + } + + public async build(): Promise<{ + method: 'store'; + params: StoreOnNodeNormalParams; + }> { + const encryptedDataBase64 = ByteBuffer.wrap(this.encryptedData).toString('base64'); + + return { + method: this.method, + params: { + // no signature required for a legacy group retrieve/store of message to namespace -10 + pubkey: this.destination, + timestamp: NetworkTime.now(), + namespace: this.namespace, + ttl: this.ttlMs, + data: encryptedDataBase64, + }, + }; + } + + public loggingId(): string { + return `${this.method}-${ed25519Str(this.destination)}-${SnodeNamespace.toRole( + this.namespace + )}`; + } + + public getDestination() { + return this.destination; + } +} + +/** + * When sending group libsession push(), we can also include extra messages to store (update messages, supplemental keys, etc) + */ +export type StoreGroupExtraData = { + networkTimestamp: number; + data: Uint8Array; + ttl: number; + pubkey: GroupPubkeyType; + dbMessageIdentifier: string | null; +} & { namespace: SnodeNamespacesGroupConfig | SnodeNamespaces.ClosedGroupMessages }; + +/** + * STORE SUB REQUESTS + */ +type StoreOnNodeNormalParams = { pubkey: string; - pubkey_ed25519: string; - messages: Array; + ttl: number; timestamp: number; - signature: string; + data: string; + namespace: number; + signature?: string; + pubkey_ed25519?: string; }; -export type GetExpiriesFromNodeSubRequest = { - method: 'get_expiries'; - params: GetExpiriesNodeParams; -}; +type StoreOnNodeSubAccountParams = Pick< + StoreOnNodeNormalParams, + 'data' | 'namespace' | 'ttl' | 'timestamp' +> & + WithSignature & { + pubkey: GroupPubkeyType; + subaccount: string; + subaccount_sig: string; + namespace: SnodeNamespaces.ClosedGroupMessages; // this can only be this one, sub accounts holder can not post to something else atm + // signature is mandatory for sub account + }; + +type StoreOnNodeParams = StoreOnNodeNormalParams | StoreOnNodeSubAccountParams; + +export type MethodBatchType = 'batch' | 'sequence'; // Until the next storage server release is released, we need to have at least 2 hashes in the list for the `get_expiries` AND for the `update_expiries` export const fakeHash = '///////////////////////////////////////////'; -export type OxendSubRequest = OnsResolveSubRequest | GetServiceNodesSubRequest; - -export type SnodeApiSubRequests = - | RetrieveSubRequestType +export type RawSnodeSubRequests = + | RetrieveLegacyClosedGroupSubRequest + | RetrieveUserSubRequest + | RetrieveGroupSubRequest + | StoreGroupInfoSubRequest + | StoreGroupMembersSubRequest + | StoreGroupKeysSubRequest + | StoreGroupMessageSubRequest + | 
StoreGroupRevokedRetrievableSubRequest + | StoreUserConfigSubRequest | SwarmForSubRequest - | OxendSubRequest - | StoreOnNodeSubRequest + | OnsResolveSubRequest + | GetServiceNodesSubRequest + | StoreUserMessageSubRequest + | StoreLegacyGroupMessageSubRequest | NetworkTimeSubRequest - | DeleteFromNodeSubRequest - | DeleteAllFromNodeSubRequest - | UpdateExpiryOnNodeSubRequest - | GetExpiriesFromNodeSubRequest; + | DeleteHashesFromGroupNodeSubRequest + | DeleteHashesFromUserNodeSubRequest + | DeleteAllFromUserNodeSubRequest + | UpdateExpiryOnNodeUserSubRequest + | UpdateExpiryOnNodeGroupSubRequest + | SubaccountRevokeSubRequest + | SubaccountUnrevokeSubRequest + | GetExpiriesFromNodeSubRequest + | DeleteAllFromGroupMsgNodeSubRequest; + +export type BuiltSnodeSubRequests = AwaitedReturn; + +export function builtRequestToLoggingId(request: BuiltSnodeSubRequests): string { + const { method, params } = request; + switch (method) { + case 'info': + case 'oxend_request': + return `${method}`; + + case 'delete': + case 'expire': + case 'get_expiries': + case 'get_swarm': + case 'revoke_subaccount': + case 'unrevoke_subaccount': { + const isUs = UserUtils.isUsFromCache(params.pubkey); + return `${method}-${isUs ? 'us' : ed25519Str(params.pubkey)}`; + } + case 'delete_all': { + const isUs = UserUtils.isUsFromCache(params.pubkey); + return `${method}-${isUs ? 'us' : ed25519Str(params.pubkey)}-${ + isString(params.namespace) ? params.namespace : SnodeNamespace.toRole(params.namespace) + }}`; + } + + case 'retrieve': + case 'store': { + const isUs = UserUtils.isUsFromCache(params.pubkey); + return `${method}-${isUs ? 'us' : ed25519Str(params.pubkey)}-${SnodeNamespace.toRole( + params.namespace + )}`; + } + default: + assertUnreachable(method, 'should be unreachable case'); + throw new Error('should be unreachable case'); + } +} // eslint-disable-next-line @typescript-eslint/array-type -export type NonEmptyArray = [T, ...T[]]; +type NonEmptyArray = [T, ...T[]]; -export type NotEmptyArrayOfBatchResults = NonEmptyArray<{ +export type BatchResultEntry = { code: number; body: Record; -}>; +}; -export type WithShortenOrExtend = { shortenOrExtend: 'shorten' | 'extend' | '' }; +export type NotEmptyArrayOfBatchResults = NonEmptyArray; export const MAX_SUBREQUESTS_COUNT = 20; + +export type BatchStoreWithExtraParams = + | StoreOnNodeParams + | DeleteHashesFromGroupNodeSubRequest + | DeleteHashesFromUserNodeSubRequest + | SubaccountRevokeSubRequest + | SubaccountUnrevokeSubRequest; diff --git a/ts/session/apis/snode_api/batchRequest.ts b/ts/session/apis/snode_api/batchRequest.ts index a11e714b2c..a5ef36392a 100644 --- a/ts/session/apis/snode_api/batchRequest.ts +++ b/ts/session/apis/snode_api/batchRequest.ts @@ -1,31 +1,47 @@ import { isArray } from 'lodash'; +import { MessageSender } from '../../sending'; import { Snode } from '../../../data/types'; import { SnodeResponseError } from '../../utils/errors'; import { processOnionRequestErrorAtDestination, SnodeResponse } from './onions'; -import { snodeRpc } from './sessionRpc'; +import { SessionRpc } from './sessionRpc'; import { + builtRequestToLoggingId, + BuiltSnodeSubRequests, MAX_SUBREQUESTS_COUNT, + MethodBatchType, NotEmptyArrayOfBatchResults, - SnodeApiSubRequests, + RawSnodeSubRequests, } from './SnodeRequestTypes'; +function logSubRequests(requests: Array) { + return `[${requests.map(builtRequestToLoggingId).join(', ')}]`; +} + /** * This is the equivalent to the batch send on sogs. 
The target node runs each sub request and returns a list of all the sub status and bodies. * If the global status code is not 200, an exception is thrown. * The body is already parsed from json and is enforced to be an Array of at least one element + * Note: This function does not retry by itself. + * * @param subRequests the list of requests to do * @param targetNode the node to do the request to, once all the onion routing is done * @param timeout the timeout at which we should cancel this request. * @param associatedWith used mostly for handling 421 errors, we need the pubkey the change is associated to * @param method can be either batch or sequence. A batch call will run all calls even if one of them fails. A sequence call will stop as soon as the first one fails */ -export async function doSnodeBatchRequest( - subRequests: Array, +async function doSnodeBatchRequestNoRetries( + subRequests: Array, targetNode: Snode, timeout: number, associatedWith: string | null, - method: 'batch' | 'sequence' = 'batch' + allow401s: boolean, + method: MethodBatchType = 'batch' ): Promise { + window.log.debug( + `doSnodeBatchRequestNoRetries "${method}":`, + JSON.stringify(logSubRequests(subRequests)) + ); + if (subRequests.length > MAX_SUBREQUESTS_COUNT) { window.log.error( `batch subRequests count cannot be more than ${MAX_SUBREQUESTS_COUNT}. Got ${subRequests.length}` @@ -34,17 +50,20 @@ export async function doSnodeBatchRequest( `batch subRequests count cannot be more than ${MAX_SUBREQUESTS_COUNT}. Got ${subRequests.length}` ); } - const result = await snodeRpc({ + const result = await SessionRpc.snodeRpcNoRetries({ method, params: { requests: subRequests }, targetNode, associatedWith, + allow401s, timeout, }); + if (!result) { window?.log?.warn( - `doSnodeBatchRequest - sessionRpc could not talk to ${targetNode.ip}:${targetNode.port}` + `doSnodeBatchRequestNoRetries - sessionRpc could not talk to ${targetNode.ip}:${targetNode.port}` ); + throw new SnodeResponseError( `doSnodeBatchRequest - sessionRpc could not talk to ${targetNode.ip}:${targetNode.port}` ); @@ -60,6 +79,7 @@ export async function doSnodeBatchRequest( body: JSON.stringify(resultRow.body), associatedWith: associatedWith || undefined, destinationSnodeEd25519: targetNode.pubkey_ed25519, + allow401s, }); } } @@ -67,6 +87,37 @@ export async function doSnodeBatchRequest( return decoded; } +/** + * This function can be called to make the sign the subrequests and then call doSnodeBatchRequestNoRetries with the signed requests. + * + * Note: this function does not retry. 
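The calling convention used throughout this PR: pick a node from the destination's swarm, hand the unsigned sub-requests to `doUnsignedSnodeBatchRequestNoRetries` (which signs them via `MessageSender.signSubRequests`), and leave retries to the caller. A sketch under that pattern, using the unauthenticated `NetworkTimeSubRequest` so nothing needs signing; the timeout and retry counts mirror other call sites in this diff, while the `minTimeout` value and the helper name are placeholders:

```ts
import pRetry from 'p-retry';
import { BatchRequests } from './batchRequest';
import { SnodePool } from './snodePool';
import { NetworkTimeSubRequest } from './SnodeRequestTypes';

async function networkTimeViaBatchExample(associatedWith: string) {
  return pRetry(
    async () => {
      const targetNode = await SnodePool.getNodeFromSwarmOrThrow(associatedWith);
      return BatchRequests.doUnsignedSnodeBatchRequestNoRetries(
        [new NetworkTimeSubRequest()], // unauthenticated, so nothing to sign here
        targetNode,
        10000, // ms timeout, as used by the other call sites in this PR
        associatedWith, // lets 421 handling evict misbehaving snodes from that swarm
        false, // allow401s
        'batch' // 'sequence' would stop at the first failing sub-request instead
      );
    },
    { retries: 3, minTimeout: 500 }
  );
}
```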
+ * + * @param unsignedSubRequests the unsigned sub requests to make + * @param targetNode the snode to make the request to + * @param timeout the max timeout to wait for a reply + * @param associatedWith the pubkey associated with this request (used to remove snode failing to reply from that users' swarm) + * @param method the type of request to make batch or sequence + * @returns + */ +async function doUnsignedSnodeBatchRequestNoRetries( + unsignedSubRequests: Array, + targetNode: Snode, + timeout: number, + associatedWith: string | null, + allow401s: boolean, + method: MethodBatchType = 'batch' +): Promise { + const signedSubRequests = await MessageSender.signSubRequests(unsignedSubRequests); + return BatchRequests.doSnodeBatchRequestNoRetries( + signedSubRequests, + targetNode, + timeout, + associatedWith, + allow401s, + method + ); +} + /** * Make sure the global batch status code is 200, parse the content as json and return it */ @@ -93,3 +144,8 @@ function decodeBatchRequest(snodeResponse: SnodeResponse): NotEmptyArrayOfBatchR } // "{"results":[{"body":"retrieve signature verification failed","code":401}]}" } + +export const BatchRequests = { + doSnodeBatchRequestNoRetries, + doUnsignedSnodeBatchRequestNoRetries, +}; diff --git a/ts/session/apis/snode_api/expireRequest.ts b/ts/session/apis/snode_api/expireRequest.ts index 2309ff1000..8435a71898 100644 --- a/ts/session/apis/snode_api/expireRequest.ts +++ b/ts/session/apis/snode_api/expireRequest.ts @@ -1,31 +1,19 @@ /* eslint-disable no-restricted-syntax */ -import { - chunk, - compact, - difference, - flatten, - isArray, - isEmpty, - isNumber, - sample, - uniqBy, -} from 'lodash'; +import { chunk, compact, difference, flatten, isArray, isEmpty, isNumber, uniqBy } from 'lodash'; import pRetry from 'p-retry'; import { Snode } from '../../../data/types'; import { getSodiumRenderer } from '../../crypto'; import { StringUtils, UserUtils } from '../../utils'; import { fromBase64ToArray, fromHexToArray } from '../../utils/String'; -import { EmptySwarmError } from '../../utils/errors'; import { SeedNodeAPI } from '../seed_node_api'; import { MAX_SUBREQUESTS_COUNT, - UpdateExpiryOnNodeSubRequest, + UpdateExpiryOnNodeUserSubRequest, WithShortenOrExtend, fakeHash, } from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; -import { getSwarmFor } from './snodePool'; -import { SnodeSignature } from './snodeSignatures'; +import { BatchRequests } from './batchRequest'; +import { SnodePool } from './snodePool'; import { ExpireMessageResultItem, ExpireMessagesResultsContent } from './types'; export type verifyExpireMsgsResponseSignatureProps = ExpireMessageResultItem & { @@ -152,13 +140,20 @@ export async function processExpireRequestResponse( type UpdatedExpiryWithHashes = { messageHashes: Array; updatedExpiryMs: number }; type UpdatedExpiryWithHash = { messageHash: string; updatedExpiryMs: number }; -async function updateExpiryOnNodes( +async function updateExpiryOnNodesNoRetries( targetNode: Snode, ourPubKey: string, - expireRequests: Array + expireRequests: Array ): Promise> { try { - const result = await doSnodeBatchRequest(expireRequests, targetNode, 4000, ourPubKey, 'batch'); + const result = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + expireRequests, + targetNode, + 10000, + ourPubKey, + false, + 'batch' + ); if (!result || result.length !== expireRequests.length) { window.log.error( @@ -189,7 +184,7 @@ async function updateExpiryOnNodes( ourPubKey, targetNode, bodyIndex as 
ExpireMessagesResultsContent, - request.params.messages + request.messageHashes ); }) ); @@ -225,7 +220,7 @@ async function updateExpiryOnNodes( } const hashesRequestedButNotInResults = difference( - flatten(expireRequests.map(m => m.params.messages)), + flatten(expireRequests.map(m => m.messageHashes)), [...flatten(changesValid.map(c => c.messageHashes)), fakeHash] ); if (!isEmpty(hashesRequestedButNotInResults)) { @@ -290,7 +285,7 @@ export async function buildExpireRequestBatchExpiry( export async function buildExpireRequestSingleExpiry( expireDetails: ExpireMessageWithExpiryOnSnodeProps -): Promise { +): Promise { const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); if (!ourPubKey) { window.log.error('[buildExpireRequestSingleExpiry] No user pubkey'); @@ -300,30 +295,11 @@ export async function buildExpireRequestSingleExpiry( // NOTE for shortenOrExtend, '' means we want to hardcode the expiry to a TTL value, otherwise it's a shorten or extension of the TTL - const signResult = await SnodeSignature.generateUpdateExpirySignature({ + return new UpdateExpiryOnNodeUserSubRequest({ + expiryMs, + messagesHashes: messageHashes, shortenOrExtend, - timestamp: expiryMs, - messageHashes, }); - - if (!signResult) { - window.log.error( - `[buildExpireRequestSingleExpiry] SnodeSignature.generateUpdateExpirySignature returned an empty result` - ); - return null; - } - return { - method: 'expire' as const, - params: { - pubkey: ourPubKey, - pubkey_ed25519: signResult.pubkey_ed25519.toUpperCase(), - messages: messageHashes, - expiry: expiryMs, - extend: shortenOrExtend === 'extend' || undefined, - shorten: shortenOrExtend === 'shorten' || undefined, - signature: signResult?.signature, - }, - }; } type GroupedBySameExpiry = Record>; @@ -399,10 +375,9 @@ export async function expireMessagesOnSnode( ): Promise> { const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); if (!ourPubKey) { - throw new Error('[expireMessageOnSnode] No pubkey found'); + throw new Error('[expireMessagesOnSnode] No pubkey found'); } - - let snode: Snode | undefined; + let targetNode: Snode | undefined; try { // key is a string even if it is really a number because Object.keys only knows strings... @@ -425,19 +400,15 @@ export async function expireMessagesOnSnode( if (!expireRequestsParams || isEmpty(expireRequestsParams)) { throw new Error(`Failed to build expire request`); } - // we most likely will only have a single chunk, so this is a bit of over engineered. // if any of those requests fails, make sure to not consider const allSettled = await Promise.allSettled( expireRequestsParams.map(chunkRequest => pRetry( async () => { - const swarm = await getSwarmFor(ourPubKey); - snode = sample(swarm); - if (!snode) { - throw new EmptySwarmError(ourPubKey, 'Ran out of swarm nodes to query'); - } - return updateExpiryOnNodes(snode, ourPubKey, chunkRequest); + targetNode = await SnodePool.getNodeFromSwarmOrThrow(ourPubKey); + + return updateExpiryOnNodesNoRetries(targetNode, ourPubKey, chunkRequest); }, { retries: 3, @@ -455,7 +426,7 @@ export async function expireMessagesOnSnode( return flatten(compact(allSettled.map(m => (m.status === 'fulfilled' ? m.value : null)))); } catch (e) { - const snodeStr = snode ? `${snode.ip}:${snode.port}` : 'null'; + const snodeStr = targetNode ? 
`${targetNode.ip}:${targetNode.port}` : 'null'; window?.log?.warn( `[expireMessageOnSnode] ${e.code || ''}${ e.message || e diff --git a/ts/session/apis/snode_api/factories/DeleteGroupHashesRequestFactory.ts b/ts/session/apis/snode_api/factories/DeleteGroupHashesRequestFactory.ts new file mode 100644 index 0000000000..033db8c480 --- /dev/null +++ b/ts/session/apis/snode_api/factories/DeleteGroupHashesRequestFactory.ts @@ -0,0 +1,35 @@ +import { UserGroupsGet } from 'libsession_util_nodejs'; +import { isEmpty } from 'lodash'; +import { ed25519Str } from '../../../utils/String'; +import { DeleteHashesFromGroupNodeSubRequest } from '../SnodeRequestTypes'; + +function makeGroupHashesToDeleteSubRequest({ + messagesHashes, + group, +}: { + group: Pick; + messagesHashes: Set; +}) { + const groupPk = group.pubkeyHex; + const messagesHashesArr = [...messagesHashes]; + if (messagesHashesArr.length) { + if (!group.secretKey || isEmpty(group.secretKey)) { + window.log.debug( + `makeGroupHashesToDeleteSubRequest: ${ed25519Str(groupPk)}: messagesHashesArr not empty but we do not have the secretKey` + ); + + throw new Error( + 'makeGroupHashesToDeleteSubRequest: messagesHashesArr not empty but we do not have the secretKey' + ); + } + + return new DeleteHashesFromGroupNodeSubRequest({ + messagesHashes: messagesHashesArr, + groupPk, + secretKey: group.secretKey, + }); + } + return undefined; +} + +export const DeleteGroupHashesFactory = { makeGroupHashesToDeleteSubRequest }; diff --git a/ts/session/apis/snode_api/factories/DeleteUserHashesRequestFactory.ts b/ts/session/apis/snode_api/factories/DeleteUserHashesRequestFactory.ts new file mode 100644 index 0000000000..441638aa4e --- /dev/null +++ b/ts/session/apis/snode_api/factories/DeleteUserHashesRequestFactory.ts @@ -0,0 +1,13 @@ +import { DeleteHashesFromUserNodeSubRequest } from '../SnodeRequestTypes'; + +function makeUserHashesToDeleteSubRequest({ messagesHashes }: { messagesHashes: Set }) { + const messagesHashesArr = [...messagesHashes]; + if (messagesHashesArr.length) { + return new DeleteHashesFromUserNodeSubRequest({ + messagesHashes: messagesHashesArr, + }); + } + return undefined; +} + +export const DeleteUserHashesFactory = { makeUserHashesToDeleteSubRequest }; diff --git a/ts/session/apis/snode_api/factories/StoreGroupRequestFactory.ts b/ts/session/apis/snode_api/factories/StoreGroupRequestFactory.ts new file mode 100644 index 0000000000..d5b01d06c5 --- /dev/null +++ b/ts/session/apis/snode_api/factories/StoreGroupRequestFactory.ts @@ -0,0 +1,191 @@ +import { UserGroupsGet } from 'libsession_util_nodejs'; +import { compact, isEmpty } from 'lodash'; +import { SignalService } from '../../../../protobuf'; +import { MetaGroupWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { GroupUpdateInfoChangeMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage'; +import { GroupUpdateMemberChangeMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage'; +import { MessageWrapper } from '../../../sending/MessageWrapper'; +import { ed25519Str } from '../../../utils/String'; +import { PendingChangesForGroup } from '../../../utils/libsession/libsession_utils'; +import { + StoreGroupExtraData, + StoreGroupInfoSubRequest, + StoreGroupKeysSubRequest, + StoreGroupMembersSubRequest, + StoreGroupMessageSubRequest, +} from '../SnodeRequestTypes'; +import { SnodeNamespaces } from '../namespaces'; +import { 
GroupUpdateDeleteMemberContentMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage'; +import { GroupUpdateMemberLeftNotificationMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftNotificationMessage'; + +export type StoreMessageToSubRequestType = + | GroupUpdateMemberChangeMessage + | GroupUpdateInfoChangeMessage + | GroupUpdateDeleteMemberContentMessage + | GroupUpdateMemberLeftNotificationMessage; + +async function makeGroupMessageSubRequest( + updateMessages: Array, + group: Pick +) { + const compactedMessages = compact(updateMessages); + if (isEmpty(compactedMessages)) { + return []; + } + const groupPk = compactedMessages[0].destination; + const allForSameDestination = compactedMessages.every(m => m.destination === groupPk); + if (!allForSameDestination) { + throw new Error('makeGroupMessageSubRequest: not all messages are for the same destination'); + } + + const messagesToEncrypt: Array = compactedMessages.map(updateMessage => { + const wrapped = MessageWrapper.wrapContentIntoEnvelope( + SignalService.Envelope.Type.SESSION_MESSAGE, + undefined, + updateMessage.createAtNetworkTimestamp, // message is signed with this timestmap + updateMessage.plainTextBuffer() + ); + + return { + namespace: SnodeNamespaces.ClosedGroupMessages, + pubkey: updateMessage.destination, + ttl: updateMessage.ttl(), + networkTimestamp: updateMessage.createAtNetworkTimestamp, + data: SignalService.Envelope.encode(wrapped).finish(), + dbMessageIdentifier: updateMessage.identifier, + }; + }); + + const encryptedContent = messagesToEncrypt.length + ? await MetaGroupWrapperActions.encryptMessages( + groupPk, + messagesToEncrypt.map(m => m.data) + ) + : []; + if (encryptedContent.length !== messagesToEncrypt.length) { + throw new Error( + 'makeGroupMessageSubRequest: MetaGroupWrapperActions.encryptMessages did not return the right count of items' + ); + } + + const updateMessagesEncrypted = messagesToEncrypt.map((requestDetails, index) => ({ + ...requestDetails, + data: encryptedContent[index], + })); + + const updateMessagesRequests = updateMessagesEncrypted.map(m => { + return new StoreGroupMessageSubRequest({ + encryptedData: m.data, + groupPk, + ttlMs: m.ttl, + dbMessageIdentifier: m.dbMessageIdentifier, + ...group, + createdAtNetworkTimestamp: m.networkTimestamp, + }); + }); + + return updateMessagesRequests; +} + +function makeStoreGroupKeysSubRequest({ + encryptedSupplementKeys, + group, +}: { + group: Pick; + encryptedSupplementKeys: Uint8Array | null; +}) { + const groupPk = group.pubkeyHex; + if (!encryptedSupplementKeys?.length) { + return undefined; + } + + // supplementalKeys are already encrypted, but we still need the secretKey to sign the request + + if (!group.secretKey || isEmpty(group.secretKey)) { + window.log.debug( + `pushChangesToGroupSwarmIfNeeded: ${ed25519Str(groupPk)}: keysEncryptedmessage not empty but we do not have the secretKey` + ); + + throw new Error( + 'pushChangesToGroupSwarmIfNeeded: keysEncryptedmessage not empty but we do not have the secretKey' + ); + } + return new StoreGroupKeysSubRequest({ + encryptedData: encryptedSupplementKeys, + groupPk, + secretKey: group.secretKey, + }); +} + +/** + * Make the requests needed to store that group config details. + * Note: the groupKeys request is always returned first, as it needs to be stored first on the swarm. 
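A sketch of driving the factory above when pushing pending group config changes, assuming we hold the admin `secretKey` (variables are placeholders and import paths are indicative). The keys request sorts first both here and via `requestOrder()`, which returns -10 for the `ClosedGroupKeys` namespace:

```ts
import { GroupPubkeyType } from 'libsession_util_nodejs';
import { PendingChangesForGroup } from '../../utils/libsession/libsession_utils';
import { StoreGroupRequestFactory } from './factories/StoreGroupRequestFactory';

function makeGroupConfigStoresExample(
  groupPk: GroupPubkeyType,
  adminSecretKey: Uint8Array,
  pendingConfigData: Array<PendingChangesForGroup>
) {
  // Returned as keys, then info, then members sub-requests.
  return StoreGroupRequestFactory.makeStoreGroupConfigSubRequest({
    group: { pubkeyHex: groupPk, secretKey: adminSecretKey },
    pendingConfigData,
  });
}
```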
+ * This is to avoid a race condition where some clients get a groupInfo encrypted with a new key, when the new groupKeys was not stored yet. + */ +function makeStoreGroupConfigSubRequest({ + group, + pendingConfigData, +}: { + group: Pick; + pendingConfigData: Array; +}) { + if (!pendingConfigData.length) { + return []; + } + const groupPk = group.pubkeyHex; + + if (!group.secretKey || isEmpty(group.secretKey)) { + window.log.debug( + `pushChangesToGroupSwarmIfNeeded: ${ed25519Str(groupPk)}: pendingConfigMsgs not empty but we do not have the secretKey` + ); + + throw new Error( + 'pushChangesToGroupSwarmIfNeeded: pendingConfigMsgs not empty but we do not have the secretKey' + ); + } + + const groupInfoSubRequests = compact( + pendingConfigData.map(m => + m.namespace === SnodeNamespaces.ClosedGroupInfo + ? new StoreGroupInfoSubRequest({ + encryptedData: m.ciphertext, + groupPk, + secretKey: group.secretKey, + }) + : null + ) + ); + + const groupMembersSubRequests = compact( + pendingConfigData.map(m => + m.namespace === SnodeNamespaces.ClosedGroupMembers + ? new StoreGroupMembersSubRequest({ + encryptedData: m.ciphertext, + groupPk, + secretKey: group.secretKey, + }) + : null + ) + ); + + const groupKeysSubRequests = compact( + pendingConfigData.map(m => + m.namespace === SnodeNamespaces.ClosedGroupKeys + ? new StoreGroupKeysSubRequest({ + encryptedData: m.ciphertext, + groupPk, + secretKey: group.secretKey, + }) + : null + ) + ); + + // we want to store first the keys (as the info and members might already be encrypted with them) + return [...groupKeysSubRequests, ...groupInfoSubRequests, ...groupMembersSubRequests]; +} + +export const StoreGroupRequestFactory = { + makeGroupMessageSubRequest, + makeStoreGroupConfigSubRequest, + makeStoreGroupKeysSubRequest, +}; diff --git a/ts/session/apis/snode_api/getExpiriesRequest.ts b/ts/session/apis/snode_api/getExpiriesRequest.ts index 0dda806ff1..ba4650af96 100644 --- a/ts/session/apis/snode_api/getExpiriesRequest.ts +++ b/ts/session/apis/snode_api/getExpiriesRequest.ts @@ -1,16 +1,14 @@ /* eslint-disable no-restricted-syntax */ -import { isFinite, isNil, isNumber, sample } from 'lodash'; +import { PubkeyType } from 'libsession_util_nodejs'; +import { isFinite, isNil, isNumber } from 'lodash'; import pRetry from 'p-retry'; import { Snode } from '../../../data/types'; import { UserUtils } from '../../utils'; -import { EmptySwarmError } from '../../utils/errors'; import { SeedNodeAPI } from '../seed_node_api'; import { GetExpiriesFromNodeSubRequest, fakeHash } from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; -import { GetNetworkTime } from './getNetworkTime'; -import { getSwarmFor } from './snodePool'; -import { SnodeSignature } from './snodeSignatures'; -import { GetExpiriesResultsContent } from './types'; +import { BatchRequests } from './batchRequest'; +import { SnodePool } from './snodePool'; +import { GetExpiriesResultsContent, WithMessagesHashes } from './types'; export type GetExpiriesRequestResponseResults = Record; @@ -41,16 +39,19 @@ export async function processGetExpiriesRequestResponse( return results; } -async function getExpiriesFromNodes( +async function getExpiriesFromNodesNoRetries( targetNode: Snode, - expireRequest: GetExpiriesFromNodeSubRequest + messageHashes: Array, + associatedWith: PubkeyType ) { try { - const result = await doSnodeBatchRequest( + const expireRequest = new GetExpiriesFromNodeSubRequest({ messagesHashes: messageHashes }); + const result = await 
BatchRequests.doUnsignedSnodeBatchRequestNoRetries( [expireRequest], targetNode, - 4000, - expireRequest.params.pubkey, + 10000, + associatedWith, + false, 'batch' ); @@ -67,21 +68,21 @@ async function getExpiriesFromNodes( const firstResult = result[0]; if (firstResult.code !== 200) { - throw Error(`getExpiriesFromNodes result is not 200 but ${firstResult.code}`); + throw Error(`getExpiriesFromNodesNoRetries result is not 200 but ${firstResult.code}`); } // expirationResults is a record of {messageHash: currentExpiry} const expirationResults = await processGetExpiriesRequestResponse( targetNode, firstResult.body.expiries as GetExpiriesResultsContent, - expireRequest.params.messages + expireRequest.messageHashes ); // Note: even if expirationResults is empty we need to process the results. // The status code is 200, so if the results is empty, it means all those messages already expired. // Note: a hash which already expired on the server is not going to be returned. So we force it's fetchedExpiry to be now() to make it expire asap - const expiriesWithForcedExpiried = expireRequest.params.messages.map(messageHash => ({ + const expiriesWithForcedExpiried = expireRequest.messageHashes.map(messageHash => ({ messageHash, fetchedExpiry: expirationResults?.[messageHash] || Date.now(), })); @@ -97,47 +98,6 @@ async function getExpiriesFromNodes( } } -export type GetExpiriesFromSnodeProps = { - messageHashes: Array; -}; - -export async function buildGetExpiriesRequest({ - messageHashes, -}: GetExpiriesFromSnodeProps): Promise { - const timestamp = GetNetworkTime.getNowWithNetworkOffset(); - - const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); - if (!ourPubKey) { - window.log.error('[buildGetExpiriesRequest] No pubkey found', messageHashes); - return null; - } - - const signResult = await SnodeSignature.generateGetExpiriesSignature({ - timestamp, - messageHashes, - }); - - if (!signResult) { - window.log.error( - `[buildGetExpiriesRequest] SnodeSignature.generateUpdateExpirySignature returned an empty result ${messageHashes}` - ); - return null; - } - - const getExpiriesParams: GetExpiriesFromNodeSubRequest = { - method: 'get_expiries', - params: { - pubkey: ourPubKey, - pubkey_ed25519: signResult.pubkey_ed25519.toUpperCase(), - messages: messageHashes, - timestamp, - signature: signResult?.signature, - }, - }; - - return getExpiriesParams; -} - /** * Sends an 'get_expiries' request which retrieves the current expiry timestamps of the given messages. * @@ -146,34 +106,24 @@ export async function buildGetExpiriesRequest({ * @param timestamp the time (ms) the request was initiated, must be within ±60s of the current time so using the server time is recommended. * @returns an arrray of the expiry timestamps (TTL) for the given messages */ -export async function getExpiriesFromSnode({ messageHashes }: GetExpiriesFromSnodeProps) { +export async function getExpiriesFromSnode({ messagesHashes }: WithMessagesHashes) { // FIXME There is a bug in the snode code that requires at least 2 messages to be requested. 
Will be fixed in next storage server release - if (messageHashes.length === 1) { - messageHashes.push(fakeHash); + if (messagesHashes.length === 1) { + messagesHashes.push(fakeHash); } const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); if (!ourPubKey) { - window.log.error('[getExpiriesFromSnode] No pubkey found', messageHashes); + window.log.error('[getExpiriesFromSnode] No pubkey found', messagesHashes); return []; } - let snode: Snode | undefined; - try { - const expireRequestParams = await buildGetExpiriesRequest({ messageHashes }); - if (!expireRequestParams) { - throw new Error(`Failed to build get_expiries request ${JSON.stringify({ messageHashes })}`); - } - const fetchedExpiries = await pRetry( async () => { - const swarm = await getSwarmFor(ourPubKey); - snode = sample(swarm); - if (!snode) { - throw new EmptySwarmError(ourPubKey, 'Ran out of swarm nodes to query'); - } - return getExpiriesFromNodes(snode, expireRequestParams); + const targetNode = await SnodePool.getNodeFromSwarmOrThrow(ourPubKey); + + return getExpiriesFromNodesNoRetries(targetNode, messagesHashes, ourPubKey); }, { retries: 3, @@ -189,11 +139,10 @@ export async function getExpiriesFromSnode({ messageHashes }: GetExpiriesFromSno return fetchedExpiries; } catch (e) { - const snodeStr = snode ? `${snode.ip}:${snode.port}` : 'null'; window?.log?.warn( `[getExpiriesFromSnode] ${e.code ? `${e.code} ` : ''}${ e.message || e - } by ${ourPubKey} for ${messageHashes} via snode:${snodeStr}` + } by ${ourPubKey} for ${messagesHashes}` ); throw e; } diff --git a/ts/session/apis/snode_api/getNetworkTime.ts b/ts/session/apis/snode_api/getNetworkTime.ts index 90f52d6ea7..aa052003e0 100644 --- a/ts/session/apis/snode_api/getNetworkTime.ts +++ b/ts/session/apis/snode_api/getNetworkTime.ts @@ -1,25 +1,28 @@ /** - * Makes a post to a node to receive the timestamp info. If non-existant, returns -1 + * Makes a post to a node to receive the timestamp info. If non-existent, returns -1 * @param snode Snode to send request to * @returns timestamp of the response from snode */ import { isNumber } from 'lodash'; + +import { BatchRequests } from './batchRequest'; import { Snode } from '../../../data/types'; import { NetworkTimeSubRequest } from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; - -function getNetworkTimeSubRequests(): Array { - const request: NetworkTimeSubRequest = { method: 'info', params: {} }; - - return [request]; -} +import { NetworkTime } from '../../../util/NetworkTime'; const getNetworkTime = async (snode: Snode): Promise => { - const subRequests = getNetworkTimeSubRequests(); - const result = await doSnodeBatchRequest(subRequests, snode, 4000, null); + const subRequest = new NetworkTimeSubRequest(); + + const result = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + [subRequest], + snode, + 10000, + null, + false + ); if (!result || !result.length) { - window?.log?.warn(`getNetworkTime on ${snode.ip}:${snode.port} returned falsish value`, result); + window?.log?.warn(`getNetworkTime on ${snode.ip}:${snode.port} returned falsy value`, result); throw new Error('getNetworkTime: Invalid result'); } @@ -38,48 +41,15 @@ const getNetworkTime = async (snode: Snode): Promise => { return timestamp; }; -let latestTimestampOffset = Number.MAX_SAFE_INTEGER; - function handleTimestampOffsetFromNetwork(_request: string, snodeTimestamp: number) { if (snodeTimestamp && isNumber(snodeTimestamp) && snodeTimestamp > 1609419600 * 1000) { // first january 2021. 
Arbitrary, just want to make sure the return timestamp is somehow valid and not some crazy low value - const now = Date.now(); - if (latestTimestampOffset === Number.MAX_SAFE_INTEGER) { - window?.log?.info(`first timestamp offset received: ${now - snodeTimestamp}ms`); - } - latestTimestampOffset = now - snodeTimestamp; - } -} - -/** - * This function has no use to be called except during tests. - * @returns the current offset we have with the rest of the network. - */ -function getLatestTimestampOffset() { - if (latestTimestampOffset === Number.MAX_SAFE_INTEGER) { - window.log.debug('latestTimestampOffset is not set yet'); - return 0; + const clockTime = Date.now(); + NetworkTime.setLatestTimestampOffset(clockTime - snodeTimestamp); } - // window.log.info('latestTimestampOffset is ', latestTimestampOffset); - - return latestTimestampOffset; -} - -function getNowWithNetworkOffset() { - // make sure to call exports here, as we stub the exported one for testing. - return Date.now() - GetNetworkTime.getLatestTimestampOffset(); -} - -function getNowWithNetworkOffsetSeconds() { - // make sure to call exports here, as we stub the exported one for testing. - - return Math.floor(GetNetworkTime.getNowWithNetworkOffset() / 1000); } export const GetNetworkTime = { getNetworkTime, handleTimestampOffsetFromNetwork, - getNowWithNetworkOffsetSeconds, - getLatestTimestampOffset, - getNowWithNetworkOffset, }; diff --git a/ts/session/apis/snode_api/getServiceNodesList.ts b/ts/session/apis/snode_api/getServiceNodesList.ts index f9a1993a1c..cc3f11a901 100644 --- a/ts/session/apis/snode_api/getServiceNodesList.ts +++ b/ts/session/apis/snode_api/getServiceNodesList.ts @@ -1,30 +1,10 @@ -import _, { intersectionWith, sampleSize } from 'lodash'; -import { SnodePool } from '.'; +import { compact, intersectionWith, sampleSize } from 'lodash'; +import { BatchRequests } from './batchRequest'; +import { GetNetworkTime } from './getNetworkTime'; +import { SnodePool } from './snodePool'; import { Snode } from '../../../data/types'; import { GetServiceNodesSubRequest } from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; -import { GetNetworkTime } from './getNetworkTime'; -import { minSnodePoolCount, requiredSnodesForAgreement } from './snodePool'; - -function buildSnodeListRequests(): Array { - const request: GetServiceNodesSubRequest = { - method: 'oxend_request', - params: { - endpoint: 'get_service_nodes', - params: { - active_only: true, - fields: { - public_ip: true, - storage_port: true, - pubkey_x25519: true, - pubkey_ed25519: true, - storage_server_version: true, - }, - }, - }, - }; - return [request]; -} +import { SnodePoolConstants } from './snodePoolConstants'; /** * Returns a list of unique snodes got from the specified targetNode. @@ -32,8 +12,15 @@ function buildSnodeListRequests(): Array { * This is exported for testing purpose only. 
*/ async function getSnodePoolFromSnode(targetNode: Snode): Promise> { - const requests = buildSnodeListRequests(); - const results = await doSnodeBatchRequest(requests, targetNode, 4000, null); + const subRequest = new GetServiceNodesSubRequest(); + + const results = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + [subRequest], + targetNode, + 10000, + null, + false + ); const firstResult = results[0]; @@ -62,7 +49,7 @@ async function getSnodePoolFromSnode(targetNode: Snode): Promise> { GetNetworkTime.handleTimestampOffsetFromNetwork('get_service_nodes', json.t); // we the return list by the snode is already made of uniq snodes - return _.compact(snodes); + return compact(snodes); } catch (e) { window?.log?.error('Invalid json response'); return []; @@ -78,7 +65,7 @@ async function getSnodePoolFromSnode(targetNode: Snode): Promise> { */ async function getSnodePoolFromSnodes() { const existingSnodePool = await SnodePool.getSnodePoolFromDBOrFetchFromSeed(); - if (existingSnodePool.length <= minSnodePoolCount) { + if (existingSnodePool.length <= SnodePoolConstants.minSnodePoolCount) { window?.log?.warn( 'getSnodePoolFromSnodes: Cannot get snodes list from snodes; not enough snodes', existingSnodePool.length @@ -115,9 +102,9 @@ async function getSnodePoolFromSnodes() { } ); // We want the snodes to agree on at least this many snodes - if (commonSnodes.length < requiredSnodesForAgreement) { + if (commonSnodes.length < SnodePoolConstants.requiredSnodesForAgreement) { throw new Error( - `Inconsistent snode pools. We did not get at least ${requiredSnodesForAgreement} in common` + `Inconsistent snode pools. We did not get at least ${SnodePoolConstants.requiredSnodesForAgreement} in common` ); } return commonSnodes; diff --git a/ts/session/apis/snode_api/getSwarmFor.ts b/ts/session/apis/snode_api/getSwarmFor.ts index ae94530986..3c38ce78c7 100644 --- a/ts/session/apis/snode_api/getSwarmFor.ts +++ b/ts/session/apis/snode_api/getSwarmFor.ts @@ -1,14 +1,11 @@ import { isArray } from 'lodash'; import pRetry from 'p-retry'; +import { PubKey } from '../../types'; +import { BatchRequests } from './batchRequest'; +import { GetNetworkTime } from './getNetworkTime'; +import { SnodePool } from './snodePool'; import { Snode } from '../../../data/types'; import { SwarmForSubRequest } from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; -import { GetNetworkTime } from './getNetworkTime'; -import { getRandomSnode } from './snodePool'; - -function buildSwarmForSubRequests(pubkey: string): Array { - return [{ method: 'get_swarm', params: { pubkey } }]; -} /** * get snodes for pubkey from random snode. 
Uses an existing snode
@@ -17,9 +14,18 @@ async function requestSnodesForPubkeyWithTargetNodeRetryable(
   pubkey: string,
   targetNode: Snode
 ): Promise> {
-  const subRequests = buildSwarmForSubRequests(pubkey);
-
-  const result = await doSnodeBatchRequest(subRequests, targetNode, 4000, pubkey);
+  if (!PubKey.is03Pubkey(pubkey) && !PubKey.is05Pubkey(pubkey)) {
+    throw new Error('invalid pubkey given for swarmFor');
+  }
+  const subrequest = new SwarmForSubRequest(pubkey);
+
+  const result = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries(
+    [subrequest],
+    targetNode,
+    10000,
+    pubkey,
+    false
+  );
 
   if (!result || !result.length) {
     window?.log?.warn(
@@ -87,7 +93,7 @@ async function requestSnodesForPubkeyRetryable(pubKey: string): Promise {
-      const targetNode = await getRandomSnode();
+      const targetNode = await SnodePool.getRandomSnode();
 
       return requestSnodesForPubkeyWithTargetNode(pubKey, targetNode);
     },
@@ -95,7 +101,7 @@ async function requestSnodesForPubkeyRetryable(pubKey: string): Promise {
         window?.log?.warn(
           `requestSnodesForPubkeyRetryable attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left...`
diff --git a/ts/session/apis/snode_api/index.ts b/ts/session/apis/snode_api/index.ts
index eb5e918308..fa2326d588 100644
--- a/ts/session/apis/snode_api/index.ts
+++ b/ts/session/apis/snode_api/index.ts
@@ -1,7 +1,6 @@
-import * as SnodePool from './snodePool';
 import * as SNodeAPI from './SNodeAPI';
 import * as Onions from './onions';
 import { getSwarmPollingInstance } from './swarmPolling';
 
-export { SnodePool, SNodeAPI, Onions, getSwarmPollingInstance };
+export { Onions, SNodeAPI, getSwarmPollingInstance };
diff --git a/ts/session/apis/snode_api/namespaces.ts b/ts/session/apis/snode_api/namespaces.ts
index e4345401b3..377b706c55 100644
--- a/ts/session/apis/snode_api/namespaces.ts
+++ b/ts/session/apis/snode_api/namespaces.ts
@@ -1,12 +1,17 @@
 import { last, orderBy } from 'lodash';
-import { assertUnreachable } from '../../../types/sqlSharedTypes';
 import { PickEnum } from '../../../types/Enums';
+import { assertUnreachable } from '../../../types/sqlSharedTypes';
 
 export enum SnodeNamespaces {
+  /**
+   * The messages sent to a closed group are sent and polled from this namespace
+   */
+  LegacyClosedGroup = -10,
+
   /**
    * This is the namespace anyone can deposit a message for us
    */
-  UserMessages = 0,
+  Default = 0,
 
   /**
    * This is the namespace used to sync our profile
@@ -27,42 +32,80 @@ export enum SnodeNamespaces {
   UserGroups = 5,
 
   /**
-   * The messages sent to a closed group are sent and polled from this namespace
+   * This is the namespace that revoked members can still poll messages from
    */
-  ClosedGroupMessage = -10,
+  ClosedGroupRevokedRetrievableMessages = -11,
 
   /**
-   * This is the namespace used to sync the closed group details for each of the closed groups we are polling
+   * This is the namespace used to sync the closed group messages for each closed group
    */
-  // ClosedGroupInfo = 1,
+  ClosedGroupMessages = 11,
+
+  /**
+   * This is the namespace used to sync the keys for each closed group
+   */
+  ClosedGroupKeys = 12,
+
+  /**
+   * This is the namespace used to sync the closed group details for each closed group
+   */
+  ClosedGroupInfo = 13,
+
+  /**
+   * This is the namespace used to sync the members for each closed group
+   */
+  ClosedGroupMembers = 14,
 }
 
-export type SnodeNamespacesGroup = PickEnum<
+export type SnodeNamespacesLegacyGroup = PickEnum<
   SnodeNamespaces,
-  SnodeNamespaces.ClosedGroupMessage // | SnodeNamespaces.ClosedGroupInfo
SnodeNamespaces.LegacyClosedGroup >; -export type SnodeNamespacesUser = PickEnum< +export type SnodeNamespacesGroupConfig = PickEnum< SnodeNamespaces, - SnodeNamespaces.UserContacts | SnodeNamespaces.UserProfile | SnodeNamespaces.UserMessages + | SnodeNamespaces.ClosedGroupInfo + | SnodeNamespaces.ClosedGroupMembers + | SnodeNamespaces.ClosedGroupKeys +>; + +/** + * the namespaces to which a 03-group can store/retrieve messages from/to + */ +export type SnodeNamespacesGroup = + | SnodeNamespacesGroupConfig + | PickEnum + | PickEnum; + +export type SnodeNamespacesUser = PickEnum; + +export type SnodeNamespacesUserConfig = PickEnum< + SnodeNamespaces, + | SnodeNamespaces.UserProfile + | SnodeNamespaces.UserContacts + | SnodeNamespaces.UserGroups + | SnodeNamespaces.ConvoInfoVolatile >; /** * Returns true if that namespace is associated with the config of a user (not his messages, only configs) */ // eslint-disable-next-line consistent-return -function isUserConfigNamespace(namespace: SnodeNamespaces) { +function isUserConfigNamespace(namespace: SnodeNamespaces): namespace is SnodeNamespacesUserConfig { switch (namespace) { - case SnodeNamespaces.UserMessages: - // user messages is not hosting config based messages - return false; - case SnodeNamespaces.UserContacts: case SnodeNamespaces.UserProfile: + case SnodeNamespaces.UserContacts: case SnodeNamespaces.UserGroups: case SnodeNamespaces.ConvoInfoVolatile: return true; - // case SnodeNamespaces.ClosedGroupInfo: - case SnodeNamespaces.ClosedGroupMessage: + case SnodeNamespaces.ClosedGroupInfo: + case SnodeNamespaces.ClosedGroupKeys: + case SnodeNamespaces.ClosedGroupMembers: + case SnodeNamespaces.ClosedGroupMessages: + case SnodeNamespaces.LegacyClosedGroup: + case SnodeNamespaces.ClosedGroupRevokedRetrievableMessages: + case SnodeNamespaces.Default: + // user messages is not hosting config based messages return false; default: @@ -75,21 +118,86 @@ function isUserConfigNamespace(namespace: SnodeNamespaces) { } } -// eslint-disable-next-line consistent-return -function namespacePriority(namespace: SnodeNamespaces): number { +/** + * Returns true if that namespace is one of the namespace used for the 03-group config messages + */ +function isGroupConfigNamespace( + namespace: SnodeNamespaces +): namespace is SnodeNamespacesGroupConfig { switch (namespace) { - case SnodeNamespaces.UserMessages: - return 10; + case SnodeNamespaces.Default: case SnodeNamespaces.UserContacts: - return 1; case SnodeNamespaces.UserProfile: - return 1; case SnodeNamespaces.UserGroups: - return 1; case SnodeNamespaces.ConvoInfoVolatile: - return 1; - case SnodeNamespaces.ClosedGroupMessage: + case SnodeNamespaces.LegacyClosedGroup: + case SnodeNamespaces.ClosedGroupMessages: + case SnodeNamespaces.ClosedGroupRevokedRetrievableMessages: + return false; + case SnodeNamespaces.ClosedGroupInfo: + case SnodeNamespaces.ClosedGroupKeys: + case SnodeNamespaces.ClosedGroupMembers: + return true; + + default: + try { + assertUnreachable(namespace, `isGroupConfigNamespace case not handled: ${namespace}`); + } catch (e) { + window.log.warn(`isGroupConfigNamespace case not handled: ${namespace}: ${e.message}`); + } + } + return false; +} + +/** + * + * @param namespace the namespace to check + * @returns true if that namespace is a valid namespace for a 03 group (either a config namespace or a message namespace) + */ +function isGroupNamespace(namespace: SnodeNamespaces): namespace is SnodeNamespacesGroup { + if (isGroupConfigNamespace(namespace)) { + return true; + } + if 
(namespace === SnodeNamespaces.ClosedGroupMessages) { + return true; + } + if (namespace === SnodeNamespaces.ClosedGroupRevokedRetrievableMessages) { + return true; + } + switch (namespace) { + case SnodeNamespaces.Default: + case SnodeNamespaces.UserContacts: + case SnodeNamespaces.UserProfile: + case SnodeNamespaces.UserGroups: + case SnodeNamespaces.ConvoInfoVolatile: + case SnodeNamespaces.LegacyClosedGroup: + return false; + default: + try { + assertUnreachable(namespace, `isGroupNamespace case not handled: ${namespace}`); + } catch (e) { + window.log.warn(`isGroupNamespace case not handled: ${namespace}: ${e.message}`); + return false; + } + } + return false; +} + +function namespacePriority(namespace: SnodeNamespaces): 10 | 1 { + switch (namespace) { + case SnodeNamespaces.Default: + case SnodeNamespaces.ClosedGroupMessages: return 10; + case SnodeNamespaces.UserGroups: + case SnodeNamespaces.ConvoInfoVolatile: + case SnodeNamespaces.UserProfile: + case SnodeNamespaces.UserContacts: + case SnodeNamespaces.LegacyClosedGroup: + case SnodeNamespaces.ClosedGroupInfo: + case SnodeNamespaces.ClosedGroupMembers: + case SnodeNamespaces.ClosedGroupKeys: + case SnodeNamespaces.ClosedGroupRevokedRetrievableMessages: + return 1; default: try { @@ -99,6 +207,7 @@ function namespacePriority(namespace: SnodeNamespaces): number { return 1; } } + return 1; } function maxSizeMap(namespaces: Array) { @@ -125,7 +234,45 @@ function maxSizeMap(namespaces: Array) { return sizeMap; } +function toRole(namespace: number) { + const asKnownNamespace: SnodeNamespaces = namespace; + switch (asKnownNamespace) { + case SnodeNamespaces.LegacyClosedGroup: + return 'legacyGroup'; + case SnodeNamespaces.Default: + return 'default'; + case SnodeNamespaces.UserProfile: + return 'userProfile'; + case SnodeNamespaces.UserContacts: + return 'userContacts'; + case SnodeNamespaces.ConvoInfoVolatile: + return 'convoVolatile'; + case SnodeNamespaces.UserGroups: + return 'userGroups'; + case SnodeNamespaces.ClosedGroupMessages: + return 'groupMsg'; + case SnodeNamespaces.ClosedGroupKeys: + return 'groupKeys'; + case SnodeNamespaces.ClosedGroupInfo: + return 'groupInfo'; + case SnodeNamespaces.ClosedGroupMembers: + return 'groupMembers'; + case SnodeNamespaces.ClosedGroupRevokedRetrievableMessages: + return 'groupRevoked'; + default: + return `${namespace}`; + } +} + +function toRoles(namespace: Array) { + return namespace.map(toRole); +} + export const SnodeNamespace = { isUserConfigNamespace, + isGroupConfigNamespace, + isGroupNamespace, maxSizeMap, + toRoles, + toRole, }; diff --git a/ts/session/apis/snode_api/onions.ts b/ts/session/apis/snode_api/onions.ts index 4c442a2623..60bbbd5656 100644 --- a/ts/session/apis/snode_api/onions.ts +++ b/ts/session/apis/snode_api/onions.ts @@ -1,6 +1,7 @@ +import https from 'https'; +// eslint-disable import/no-named-default import { AbortSignal } from 'abort-controller'; import ByteBuffer from 'bytebuffer'; -import https from 'https'; import { to_string } from 'libsodium-wrappers-sumo'; import { cloneDeep, isEmpty, isString, omit } from 'lodash'; import insecureNodeFetch, { RequestInit, Response } from 'node-fetch'; @@ -8,7 +9,7 @@ import pRetry from 'p-retry'; // eslint-disable-next-line import/no-unresolved import { AbortSignal as AbortSignalNode } from 'node-fetch/externals'; -import { dropSnodeFromSnodePool, dropSnodeFromSwarmIfNeeded, updateSwarmFor } from './snodePool'; +import { SnodePool } from './snodePool'; import { OnionPaths } from '../../onions'; import { 
incrementBadPathCountOrDrop } from '../../onions/onionPath'; @@ -262,6 +263,14 @@ function process406Or425Error(statusCode: number) { } } +function process401Error(statusCode: number) { + if (statusCode === 401) { + throw new pRetry.AbortError( + `Got 401 status code. Most likely a client bug. Retries would not help. ` + ); + } +} + function processOxenServerError(_statusCode: number, body?: string) { if (body === OXEN_SERVER_ERROR) { window?.log?.warn('[path] Got Oxen server Error. Not much to do if the server has troubles.'); @@ -298,19 +307,24 @@ export async function processOnionRequestErrorAtDestination({ body, destinationSnodeEd25519, associatedWith, + allow401s, }: { statusCode: number; body: string; destinationSnodeEd25519?: string; associatedWith?: string; + allow401s: boolean; }) { if (statusCode === 200) { return; } window?.log?.info( - `processOnionRequestErrorAtDestination. statusCode nok: ${statusCode}: "${body}"` + `processOnionRequestErrorAtDestination. statusCode nok: ${statusCode}: associatedWith:${associatedWith} destinationSnodeEd25519:${destinationSnodeEd25519}` ); process406Or425Error(statusCode); + if (!allow401s) { + process401Error(statusCode); + } processOxenServerError(statusCode, body); await process421Error(statusCode, body, associatedWith, destinationSnodeEd25519); if (destinationSnodeEd25519) { @@ -334,10 +348,10 @@ async function handleNodeNotFound({ window?.log?.warn('Handling NODE NOT FOUND with: ', shortNodeNotFound); if (associatedWith) { - await dropSnodeFromSwarmIfNeeded(associatedWith, ed25519NotFound); + await SnodePool.dropSnodeFromSwarmIfNeeded(associatedWith, ed25519NotFound); } - await dropSnodeFromSnodePool(ed25519NotFound); + await SnodePool.dropSnodeFromSnodePool(ed25519NotFound); snodeFailureCount[ed25519NotFound] = 0; // try to remove the not found snode from any of the paths if it's there. // it may not be here, as the snode note found might be the target snode of the request. @@ -388,6 +402,7 @@ async function processAnyOtherErrorAtDestination( // this test checks for error at the destination. if ( status !== 400 && + status !== 401 && // handled in process401Error status !== 406 && // handled in process406Error status !== 421 // handled in process421Error ) { @@ -508,6 +523,7 @@ async function processOnionResponse({ abortSignal, associatedWith, destinationSnodeEd25519, + allow401s, }: { response?: { text: () => Promise; status: number }; symmetricKey?: ArrayBuffer; @@ -515,6 +531,7 @@ async function processOnionResponse({ destinationSnodeEd25519?: string; abortSignal?: AbortSignal; associatedWith?: string; + allow401s: boolean; }): Promise { let ciphertext = ''; @@ -588,6 +605,7 @@ async function processOnionResponse({ body: jsonRes?.body, // this is really important. the `.body`. the .body should be a string. 
for instance for nodeNotFound but is most likely a dict (Record)) destinationSnodeEd25519, associatedWith, + allow401s, }); return jsonRes as SnodeResponse; @@ -726,11 +744,11 @@ async function handle421InvalidSwarm({ parsedBody.snodes.map((s: any) => ed25519Str(s.pubkey_ed25519)) ); - await updateSwarmFor(associatedWith, parsedBody.snodes); + await SnodePool.updateSwarmFor(associatedWith, parsedBody.snodes); throw new pRetry.AbortError(ERROR_421_HANDLED_RETRY_REQUEST); } // remove this node from the swarm of this pubkey - await dropSnodeFromSwarmIfNeeded(associatedWith, destinationSnodeEd25519); + await SnodePool.dropSnodeFromSwarmIfNeeded(associatedWith, destinationSnodeEd25519); } catch (e) { if (e.message !== ERROR_421_HANDLED_RETRY_REQUEST) { window?.log?.warn( @@ -738,7 +756,7 @@ async function handle421InvalidSwarm({ e ); // could not parse result. Consider that this snode as invalid - await dropSnodeFromSwarmIfNeeded(associatedWith, destinationSnodeEd25519); + await SnodePool.dropSnodeFromSwarmIfNeeded(associatedWith, destinationSnodeEd25519); } } await Onions.incrementBadSnodeCountOrDrop({ @@ -779,9 +797,9 @@ async function incrementBadSnodeCountOrDrop({ ); if (associatedWith) { - await dropSnodeFromSwarmIfNeeded(associatedWith, snodeEd25519); + await SnodePool.dropSnodeFromSwarmIfNeeded(associatedWith, snodeEd25519); } - await dropSnodeFromSnodePool(snodeEd25519); + await SnodePool.dropSnodeFromSnodePool(snodeEd25519); snodeFailureCount[snodeEd25519] = 0; await OnionPaths.dropSnodeFromPath(snodeEd25519); @@ -798,7 +816,7 @@ async function incrementBadSnodeCountOrDrop({ * This call tries to send the request via onion. If we get a bad path, it handles the snode removing of the swarm and snode pool. * But the caller needs to handle the retry (and rebuild the path on his side if needed) */ -async function sendOnionRequestHandlingSnodeEject({ +async function sendOnionRequestHandlingSnodeEjectNoRetries({ destSnodeX25519, finalDestOptions, nodePath, @@ -807,6 +825,7 @@ async function sendOnionRequestHandlingSnodeEject({ finalRelayOptions, useV4, throwErrors, + allow401s, }: { nodePath: Array; destSnodeX25519: string; @@ -816,9 +835,10 @@ async function sendOnionRequestHandlingSnodeEject({ associatedWith?: string; useV4: boolean; throwErrors: boolean; + allow401s: boolean; }): Promise { // this sendOnionRequestNoRetries() call has to be the only one like this. 
- // If you need to call it, call it through sendOnionRequestHandlingSnodeEject because this is the one handling path rebuilding and known errors + // If you need to call it, call it through sendOnionRequestHandlingSnodeEjectNoRetries because this is the one handling path rebuilding and known errors let response; let decodingSymmetricKey; try { @@ -834,7 +854,7 @@ async function sendOnionRequestHandlingSnodeEject({ if (window.sessionFeatureFlags?.debug.debugOnionRequests) { window.log.info( - `sendOnionRequestHandlingSnodeEject: sendOnionRequestNoRetries: useV4:${useV4} destSnodeX25519:${destSnodeX25519}; \nfinalDestOptions:${JSON.stringify( + `sendOnionRequestHandlingSnodeEjectNoRetries: sendOnionRequestNoRetries: useV4:${useV4} destSnodeX25519:${destSnodeX25519}; \nfinalDestOptions:${JSON.stringify( finalDestOptions )}; \nfinalRelayOptions:${JSON.stringify(finalRelayOptions)}\n\n result: ${JSON.stringify( result @@ -884,6 +904,7 @@ async function sendOnionRequestHandlingSnodeEject({ destinationSnodeEd25519, abortSignal, associatedWith, + allow401s, }); } @@ -1088,15 +1109,15 @@ const sendOnionRequestNoRetries = async ({ return { response, decodingSymmetricKey: destCtx.symmetricKey }; }; -async function sendOnionRequestSnodeDest( +async function sendOnionRequestSnodeDestNoRetries( onionPath: Array, targetNode: Snode, headers: Record, - plaintext: string | null, + allow401s: boolean, associatedWith?: string ) { - return Onions.sendOnionRequestHandlingSnodeEject({ + return Onions.sendOnionRequestHandlingSnodeEjectNoRetries({ nodePath: onionPath, destSnodeX25519: targetNode.pubkey_x25519, finalDestOptions: { @@ -1107,50 +1128,38 @@ async function sendOnionRequestSnodeDest( associatedWith, useV4: false, // sadly, request to snode do not support v4 yet throwErrors: false, + allow401s, }); } /** * If the fetch throws a retryable error we retry this call with a new path at most 3 times. If another error happens, we return it. If we have a result we just return it. */ -async function lokiOnionFetch({ +async function lokiOnionFetchNoRetries({ targetNode, associatedWith, body, headers, + allow401s, }: { targetNode: Snode; headers: Record; body: string | null; associatedWith?: string; + allow401s: boolean; }): Promise { try { - const retriedResult = await pRetry( - async () => { - // Get a path excluding `targetNode`: - const path = await OnionPaths.getOnionPath({ toExclude: targetNode }); - const result = await sendOnionRequestSnodeDest( - path, - targetNode, - headers, - body, - associatedWith - ); - return result; - }, - { - retries: 3, - factor: 1, - minTimeout: 100, - onFailedAttempt: e => { - window?.log?.warn( - `onionFetchRetryable attempt #${e.attemptNumber} failed. 
${e.retriesLeft} retries left...` - ); - }, - } + // Get a path excluding `targetNode`: + const path = await OnionPaths.getOnionPath({ toExclude: targetNode }); + const result = await sendOnionRequestSnodeDestNoRetries( + path, + targetNode, + headers, + body, + allow401s, + associatedWith ); - - return retriedResult as SnodeResponse | undefined; + return result as SnodeResponse | undefined; } catch (e) { window?.log?.warn('onionFetchRetryable failed ', e.message); if (e?.errno === 'ENETUNREACH') { @@ -1166,11 +1175,11 @@ async function lokiOnionFetch({ } export const Onions = { - sendOnionRequestHandlingSnodeEject, + sendOnionRequestHandlingSnodeEjectNoRetries, incrementBadSnodeCountOrDrop, decodeOnionResult, - lokiOnionFetch, - sendOnionRequestSnodeDest, + lokiOnionFetchNoRetries, + sendOnionRequestSnodeDestNoRetries, processOnionResponse, processOnionResponseV4, isFinalDestinationSnode, diff --git a/ts/session/apis/snode_api/onsResolve.ts b/ts/session/apis/snode_api/onsResolve.ts index ad31bb3f25..ac152caa86 100644 --- a/ts/session/apis/snode_api/onsResolve.ts +++ b/ts/session/apis/snode_api/onsResolve.ts @@ -9,25 +9,14 @@ import { } from '../../utils/String'; import { NotFoundError } from '../../utils/errors'; import { OnsResolveSubRequest } from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; +import { BatchRequests } from './batchRequest'; import { GetNetworkTime } from './getNetworkTime'; -import { getRandomSnode } from './snodePool'; +import { SnodePool } from './snodePool'; // ONS name can have [a-zA-Z0-9_-] except that - is not allowed as start or end // do not define a regex but rather create it on the fly to avoid https://stackoverflow.com/questions/3891641/regex-test-only-works-every-other-time const onsNameRegex = '^\\w([\\w-]*[\\w])?$'; -function buildOnsResolveRequests(base64EncodedNameHash: string): Array { - const request: OnsResolveSubRequest = { - method: 'oxend_request', - params: { - endpoint: 'ons_resolve', - params: { type: 0, name_hash: base64EncodedNameHash }, - }, - }; - return [request]; -} - async function getSessionIDForOnsName(onsNameCase: string) { const validationCount = 3; @@ -36,19 +25,23 @@ async function getSessionIDForOnsName(onsNameCase: string) { const nameAsData = stringToUint8Array(onsNameLowerCase); const nameHash = sodium.crypto_generichash(sodium.crypto_generichash_BYTES, nameAsData); const base64EncodedNameHash = fromUInt8ArrayToBase64(nameHash); - + const subRequest = new OnsResolveSubRequest(base64EncodedNameHash); if (isTestNet()) { window.log.info('OnsResolve response are not registered to anything on testnet'); throw new Error('OnsResolve response are not registered to anything on testnet'); } - const onsResolveRequests = buildOnsResolveRequests(base64EncodedNameHash); - // we do this request with validationCount snodes const promises = range(0, validationCount).map(async () => { - const targetNode = await getRandomSnode(); + const targetNode = await SnodePool.getRandomSnode(); - const results = await doSnodeBatchRequest(onsResolveRequests, targetNode, 4000, null); + const results = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + [subRequest], + targetNode, + 10000, + null, + false + ); const firstResult = results[0]; if (!firstResult || firstResult.code !== 200 || !firstResult.body) { throw new Error('ONSresolve:Failed to resolve ONS'); diff --git a/ts/session/apis/snode_api/pollingTypes.ts b/ts/session/apis/snode_api/pollingTypes.ts new file mode 100644 index 0000000000..0735b14ed5 --- /dev/null 
+++ b/ts/session/apis/snode_api/pollingTypes.ts @@ -0,0 +1,8 @@ +// That named-tuple syntax breaks prettier linting and formatting on the whole file it is used currently, so we keep it separately. + +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { ConversationTypeEnum } from '../../../models/types'; + +export type PollForUs = [pubkey: string, type: ConversationTypeEnum.PRIVATE]; +export type PollForLegacy = [pubkey: string, type: ConversationTypeEnum.GROUP]; +export type PollForGroup = [pubkey: GroupPubkeyType, type: ConversationTypeEnum.GROUPV2]; diff --git a/ts/session/apis/snode_api/retrieveRequest.ts b/ts/session/apis/snode_api/retrieveRequest.ts index 1d5d86013c..f01ac4c23d 100644 --- a/ts/session/apis/snode_api/retrieveRequest.ts +++ b/ts/session/apis/snode_api/retrieveRequest.ts @@ -1,134 +1,240 @@ -import { isArray, omit } from 'lodash'; +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { isArray } from 'lodash'; import { Snode } from '../../../data/types'; -import { updateIsOnline } from '../../../state/ducks/onion'; -import { doSnodeBatchRequest } from './batchRequest'; import { GetNetworkTime } from './getNetworkTime'; -import { SnodeNamespace, SnodeNamespaces } from './namespaces'; +import { SnodeNamespace, SnodeNamespaces, SnodeNamespacesGroup } from './namespaces'; +import { UserGroupsWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; +import { PubKey } from '../../types'; import { DURATION, TTL_DEFAULT } from '../../constants'; -import { UserUtils } from '../../utils'; import { sleepFor } from '../../utils/Promise'; import { SnodeResponseError } from '../../utils/errors'; import { - RetrieveLegacyClosedGroupSubRequestType, - RetrieveSubRequestType, - UpdateExpiryOnNodeSubRequest, + RetrieveGroupSubRequest, + RetrieveLegacyClosedGroupSubRequest, + RetrieveUserSubRequest, + UpdateExpiryOnNodeGroupSubRequest, + UpdateExpiryOnNodeUserSubRequest, } from './SnodeRequestTypes'; -import { SnodeSignature } from './snodeSignatures'; +import { BatchRequests } from './batchRequest'; import { RetrieveMessagesResultsBatched, RetrieveMessagesResultsContent } from './types'; +import { ed25519Str } from '../../utils/String'; +import { NetworkTime } from '../../../util/NetworkTime'; +type RetrieveParams = { + pubkey: string; + last_hash: string; + timestamp: number; + max_size: number | undefined; +}; + +async function retrieveRequestForUs({ + namespace, + retrieveParam, +}: { + namespace: SnodeNamespaces; + retrieveParam: RetrieveParams; +}) { + if (!SnodeNamespace.isUserConfigNamespace(namespace) && namespace !== SnodeNamespaces.Default) { + throw new Error(`retrieveRequestForUs not a valid namespace to retrieve as us:${namespace}`); + } + return new RetrieveUserSubRequest({ + last_hash: retrieveParam.last_hash, + max_size: retrieveParam.max_size, + namespace, + }); +} + +type NamespaceAndLastHash = { lastHash: string | null; namespace: SnodeNamespaces }; + +/** + * Retrieve for legacy groups are not authenticated so no need to sign the request + */ +function retrieveRequestForLegacyGroup({ + namespace, + ourPubkey, + pubkey, + retrieveParam, +}: { + pubkey: string; + namespace: SnodeNamespaces.LegacyClosedGroup; + ourPubkey: string; + retrieveParam: RetrieveParams; +}) { + if (pubkey === ourPubkey || !PubKey.is05Pubkey(pubkey)) { + throw new Error( + 'namespace -10 can only be used to retrieve messages from a legacy closed group (prefix 05)' + ); + } + if (namespace !== SnodeNamespaces.LegacyClosedGroup) { + throw 
new Error(`retrieveRequestForLegacyGroup namespace can only be -10`); + } + + // if we give a timestamp, a signature will be required by the service node, and we don't want to provide one as this is an unauthenticated namespace + return new RetrieveLegacyClosedGroupSubRequest({ + last_hash: retrieveParam.last_hash, + max_size: retrieveParam.max_size, + legacyGroupPk: pubkey, + }); +} + +/** + * Retrieve for groups (03-prefixed) are authenticated with the admin key if we have it, or with our sub key auth + */ +async function retrieveRequestForGroup({ + namespace, + groupPk, + retrieveParam, +}: { + groupPk: GroupPubkeyType; + namespace: SnodeNamespacesGroup; + retrieveParam: RetrieveParams; +}) { + if (!PubKey.is03Pubkey(groupPk)) { + throw new Error('retrieveRequestForGroup: not a 03 group'); + } + if (!SnodeNamespace.isGroupNamespace(namespace)) { + throw new Error(`retrieveRequestForGroup: not a groupNamespace: ${namespace}`); + } + const group = await UserGroupsWrapperActions.getGroup(groupPk); + + return new RetrieveGroupSubRequest({ + last_hash: retrieveParam.last_hash, + namespace, + max_size: retrieveParam.max_size, + groupDetailsNeededForSignature: group, + }); +} + +type RetrieveSubRequestType = + | RetrieveLegacyClosedGroupSubRequest + | RetrieveUserSubRequest + | RetrieveGroupSubRequest + | UpdateExpiryOnNodeUserSubRequest + | UpdateExpiryOnNodeGroupSubRequest; + +/** + * build the Array of retrieveRequests to do on the next poll, given the specified namespaces, lastHash, pubkey and hashes to bump (expiry) + * Note: exported only for testing purposes + * @param namespacesAndLastHashes + * @param pubkey + * @param ourPubkey + * @param configHashesToBump + * @returns + */ async function buildRetrieveRequest( - lastHashes: Array, + namespacesAndLastHashes: Array, pubkey: string, - namespaces: Array, ourPubkey: string, configHashesToBump: Array | null -): Promise> { - const maxSizeMap = SnodeNamespace.maxSizeMap(namespaces); +) { + const isUs = pubkey === ourPubkey; + const maxSizeMap = SnodeNamespace.maxSizeMap(namespacesAndLastHashes.map(m => m.namespace)); + const now = NetworkTime.now(); + const retrieveRequestsParams: Array = await Promise.all( - namespaces.map(async (namespace, index) => { + namespacesAndLastHashes.map(async ({ lastHash, namespace }) => { const foundMaxSize = maxSizeMap.find(m => m.namespace === namespace)?.maxSize; const retrieveParam = { pubkey, - last_hash: lastHashes.at(index) || '', - namespace, - timestamp: GetNetworkTime.getNowWithNetworkOffset(), + last_hash: lastHash || '', + timestamp: now, max_size: foundMaxSize, }; - if (namespace === SnodeNamespaces.ClosedGroupMessage) { - if (pubkey === ourPubkey || !pubkey.startsWith('05')) { - throw new Error( - 'namespace -10 can only be used to retrieve messages from a legacy closed group (prefix 05)' - ); + if (namespace === SnodeNamespaces.LegacyClosedGroup) { + return retrieveRequestForLegacyGroup({ namespace, ourPubkey, pubkey, retrieveParam }); + } + + if (PubKey.is03Pubkey(pubkey)) { + if (!SnodeNamespace.isGroupNamespace(namespace)) { + // either config or messages namespaces for 03 groups + throw new Error(`tried to poll from a non 03 group namespace ${namespace}`); } - const retrieveLegacyClosedGroup = { - ...retrieveParam, - namespace, - }; - const retrieveParamsLegacy: RetrieveLegacyClosedGroupSubRequestType = { - method: 'retrieve', - params: omit(retrieveLegacyClosedGroup, 'timestamp'), // if we give a timestamp, a signature will be required by the service node, and we don't want to provide one 
as this is an unauthenticated namespace - }; - - return retrieveParamsLegacy; + return retrieveRequestForGroup({ namespace, groupPk: pubkey, retrieveParam }); } // all legacy closed group retrieves are unauthenticated and run above. // if we get here, this can only be a retrieve for our own swarm, which must be authenticated - if ( - !SnodeNamespace.isUserConfigNamespace(namespace) && - namespace !== SnodeNamespaces.UserMessages - ) { - throw new Error(`not a legacy closed group. namespace can only be 0 and was ${namespace}`); - } - if (pubkey !== ourPubkey) { - throw new Error('not a legacy closed group. pubkey can only be ours'); - } - const signatureArgs = { ...retrieveParam, method: 'retrieve' as const, ourPubkey }; - const signatureBuilt = await SnodeSignature.getSnodeSignatureParams(signatureArgs); - const retrieve: RetrieveSubRequestType = { - method: 'retrieve', - params: { ...retrieveParam, ...signatureBuilt }, - }; - return retrieve; + return retrieveRequestForUs({ namespace, retrieveParam }); }) ); - if (configHashesToBump?.length) { - const expiry = GetNetworkTime.getNowWithNetworkOffset() + TTL_DEFAULT.CONFIG_MESSAGE; - const signResult = await SnodeSignature.generateUpdateExpirySignature({ + const expiryMs = NetworkTime.now() + TTL_DEFAULT.CONFIG_MESSAGE; + + if (configHashesToBump?.length && isUs) { + const request = new UpdateExpiryOnNodeUserSubRequest({ + expiryMs, + messagesHashes: configHashesToBump, shortenOrExtend: '', - timestamp: expiry, - messageHashes: configHashesToBump, }); - if (!signResult) { + retrieveRequestsParams.push(request); + return retrieveRequestsParams; + } + + if (configHashesToBump?.length && PubKey.is03Pubkey(pubkey)) { + const group = await UserGroupsWrapperActions.getGroup(pubkey); + + if (!group) { window.log.warn( - `SnodeSignature.generateUpdateExpirySignature returned result empty for hashes ${configHashesToBump}` + `trying to retrieve for group ${ed25519Str( + pubkey + )} but we are missing the details in the user group wrapper` ); - } else { - const expireParams: UpdateExpiryOnNodeSubRequest = { - method: 'expire', - params: { - messages: configHashesToBump, - pubkey: UserUtils.getOurPubKeyStrFromCache(), - expiry, - signature: signResult.signature, - pubkey_ed25519: signResult.pubkey_ed25519, - }, - }; - - retrieveRequestsParams.push(expireParams); + throw new Error('retrieve request is missing group details'); } + + retrieveRequestsParams.push( + new UpdateExpiryOnNodeGroupSubRequest({ + expiryMs, + messagesHashes: configHashesToBump, + shortenOrExtend: '', + groupDetailsNeededForSignature: group, + }) + ); } return retrieveRequestsParams; } -async function retrieveNextMessages( +/** + * + * @param targetNode the node to make the request to + * @param associatedWith the pubkey for which this request is, used to handle 421 errors + * @param namespacesAndLastHashes the details of the retrieve request to make + * @param ourPubkey our current user pubkey + * @param configHashesToBump the config hashes to update the expiry of + * @param allow401s for groups we allow a 401 to not throw as we can be removed from it, but we still need to process part of the result. + * @returns an array of results with exactly namespacesAndLastHashes.length items in it. 
+ * + * Note: Even if configHashesToBump is set, its result will be excluded from the return of this function, so what you get is always of namespacesAndLastHashes.length + */ +async function retrieveNextMessagesNoRetries( targetNode: Snode, - lastHashes: Array, associatedWith: string, - namespaces: Array, + namespacesAndLastHashes: Array, ourPubkey: string, - configHashesToBump: Array | null + configHashesToBump: Array | null, + allow401s: boolean ): Promise { - if (namespaces.length !== lastHashes.length) { - throw new Error('namespaces and last hashes do not match'); - } - - const retrieveRequestsParams = await buildRetrieveRequest( - lastHashes, + const rawRequests = await buildRetrieveRequest( + namespacesAndLastHashes, associatedWith, - namespaces, ourPubkey, configHashesToBump ); + // let exceptions bubble up // no retry for this one as this a call we do every few seconds while polling for messages const timeOutMs = 10 * DURATION.SECONDS; // yes this is a long timeout for just messages, but 4s timeouts way to often... const timeoutPromise = async () => sleepFor(timeOutMs); const fetchPromise = async () => - doSnodeBatchRequest(retrieveRequestsParams, targetNode, timeOutMs, associatedWith); + BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + rawRequests, + targetNode, + timeOutMs, + associatedWith, + allow401s + ); // just to make sure that we don't hang for more than timeOutMs const results = await Promise.race([timeoutPromise(), fetchPromise()]); @@ -143,9 +249,12 @@ async function retrieveNextMessages( } // the +1 is to take care of the extra `expire` method added once user config is released - if (results.length !== namespaces.length && results.length !== namespaces.length + 1) { + if ( + results.length !== namespacesAndLastHashes.length && + results.length !== namespacesAndLastHashes.length + 1 + ) { throw new Error( - `We asked for updates about ${namespaces.length} messages but got results of length ${results.length}` + `We asked for updates about ${namespacesAndLastHashes.length} messages but got results of length ${results.length}` ); } @@ -153,36 +262,42 @@ async function retrieveNextMessages( const firstResult = results[0]; if (firstResult.code !== 200) { - window?.log?.warn(`_retrieveNextMessages result is not 200 but ${firstResult.code}`); + window?.log?.warn(`retrieveNextMessagesNoRetries result is not 200 but ${firstResult.code}`); throw new Error( `_retrieveNextMessages - retrieve result is not 200 with ${targetNode.ip}:${targetNode.port} but ${firstResult.code}` ); } + if (configHashesToBump?.length) { + const lastResult = results[results.length - 1]; + if (lastResult?.code !== 200) { + // the update expiry of our config messages didn't work. + window.log.warn( + `the update expiry of our tracked config hashes didn't work: ${JSON.stringify(lastResult)}` + ); + } + } // we rely on the code of the first one to check for online status const bodyFirstResult = firstResult.body; - if (!window.inboxStore?.getState().onionPaths.isOnline) { - window.inboxStore?.dispatch(updateIsOnline(true)); - } GetNetworkTime.handleTimestampOffsetFromNetwork('retrieve', bodyFirstResult.t); + // merge results with their corresponding namespaces // NOTE: We don't want to sort messages here because the ordering depends on the snode and when it received each message. - // The last_hash for that snode has to be the last one we've received from that same snode, othwerwise we end up fetching the same messages over and over again. 
- return results.map((result, index) => ({ - code: result.code, - messages: result.body as RetrieveMessagesResultsContent, - namespace: namespaces[index], + // The last_hash for that snode has to be the last one we've received from that same snode, otherwise we end up fetching the same messages over and over again. + const toRet = namespacesAndLastHashes.map((n, index) => ({ + code: results[index].code, + messages: results[index].body as RetrieveMessagesResultsContent, + namespace: n.namespace, })); + return toRet; } catch (e) { window?.log?.warn('exception while parsing json of nextMessage:', e); - if (!window.inboxStore?.getState().onionPaths.isOnline) { - window.inboxStore?.dispatch(updateIsOnline(true)); - } + throw new Error( `_retrieveNextMessages - exception while parsing json of nextMessage ${targetNode.ip}:${targetNode.port}: ${e?.message}` ); } } -export const SnodeAPIRetrieve = { retrieveNextMessages }; +export const SnodeAPIRetrieve = { retrieveNextMessagesNoRetries, buildRetrieveRequest }; diff --git a/ts/session/apis/snode_api/revokeSubaccount.ts b/ts/session/apis/snode_api/revokeSubaccount.ts new file mode 100644 index 0000000000..5a296a66ec --- /dev/null +++ b/ts/session/apis/snode_api/revokeSubaccount.ts @@ -0,0 +1,47 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; + +import { PubKey } from '../../types'; +import { SubaccountRevokeSubRequest, SubaccountUnrevokeSubRequest } from './SnodeRequestTypes'; +import { NetworkTime } from '../../../util/NetworkTime'; + +export type RevokeChanges = Array<{ + action: 'revoke_subaccount' | 'unrevoke_subaccount'; + tokenToRevokeHex: string; +}>; + +async function getRevokeSubaccountParams( + groupPk: GroupPubkeyType, + secretKey: Uint8Array, + { + revokeChanges, + unrevokeChanges, + }: { revokeChanges: RevokeChanges; unrevokeChanges: RevokeChanges } +) { + if (!PubKey.is03Pubkey(groupPk)) { + throw new Error('revokeSubaccountForGroup: not a 03 group'); + } + + const revokeSubRequest = revokeChanges.length + ? new SubaccountRevokeSubRequest({ + groupPk, + revokeTokenHex: revokeChanges.map(m => m.tokenToRevokeHex), + timestamp: NetworkTime.now(), + secretKey, + }) + : undefined; + const unrevokeSubRequest = unrevokeChanges.length + ? new SubaccountUnrevokeSubRequest({ + groupPk, + revokeTokenHex: unrevokeChanges.map(m => m.tokenToRevokeHex), + timestamp: NetworkTime.now(), + secretKey, + }) + : undefined; + + return { + revokeSubRequest, + unrevokeSubRequest, + }; +} + +export const SnodeAPIRevoke = { getRevokeSubaccountParams }; diff --git a/ts/session/apis/snode_api/sessionRpc.ts b/ts/session/apis/snode_api/sessionRpc.ts index bd79521a6e..f27f244303 100644 --- a/ts/session/apis/snode_api/sessionRpc.ts +++ b/ts/session/apis/snode_api/sessionRpc.ts @@ -19,20 +19,23 @@ export interface LokiFetchOptions { /** * A small wrapper around node-fetch which deserializes response - * returns insecureNodeFetch response or false + * returned by insecureNodeFetch or false. + * Does not do any retries, nor eject snodes if needed */ -async function doRequest({ +async function doRequestNoRetries({ options, url, associatedWith, targetNode, timeout, + allow401s, }: { url: string; options: LokiFetchOptions; targetNode?: Snode; associatedWith: string | null; timeout: number; + allow401s: boolean; }): Promise { const method = options.method || 'GET'; @@ -50,11 +53,12 @@ async function doRequest({ ? 
true : window.sessionFeatureFlags?.useOnionRequests; if (useOnionRequests && targetNode) { - const fetchResult = await Onions.lokiOnionFetch({ + const fetchResult = await Onions.lokiOnionFetchNoRetries({ targetNode, body: fetchOptions.body, headers: fetchOptions.headers, associatedWith: associatedWith || undefined, + allow401s, }); if (!fetchResult) { return undefined; @@ -108,12 +112,13 @@ async function doRequest({ * -> if the targetNode gets too many errors => we will need to try to do this request again with another target node * The */ -export async function snodeRpc( +async function snodeRpcNoRetries( { method, params, targetNode, associatedWith, + allow401s, timeout = 10000, }: { method: string; @@ -121,6 +126,7 @@ export async function snodeRpc( targetNode: Snode; associatedWith: string | null; timeout?: number; + allow401s: boolean; } // the user pubkey this call is for. if the onion request fails, this is used to handle the error for this user swarm for instance ): Promise { const url = `https://${targetNode.ip}:${targetNode.port}/storage_rpc/v1`; @@ -138,11 +144,14 @@ export async function snodeRpc( agent: null, }; - return doRequest({ + return doRequestNoRetries({ url, options: fetchOptions, targetNode, associatedWith, timeout, + allow401s, }); } + +export const SessionRpc = { snodeRpcNoRetries }; diff --git a/ts/session/apis/snode_api/signature/groupSignature.ts b/ts/session/apis/snode_api/signature/groupSignature.ts new file mode 100644 index 0000000000..6d4c0851ac --- /dev/null +++ b/ts/session/apis/snode_api/signature/groupSignature.ts @@ -0,0 +1,326 @@ +import { + GroupPubkeyType, + PubkeyType, + Uint8ArrayLen100, + Uint8ArrayLen64, + UserGroupsGet, + WithGroupPubkey, +} from 'libsession_util_nodejs'; +import { isEmpty, isString } from 'lodash'; +import { MetaGroupWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { GroupUpdateInviteMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_user/GroupUpdateInviteMessage'; +import { GroupUpdatePromoteMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_user/GroupUpdatePromoteMessage'; +import { StringUtils, UserUtils } from '../../../utils'; +import { fromUInt8ArrayToBase64, stringToUint8Array } from '../../../utils/String'; +import { PreConditionFailed } from '../../../utils/errors'; +import { SnodeNamespacesGroup } from '../namespaces'; +import { SignedGroupHashesParams, WithMessagesHashes, WithShortenOrExtend } from '../types'; +import { SignatureShared } from './signatureShared'; +import { SnodeSignatureResult } from './snodeSignatures'; +import { getSodiumRenderer } from '../../../crypto'; +import { NetworkTime } from '../../../../util/NetworkTime'; + +async function getGroupInviteMessage({ + groupName, + member, + secretKey, + groupPk, +}: { + member: PubkeyType; + groupName: string; + secretKey: Uint8ArrayLen64; // len 64 + groupPk: GroupPubkeyType; +}) { + const sodium = await getSodiumRenderer(); + const createAtNetworkTimestamp = NetworkTime.now(); + + if (UserUtils.isUsFromCache(member)) { + throw new Error('getGroupInviteMessage: we cannot invite ourselves'); + } + + // Note: as the signature is built with the timestamp here, we cannot override the timestamp later on the sending pipeline + const adminSignature = sodium.crypto_sign_detached( + stringToUint8Array(`INVITE${member}${createAtNetworkTimestamp}`), + secretKey + ); + const memberAuthData = await MetaGroupWrapperActions.makeSwarmSubAccount(groupPk, member); + + const invite = new 
GroupUpdateInviteMessage({ + groupName, + groupPk, + createAtNetworkTimestamp, + adminSignature, + memberAuthData, + expirationType: 'unknown', // an invite is not expiring + expireTimer: 0, + }); + return invite; +} + +async function getGroupPromoteMessage({ + member, + secretKey, + groupPk, + groupName, +}: { + member: PubkeyType; + secretKey: Uint8ArrayLen64; // len 64 + groupPk: GroupPubkeyType; + groupName: string; +}) { + const createAtNetworkTimestamp = NetworkTime.now(); + + if (UserUtils.isUsFromCache(member)) { + throw new Error('getGroupPromoteMessage: we cannot promote ourselves'); + } + + const msg = new GroupUpdatePromoteMessage({ + groupPk, + createAtNetworkTimestamp, + groupIdentitySeed: secretKey.slice(0, 32), // the seed is the first 32 bytes of the secretkey + expirationType: 'unknown', // a promote message is not expiring + expireTimer: 0, + groupName, + }); + return msg; +} + +type ParamsShared = { + groupPk: GroupPubkeyType; + namespace: SnodeNamespacesGroup; + method: 'retrieve' | 'store' | 'delete_all'; +}; + +type SigParamsAdmin = ParamsShared & { + groupIdentityPrivKey: Uint8ArrayLen64; +}; + +type SigParamsSubaccount = ParamsShared & { + authData: Uint8ArrayLen100; +}; + +export type SigResultAdmin = Pick & { + pubkey: GroupPubkeyType; // this is the 03 pubkey of the corresponding group +}; + +export type SigResultSubAccount = SigResultAdmin & { + subaccount: string; + subaccount_sig: string; +}; + +async function getSnodeGroupSubAccountSignatureParams( + params: SigParamsSubaccount +): Promise { + const { signatureTimestamp, toSign } = + SignatureShared.getVerificationDataForStoreRetrieve(params); + + const sigResult = await MetaGroupWrapperActions.swarmSubaccountSign( + params.groupPk, + toSign, + params.authData + ); + return { + ...sigResult, + timestamp: signatureTimestamp, + pubkey: params.groupPk, + }; +} + +async function getSnodeGroupAdminSignatureParams(params: SigParamsAdmin): Promise { + const sigData = await SignatureShared.getSnodeSignatureShared({ + pubKey: params.groupPk, + method: params.method, + namespace: params.namespace, + privKey: params.groupIdentityPrivKey, + }); + return { ...sigData, pubkey: params.groupPk }; +} + +export type GroupDetailsNeededForSignature = Pick< + UserGroupsGet, + 'pubkeyHex' | 'authData' | 'secretKey' +>; + +type StoreOrRetrieve = { method: 'store' | 'retrieve'; namespace: SnodeNamespacesGroup }; +type DeleteHashes = { method: 'delete'; hashes: Array }; +type DeleteAllNonConfigs = { method: 'delete_all'; namespace: SnodeNamespacesGroup }; + +async function getSnodeGroupSignature({ + group, + ...args +}: { + group: GroupDetailsNeededForSignature | null; +} & (StoreOrRetrieve | DeleteHashes | DeleteAllNonConfigs)): Promise< + SigResultSubAccount | SigResultAdmin +> { + if (!group) { + throw new Error(`getSnodeGroupSignature: we need GroupDetailsNeededForSignature`); + } + const { pubkeyHex: groupPk, secretKey, authData } = group; + + const groupSecretKey = secretKey && !isEmpty(secretKey) ? secretKey : null; + const groupAuthData = authData && !isEmpty(authData) ? 
authData : null; + + if (args.method === 'delete_all' && isEmpty(secretKey)) { + throw new Error('getSnodeGroupSignature: delete_all needs an adminSecretKey'); + } + + if (groupSecretKey) { + if (args.method === 'delete') { + return getGroupSignatureByHashesParams({ + groupPk, + method: args.method, + messagesHashes: args.hashes, + group, + }); + } + return getSnodeGroupAdminSignatureParams({ + method: args.method, + namespace: args.namespace, + groupPk, + groupIdentityPrivKey: groupSecretKey, + }); + } + if (groupAuthData) { + if (args.method === 'delete') { + return getGroupSignatureByHashesParams({ + groupPk, + method: args.method, + messagesHashes: args.hashes, + group, + }); + } + return getSnodeGroupSubAccountSignatureParams({ + groupPk, + method: args.method, + namespace: args.namespace, + authData: groupAuthData, + }); + } + throw new Error(`getSnodeGroupSignature: needs either groupSecretKey or authData`); +} + +async function signDataWithAdminSecret( + verificationString: string | Uint8Array, + group: Pick +) { + const verificationData = isString(verificationString) + ? StringUtils.encode(verificationString, 'utf8') + : verificationString; + const message = new Uint8Array(verificationData); + + if (!group) { + throw new Error('signDataWithAdminSecret group was not found'); + } + const { secretKey } = group; + + const groupSecretKey = secretKey && !isEmpty(secretKey) ? secretKey : null; + if (!groupSecretKey) { + throw new Error('groupSecretKey is empty'); + } + const sodium = await getSodiumRenderer(); + + return { + signature: fromUInt8ArrayToBase64(sodium.crypto_sign_detached(message, groupSecretKey)), + }; +} + +// this is kind of duplicated with `generateUpdateExpirySignature`, but needs to use the authData when secretKey is not available +async function generateUpdateExpiryGroupSignature({ + shortenOrExtend, + expiryMs, + messagesHashes, + group, +}: WithMessagesHashes & + WithShortenOrExtend & { + group: GroupDetailsNeededForSignature | null; + expiryMs: number; + }) { + if (!group || isEmpty(group.pubkeyHex)) { + throw new PreConditionFailed('generateUpdateExpiryGroupSignature groupPk is empty'); + } + + // "expire" || ShortenOrExtend || expiry || messages[0] || ... || messages[N] + const verificationString = `expire${shortenOrExtend}${expiryMs}${messagesHashes.join('')}`; + const verificationData = StringUtils.encode(verificationString, 'utf8'); + const message = new Uint8Array(verificationData); + + if (!group) { + throw new Error('generateUpdateExpiryGroupSignature group was not found'); + } + const { pubkeyHex: groupPk, secretKey, authData } = group; + + const groupSecretKey = secretKey && !isEmpty(secretKey) ? secretKey : null; + const groupAuthData = authData && !isEmpty(authData) ? authData : null; + if (!groupSecretKey && !groupAuthData) { + throw new Error(`retrieveRequestForGroup: needs either groupSecretKey or authData`); + } + + const sodium = await getSodiumRenderer(); + const shared = { expiry: expiryMs, pubkey: groupPk }; // expiry and the other fields come from what the expire endpoint expects + + if (groupSecretKey) { + return { + signature: fromUInt8ArrayToBase64(sodium.crypto_sign_detached(message, groupSecretKey)), + ...shared, + }; + } + if (!groupAuthData) { + // typescript should see this already but doesn't, so let's enforce it. 
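As a reading aid, here is a minimal standalone sketch of the admin branch of the "expire" signature built just above. It uses the public libsodium-wrappers package in place of the app's getSodiumRenderer()/fromUInt8ArrayToBase64 helpers, and the key, hashes and expiry values are placeholders; only the verification-string layout mirrors the code in this hunk.

import sodium from 'libsodium-wrappers';

// Sketch only: signs "expire" || ShortenOrExtend || expiry || messages[0] || ... || messages[N]
// with a 64-byte Ed25519 group admin secret key, as the admin branch above does.
async function signExpireForGroupAdmin(
  groupAdminSecretKey: Uint8Array, // assumption: 64-byte Ed25519 secret key
  shortenOrExtend: string, // '', 'shorten' or 'extend' (empty string = hardcoded TTL)
  expiryMs: number,
  messagesHashes: Array<string>
): Promise<string> {
  await sodium.ready;
  const toSign = sodium.from_string(
    `expire${shortenOrExtend}${expiryMs}${messagesHashes.join('')}`
  );
  const signature = sodium.crypto_sign_detached(toSign, groupAdminSecretKey);
  return sodium.to_base64(signature, sodium.base64_variants.ORIGINAL);
}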
+ throw new Error( + `retrieveRequestForGroup: needs either groupSecretKey or authData but both are empty` + ); + } + const subaccountSign = await MetaGroupWrapperActions.swarmSubaccountSign( + groupPk, + message, + groupAuthData + ); + return { + ...subaccountSign, + ...shared, + }; +} + +async function getGroupSignatureByHashesParams({ + messagesHashes, + method, + group, +}: WithMessagesHashes & + WithGroupPubkey & { + method: 'delete'; + group: GroupDetailsNeededForSignature; + }): Promise { + const verificationString = `${method}${messagesHashes.join('')}`; + const message = new Uint8Array(StringUtils.encode(verificationString, 'utf8')); + const signatureTimestamp = NetworkTime.now(); + + const sodium = await getSodiumRenderer(); + try { + if (group.secretKey && !isEmpty(group.secretKey)) { + const signature = sodium.crypto_sign_detached(message, group.secretKey); + const signatureBase64 = fromUInt8ArrayToBase64(signature); + + return { + signature: signatureBase64, + pubkey: group.pubkeyHex, + messages: messagesHashes, + timestamp: signatureTimestamp, + }; + } + + throw new Error('getSnodeGroupSignatureByHashesParams needs admin secretKey set'); + } catch (e) { + window.log.warn('getSnodeGroupSignatureByHashesParams failed with: ', e.message); + throw e; + } +} + +export const SnodeGroupSignature = { + generateUpdateExpiryGroupSignature, + getGroupInviteMessage, + getGroupPromoteMessage, + getSnodeGroupSignature, + getGroupSignatureByHashesParams, + signDataWithAdminSecret, +}; diff --git a/ts/session/apis/snode_api/signature/signatureShared.ts b/ts/session/apis/snode_api/signature/signatureShared.ts new file mode 100644 index 0000000000..e9d1df41e8 --- /dev/null +++ b/ts/session/apis/snode_api/signature/signatureShared.ts @@ -0,0 +1,75 @@ +import { GroupPubkeyType, Uint8ArrayLen100, Uint8ArrayLen64 } from 'libsession_util_nodejs'; +import { isEmpty } from 'lodash'; +import { getSodiumRenderer } from '../../../crypto'; +import { PubKey } from '../../../types'; +import { StringUtils } from '../../../utils'; +import { fromUInt8ArrayToBase64 } from '../../../utils/String'; +import { NetworkTime } from '../../../../util/NetworkTime'; + +export type SnodeSigParamsShared = { + namespace: number | null | 'all'; // 'all' can be used to clear all namespaces (during account deletion) + method: 'retrieve' | 'store' | 'delete_all'; +}; + +export type SnodeSigParamsAdminGroup = SnodeSigParamsShared & { + groupPk: GroupPubkeyType; + privKey: Uint8ArrayLen64; // len 64 +}; + +export type SnodeSigParamsSubAccount = SnodeSigParamsShared & { + groupPk: GroupPubkeyType; + authData: Uint8ArrayLen100; // len 100 +}; + +export type SnodeSigParamsUs = SnodeSigParamsShared & { + pubKey: string; + privKey: Uint8ArrayLen64; // len 64 +}; + +function getVerificationDataForStoreRetrieve(params: SnodeSigParamsShared) { + const signatureTimestamp = NetworkTime.now(); + const verificationString = `${params.method}${ + params.namespace === 0 ? 
'' : params.namespace + }${signatureTimestamp}`; + const verificationData = StringUtils.encode(verificationString, 'utf8'); + return { + toSign: new Uint8Array(verificationData), + signatureTimestamp, + }; +} + +function isSigParamsForGroupAdmin( + sigParams: SnodeSigParamsAdminGroup | SnodeSigParamsUs | SnodeSigParamsSubAccount +): sigParams is SnodeSigParamsAdminGroup { + const asGr = sigParams as SnodeSigParamsAdminGroup; + return PubKey.is03Pubkey(asGr.groupPk) && !isEmpty(asGr.privKey); +} + +async function getSnodeSignatureShared(params: SnodeSigParamsAdminGroup | SnodeSigParamsUs) { + const { signatureTimestamp, toSign } = getVerificationDataForStoreRetrieve(params); + + try { + const sodium = await getSodiumRenderer(); + const signature = sodium.crypto_sign_detached(toSign, params.privKey); + const signatureBase64 = fromUInt8ArrayToBase64(signature); + if (isSigParamsForGroupAdmin(params)) { + return { + timestamp: signatureTimestamp, + signature: signatureBase64, + pubkey: params.groupPk, + }; + } + return { + timestamp: signatureTimestamp, + signature: signatureBase64, + }; + } catch (e) { + window.log.warn('getSnodeShared failed with: ', e.message); + throw e; + } +} + +export const SignatureShared = { + getSnodeSignatureShared, + getVerificationDataForStoreRetrieve, +}; diff --git a/ts/session/apis/snode_api/signature/snodeSignatures.ts b/ts/session/apis/snode_api/signature/snodeSignatures.ts new file mode 100644 index 0000000000..fd49eb36fe --- /dev/null +++ b/ts/session/apis/snode_api/signature/snodeSignatures.ts @@ -0,0 +1,252 @@ +import { + GroupPubkeyType, + PubkeyType, + Uint8ArrayLen100, + Uint8ArrayLen64, +} from 'libsession_util_nodejs'; +import { isEmpty } from 'lodash'; +import { toFixedUint8ArrayOfLength } from '../../../../types/sqlSharedTypes'; +import { getSodiumRenderer } from '../../../crypto'; +import { PubKey } from '../../../types'; +import { StringUtils, UserUtils } from '../../../utils'; +import { fromHexToArray, fromUInt8ArrayToBase64 } from '../../../utils/String'; +import { PreConditionFailed } from '../../../utils/errors'; +import { SignedHashesParams, WithMessagesHashes, WithShortenOrExtend } from '../types'; +import { NetworkTime } from '../../../../util/NetworkTime'; +import { WithSignature, WithTimestamp } from '../../../types/with'; + +export type SnodeSignatureResult = WithSignature & + WithTimestamp & { + pubkey_ed25519: string; + pubkey: string; // this is the x25519 key of the pubkey we are doing the request to (ourself for our swarm usually) + }; + +async function getSnodeSignatureByHashesParams({ + messagesHashes, + method, + pubkey, +}: WithMessagesHashes & { + pubkey: PubkeyType; + method: 'delete'; +}): Promise { + const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); + + if (!ourEd25519Key) { + const err = `getSnodeSignatureParams "${method}": User has no getUserED25519KeyPair()`; + window.log.warn(err); + throw new Error(err); + } + const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); + const verificationData = StringUtils.encode(`${method}${messagesHashes.join('')}`, 'utf8'); + const message = new Uint8Array(verificationData); + + const sodium = await getSodiumRenderer(); + try { + const signature = sodium.crypto_sign_detached(message, edKeyPrivBytes); + const signatureBase64 = fromUInt8ArrayToBase64(signature); + + return { + signature: signatureBase64, + pubkey_ed25519: ourEd25519Key.pubKey as PubkeyType, + pubkey, + messages: messagesHashes, + }; + } catch (e) { + window.log.warn('getSnodeSignatureParams failed 
with: ', e.message); + throw e; + } +} + +type SnodeSigParamsShared = { + namespace: number | null | 'all'; // 'all' can be used to clear all namespaces (during account deletion) + method: 'retrieve' | 'store' | 'delete_all'; +}; + +type SnodeSigParamsAdminGroup = SnodeSigParamsShared & { + groupPk: GroupPubkeyType; + privKey: Uint8ArrayLen64; // len 64 +}; + +type SnodeSigParamsSubAccount = SnodeSigParamsShared & { + groupPk: GroupPubkeyType; + authData: Uint8ArrayLen100; // len 100 +}; + +type SnodeSigParamsUs = SnodeSigParamsShared & { + pubKey: string; + privKey: Uint8ArrayLen64; // len 64 +}; + +function isSigParamsForGroupAdmin( + sigParams: SnodeSigParamsAdminGroup | SnodeSigParamsUs | SnodeSigParamsSubAccount +): sigParams is SnodeSigParamsAdminGroup { + const asGr = sigParams as SnodeSigParamsAdminGroup; + return PubKey.is03Pubkey(asGr.groupPk) && !isEmpty(asGr.privKey); +} + +function getVerificationData(params: SnodeSigParamsShared) { + const signatureTimestamp = NetworkTime.now(); + const verificationData = StringUtils.encode( + `${params.method}${params.namespace === 0 ? '' : params.namespace}${signatureTimestamp}`, + 'utf8' + ); + return { + toSign: new Uint8Array(verificationData), + signatureTimestamp, + }; +} + +async function getSnodeSignatureShared(params: SnodeSigParamsAdminGroup | SnodeSigParamsUs) { + const { signatureTimestamp, toSign } = getVerificationData(params); + + try { + const sodium = await getSodiumRenderer(); + const signature = sodium.crypto_sign_detached(toSign, params.privKey); + const signatureBase64 = fromUInt8ArrayToBase64(signature); + if (isSigParamsForGroupAdmin(params)) { + return { + timestamp: signatureTimestamp, + signature: signatureBase64, + pubkey: params.groupPk, + }; + } + return { + timestamp: signatureTimestamp, + signature: signatureBase64, + }; + } catch (e) { + window.log.warn('getSnodeShared failed with: ', e.message); + throw e; + } +} + +async function getSnodeSignatureParamsUs({ + method, + namespace = 0, +}: Pick): Promise { + const ourEd25519Key = await UserUtils.getUserED25519KeyPairBytes(); + const ourEd25519PubKey = await UserUtils.getUserED25519KeyPair(); + + if (!ourEd25519Key || !ourEd25519PubKey) { + const err = `getSnodeSignatureParams "${method}": User has no getUserED25519KeyPairBytes()`; + window.log.warn(err); + throw new Error(err); + } + + const edKeyPrivBytes = ourEd25519Key.privKeyBytes; + + const lengthCheckedPrivKey = toFixedUint8ArrayOfLength(edKeyPrivBytes, 64); + const sigData = await getSnodeSignatureShared({ + pubKey: UserUtils.getOurPubKeyStrFromCache(), + method, + namespace, + privKey: lengthCheckedPrivKey.buffer, + }); + + const us = UserUtils.getOurPubKeyStrFromCache(); + return { + ...sigData, + pubkey_ed25519: ourEd25519PubKey.pubKey, + pubkey: us, + }; +} + +async function generateUpdateExpirySignature({ + shortenOrExtend, + timestamp, + messagesHashes, + ed25519Privkey, + ed25519Pubkey, +}: WithMessagesHashes & + WithShortenOrExtend & + WithTimestamp & { + ed25519Privkey: Uint8Array; // len 64 + ed25519Pubkey: string; + }): Promise { + // "expire" || ShortenOrExtend || expiry || messages[0] || ... 
|| messages[N] + const verificationString = `expire${shortenOrExtend}${timestamp}${messagesHashes.join('')}`; + const verificationData = StringUtils.encode(verificationString, 'utf8'); + const message = new Uint8Array(verificationData); + + const sodium = await getSodiumRenderer(); + + const signature = sodium.crypto_sign_detached(message, ed25519Privkey); + const signatureBase64 = fromUInt8ArrayToBase64(signature); + + if (isEmpty(signatureBase64) || isEmpty(ed25519Pubkey)) { + throw new Error('generateUpdateExpirySignature: failed to build signature'); + } + + return { + signature: signatureBase64, + pubkey: ed25519Pubkey, + }; +} + +async function generateUpdateExpiryOurSignature({ + shortenOrExtend, + timestamp, + messagesHashes, +}: WithMessagesHashes & WithShortenOrExtend & WithTimestamp) { + const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); + + if (!ourEd25519Key) { + const err = 'getSnodeSignatureParams "expiry": User has no getUserED25519KeyPair()'; + window.log.warn(err); + throw new PreConditionFailed(err); + } + + const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); + + return generateUpdateExpirySignature({ + messagesHashes, + shortenOrExtend, + timestamp, + ed25519Privkey: toFixedUint8ArrayOfLength(edKeyPrivBytes, 64).buffer, + ed25519Pubkey: ourEd25519Key.pubKey, + }); +} + +async function generateGetExpiriesOurSignature({ + timestamp, + messageHashes, +}: { + timestamp: number; + messageHashes: Array; +}): Promise<(WithSignature & { pubkey_ed25519: string }) | null> { + const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); + if (!ourEd25519Key) { + const err = + 'generateGetExpiriesOurSignature "get_expiries": User has no getUserED25519KeyPair()'; + window.log.warn(err); + throw new Error(err); + } + + const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); + + // ("get_expiries" || timestamp || messages[0] || ... 
|| messages[N]) + const verificationString = `get_expiries${timestamp}${messageHashes.join('')}`; + const verificationData = StringUtils.encode(verificationString, 'utf8'); + const message = new Uint8Array(verificationData); + + const sodium = await getSodiumRenderer(); + try { + const signature = sodium.crypto_sign_detached(message, edKeyPrivBytes); + const signatureBase64 = fromUInt8ArrayToBase64(signature); + + return { + signature: signatureBase64, + pubkey_ed25519: ourEd25519Key.pubKey, + }; + } catch (e) { + window.log.warn('generateSignature "get_expiries" failed with: ', e.message); + return null; + } +} + +export const SnodeSignature = { + getSnodeSignatureParamsUs, + getSnodeSignatureByHashesParams, + generateUpdateExpiryOurSignature, + generateGetExpiriesOurSignature, +}; diff --git a/ts/session/apis/snode_api/snodePool.ts b/ts/session/apis/snode_api/snodePool.ts index a0c7249056..14de241a8a 100644 --- a/ts/session/apis/snode_api/snodePool.ts +++ b/ts/session/apis/snode_api/snodePool.ts @@ -1,45 +1,21 @@ -import _, { shuffle } from 'lodash'; +import _, { isEmpty, sample, shuffle } from 'lodash'; import pRetry from 'p-retry'; import { Data } from '../../../data/data'; import { Snode } from '../../../data/types'; -import { Onions, SnodePool } from '.'; import { OnionPaths } from '../../onions'; -import { ed25519Str } from '../../utils/String'; import { SeedNodeAPI } from '../seed_node_api'; import { ServiceNodesList } from './getServiceNodesList'; import { requestSnodesForPubkeyFromNetwork } from './getSwarmFor'; -import { minimumGuardCount, ONION_REQUEST_HOPS } from '../../onions/onionPath'; - -/** - * If we get less than this snode in a swarm, we fetch new snodes for this pubkey - */ -const minSwarmSnodeCount = 3; - -/** - * If we get less than minSnodePoolCount we consider that we need to fetch the new snode pool from a seed node - * and not from those snodes. - */ -export const minSnodePoolCount = minimumGuardCount * (ONION_REQUEST_HOPS + 1) * 2; - -/** - * If we get less than this amount of snodes (24), lets try to get an updated list from those while we can - */ -export const minSnodePoolCountBeforeRefreshFromSnodes = minSnodePoolCount * 2; - -/** - * If we do a request to fetch nodes from snodes and they don't return at least - * the same `requiredSnodesForAgreement` snodes we consider that this is not a valid return. - * - * Too many nodes are not shared for this call to be trustworthy - */ -export const requiredSnodesForAgreement = 24; +import { Onions } from '.'; +import { ed25519Str } from '../../utils/String'; +import { SnodePoolConstants } from './snodePoolConstants'; let randomSnodePool: Array = []; -export function TEST_resetState() { - randomSnodePool = []; +function TEST_resetState(snodePoolForTest: Array = []) { + randomSnodePool = snodePoolForTest; swarmCache.clear(); } @@ -51,12 +27,12 @@ const swarmCache: Map> = new Map(); * Use `dropSnodeFromSwarmIfNeeded` for that * @param snodeEd25519 the snode ed25519 to drop from the snode pool */ -export async function dropSnodeFromSnodePool(snodeEd25519: string) { +async function dropSnodeFromSnodePool(snodeEd25519: string) { const exists = _.some(randomSnodePool, x => x.pubkey_ed25519 === snodeEd25519); if (exists) { _.remove(randomSnodePool, x => x.pubkey_ed25519 === snodeEd25519); window?.log?.warn( - `Droppping ${ed25519Str(snodeEd25519)} from snode pool. ${ + `Dropping ${ed25519Str(snodeEd25519)} from snode pool. 
${ randomSnodePool.length } snodes remaining in randomPool` ); @@ -69,11 +45,11 @@ export async function dropSnodeFromSnodePool(snodeEd25519: string) { * excludingEd25519Snode can be used to exclude some nodes from the random list. * Useful to rebuild a path excluding existing node already in a path */ -export async function getRandomSnode(excludingEd25519Snode?: Array): Promise { +async function getRandomSnode(excludingEd25519Snode?: Array): Promise { // make sure we have a few snodes in the pool excluding the one passed as args - const requiredCount = minSnodePoolCount + (excludingEd25519Snode?.length || 0); + const requiredCount = SnodePoolConstants.minSnodePoolCount + (excludingEd25519Snode?.length || 0); if (randomSnodePool.length < requiredCount) { - await getSnodePoolFromDBOrFetchFromSeed(excludingEd25519Snode?.length); + await SnodePool.getSnodePoolFromDBOrFetchFromSeed(excludingEd25519Snode?.length); if (randomSnodePool.length < requiredCount) { window?.log?.warn( @@ -87,7 +63,11 @@ export async function getRandomSnode(excludingEd25519Snode?: Array): Pro } // We know the pool can't be empty at this point if (!excludingEd25519Snode) { - return _.sample(randomSnodePool) as Snode; + const snodePicked = sample(randomSnodePool); + if (!snodePicked) { + throw new Error('getRandomSnode failed as sample returned none '); + } + return snodePicked; } // we have to double check even after removing the nodes to exclude we still have some nodes in the list @@ -98,16 +78,20 @@ export async function getRandomSnode(excludingEd25519Snode?: Array): Pro // used for tests throw new Error(`Not enough snodes with excluding length ${excludingEd25519Snode.length}`); } - return _.sample(snodePoolExcluding) as Snode; + const snodePicked = sample(snodePoolExcluding); + if (!snodePicked) { + throw new Error('getRandomSnode failed as sample returned none '); + } + return snodePicked; } /** * This function force the snode poll to be refreshed from a random seed node or snodes if we have enough of them. * This should be called once in a day or so for when the app it kept on. */ -export async function forceRefreshRandomSnodePool(): Promise> { +async function forceRefreshRandomSnodePool(): Promise> { try { - await getSnodePoolFromDBOrFetchFromSeed(); + await SnodePool.getSnodePoolFromDBOrFetchFromSeed(); window?.log?.info( `forceRefreshRandomSnodePool: enough snodes to fetch from them, so we try using them ${randomSnodePool.length}` @@ -115,7 +99,7 @@ export async function forceRefreshRandomSnodePool(): Promise> { // this function throws if it does not have enough snodes to do it await tryToGetConsensusWithSnodesWithRetries(); - if (randomSnodePool.length < minSnodePoolCountBeforeRefreshFromSnodes) { + if (randomSnodePool.length < SnodePoolConstants.minSnodePoolCountBeforeRefreshFromSnodes) { throw new Error('forceRefreshRandomSnodePool still too small after refetching from snodes'); } } catch (e) { @@ -142,15 +126,21 @@ export async function forceRefreshRandomSnodePool(): Promise> { * Fetches from DB if snode pool is not cached, and returns it if the length is >= 12. 
* If length is < 12, fetches from seed an updated list of snodes */ -export async function getSnodePoolFromDBOrFetchFromSeed( +async function getSnodePoolFromDBOrFetchFromSeed( countToAddToRequirement = 0 ): Promise> { - if (randomSnodePool && randomSnodePool.length > minSnodePoolCount + countToAddToRequirement) { + if ( + randomSnodePool && + randomSnodePool.length > SnodePoolConstants.minSnodePoolCount + countToAddToRequirement + ) { return randomSnodePool; } const fetchedFromDb = await Data.getSnodePoolFromDb(); - if (!fetchedFromDb || fetchedFromDb.length <= minSnodePoolCount + countToAddToRequirement) { + if ( + !fetchedFromDb || + fetchedFromDb.length <= SnodePoolConstants.minSnodePoolCount + countToAddToRequirement + ) { window?.log?.warn( `getSnodePoolFromDBOrFetchFromSeed: not enough snodes in db (${fetchedFromDb?.length}), Fetching from seed node instead... ` ); @@ -166,21 +156,21 @@ export async function getSnodePoolFromDBOrFetchFromSeed( return randomSnodePool; } -export async function getRandomSnodePool(): Promise> { - if (randomSnodePool.length <= minSnodePoolCount) { - await getSnodePoolFromDBOrFetchFromSeed(); +async function getRandomSnodePool(): Promise> { + if (randomSnodePool.length <= SnodePoolConstants.minSnodePoolCount) { + await SnodePool.getSnodePoolFromDBOrFetchFromSeed(); } return randomSnodePool; } /** - * This function tries to fetch snodes list from seednodes and handle retries. + * This function tries to fetch snodes list from seed nodes and handle retries. * It will write the updated snode list to the db once it succeeded. - * It also resets the onionpaths failure count and snode failure count. + * It also resets the onion paths failure count and snode failure count. * This function does not throw. */ -export async function TEST_fetchFromSeedWithRetriesAndWriteToDb() { +async function TEST_fetchFromSeedWithRetriesAndWriteToDb() { const seedNodes = window.getSeedNodeList(); if (!seedNodes || !seedNodes.length) { @@ -231,7 +221,7 @@ async function tryToGetConsensusWithSnodesWithRetries() { async () => { const commonNodes = await ServiceNodesList.getSnodePoolFromSnodes(); - if (!commonNodes || commonNodes.length < requiredSnodesForAgreement) { + if (!commonNodes || commonNodes.length < SnodePoolConstants.requiredSnodesForAgreement) { // throwing makes trigger a retry if we have some left. 
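To make the retry behaviour around this check concrete, below is a hedged sketch of the same pattern using the p-retry package already imported in this file: throwing inside the wrapped function when fewer than requiredSnodesForAgreement common nodes come back makes p-retry schedule another attempt. The fetch callback and retry options are illustrative stand-ins, not the project's values.

import pRetry from 'p-retry';

type Snode = { pubkey_ed25519: string; ip: string; port: number };

async function getPoolWithAgreement(
  fetchCommonNodes: () => Promise<Array<Snode>>, // stand-in for ServiceNodesList.getSnodePoolFromSnodes
  requiredForAgreement = 24
): Promise<Array<Snode>> {
  return pRetry(
    async () => {
      const commonNodes = await fetchCommonNodes();
      if (!commonNodes || commonNodes.length < requiredForAgreement) {
        // throwing here is what triggers the next retry, exactly like the check above
        throw new Error(`Not enough common nodes: ${commonNodes?.length}`);
      }
      return commonNodes;
    },
    { retries: 3, minTimeout: 1000 } // illustrative options only
  );
}

Throwing (rather than returning an empty list) keeps the whole backoff policy in one place, in the p-retry options, which is why the real code only logs and rethrows here.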
window?.log?.info( `tryToGetConsensusWithSnodesWithRetries: Not enough common nodes ${commonNodes?.length}` @@ -267,7 +257,7 @@ async function tryToGetConsensusWithSnodesWithRetries() { * @param pubkey the associatedWith publicKey * @param snodeToDropEd25519 the snode pubkey to drop */ -export async function dropSnodeFromSwarmIfNeeded( +async function dropSnodeFromSwarmIfNeeded( pubkey: string, snodeToDropEd25519: string ): Promise { @@ -276,7 +266,7 @@ export async function dropSnodeFromSwarmIfNeeded( `Dropping ${ed25519Str(snodeToDropEd25519)} from swarm of ${ed25519Str(pubkey)}` ); - const existingSwarm = await getSwarmFromCacheOrDb(pubkey); + const existingSwarm = await SnodePool.getSwarmFromCacheOrDb(pubkey); if (!existingSwarm.includes(snodeToDropEd25519)) { return; @@ -286,7 +276,7 @@ export async function dropSnodeFromSwarmIfNeeded( await internalUpdateSwarmFor(pubkey, updatedSwarm); } -export async function updateSwarmFor(pubkey: string, snodes: Array): Promise { +async function updateSwarmFor(pubkey: string, snodes: Array): Promise { const edkeys = snodes.map((sn: Snode) => sn.pubkey_ed25519); await internalUpdateSwarmFor(pubkey, edkeys); } @@ -298,7 +288,7 @@ async function internalUpdateSwarmFor(pubkey: string, edkeys: Array) { await Data.updateSwarmNodesForPubkey(pubkey, edkeys); } -export async function getSwarmFromCacheOrDb(pubkey: string): Promise> { +async function getSwarmFromCacheOrDb(pubkey: string): Promise> { // NOTE: important that maybeNodes is not [] here const existingCache = swarmCache.get(pubkey); if (existingCache === undefined) { @@ -316,14 +306,13 @@ export async function getSwarmFromCacheOrDb(pubkey: string): Promise> { - const nodes = await getSwarmFromCacheOrDb(pubkey); +async function getSwarmFor(pubkey: string): Promise> { + const nodes = await SnodePool.getSwarmFromCacheOrDb(pubkey); // See how many are actually still reachable // the nodes still reachable are the one still present in the snode pool const goodNodes = randomSnodePool.filter((n: Snode) => nodes.indexOf(n.pubkey_ed25519) !== -1); - - if (goodNodes.length >= minSwarmSnodeCount) { + if (goodNodes.length >= SnodePoolConstants.minSwarmSnodeCount) { return goodNodes; } @@ -331,13 +320,27 @@ export async function getSwarmFor(pubkey: string): Promise> { return getSwarmFromNetworkAndSave(pubkey); } +async function getNodeFromSwarmOrThrow(pubkey: string): Promise { + const swarm = await SnodePool.getSwarmFor(pubkey); + if (!isEmpty(swarm)) { + const node = sample(swarm); + if (node) { + return node; + } + } + window.log.warn( + `getNodeFromSwarmOrThrow: could not get one random node for pk ${ed25519Str(pubkey)}` + ); + throw new Error(`getNodeFromSwarmOrThrow: could not get one random node`); +} + /** - * Force a request to be made to the network to fetch the swarm of the specificied pubkey, and cache the result. + * Force a request to be made to the network to fetch the swarm of the specified pubkey, and cache the result. * Note: should not be called directly unless you know what you are doing. 
Use the cached `getSwarmFor()` function instead * @param pubkey the pubkey to request the swarm for * @returns the fresh swarm, shuffled */ -export async function getFreshSwarmFor(pubkey: string): Promise> { +async function getFreshSwarmFor(pubkey: string): Promise> { return getSwarmFromNetworkAndSave(pubkey); } @@ -351,3 +354,24 @@ async function getSwarmFromNetworkAndSave(pubkey: string) { return shuffledSwarm; } + +export const SnodePool = { + // snode pool + dropSnodeFromSnodePool, + forceRefreshRandomSnodePool, + getRandomSnode, + getRandomSnodePool, + getSnodePoolFromDBOrFetchFromSeed, + + // swarm + dropSnodeFromSwarmIfNeeded, + updateSwarmFor, + getSwarmFromCacheOrDb, + getSwarmFor, + getNodeFromSwarmOrThrow, + getFreshSwarmFor, + + // tests + TEST_resetState, + TEST_fetchFromSeedWithRetriesAndWriteToDb, +}; diff --git a/ts/session/apis/snode_api/snodePoolConstants.ts b/ts/session/apis/snode_api/snodePoolConstants.ts new file mode 100644 index 0000000000..1058278ebb --- /dev/null +++ b/ts/session/apis/snode_api/snodePoolConstants.ts @@ -0,0 +1,34 @@ +import { minimumGuardCount, ONION_REQUEST_HOPS } from '../../onions/onionPathConstants'; + +/** + * If we get less than this snode in a swarm, we fetch new snodes for this pubkey + */ +const minSwarmSnodeCount = 3; + +/** + * If we get less than minSnodePoolCount we consider that we need to fetch the new snode pool from a seed node + * and not from those snodes. + */ + +export const minSnodePoolCount = minimumGuardCount * (ONION_REQUEST_HOPS + 1) * 2; + +/** + * If we get less than this amount of snodes (24), lets try to get an updated list from those while we can + */ +const minSnodePoolCountBeforeRefreshFromSnodes = minSnodePoolCount * 2; + +/** + * If we do a request to fetch nodes from snodes and they don't return at least + * the same `requiredSnodesForAgreement` snodes we consider that this is not a valid return. 
+ * + * Too many nodes are not shared for this call to be trustworthy + */ +const requiredSnodesForAgreement = 24; + +export const SnodePoolConstants = { + // constants + minSnodePoolCount, + minSnodePoolCountBeforeRefreshFromSnodes, + requiredSnodesForAgreement, + minSwarmSnodeCount, +}; diff --git a/ts/session/apis/snode_api/snodeSignatures.ts b/ts/session/apis/snode_api/snodeSignatures.ts deleted file mode 100644 index 1e16ea552c..0000000000 --- a/ts/session/apis/snode_api/snodeSignatures.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { getSodiumRenderer } from '../../crypto'; -import { StringUtils, UserUtils } from '../../utils'; -import { fromHexToArray, fromUInt8ArrayToBase64 } from '../../utils/String'; -import { WithShortenOrExtend } from './SnodeRequestTypes'; -import { GetNetworkTime } from './getNetworkTime'; - -export type SnodeSignatureResult = { - timestamp: number; - // sig_timestamp: number; - signature: string; - pubkey_ed25519: string; - pubkey: string; // this is the x25519 key of the pubkey we are doing the request to (ourself for our swarm usually) -}; - -async function getSnodeSignatureByHashesParams({ - messages, - method, - pubkey, -}: { - pubkey: string; - messages: Array; - method: 'delete'; -}): Promise< - Pick & { - messages: Array; - } -> { - const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); - - if (!ourEd25519Key) { - const err = `getSnodeSignatureParams "${method}": User has no getUserED25519KeyPair()`; - window.log.warn(err); - throw new Error(err); - } - const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); - const verificationData = StringUtils.encode(`${method}${messages.join('')}`, 'utf8'); - const message = new Uint8Array(verificationData); - - const sodium = await getSodiumRenderer(); - try { - const signature = sodium.crypto_sign_detached(message, edKeyPrivBytes); - const signatureBase64 = fromUInt8ArrayToBase64(signature); - - return { - signature: signatureBase64, - pubkey_ed25519: ourEd25519Key.pubKey, - pubkey, - messages, - }; - } catch (e) { - window.log.warn('getSnodeSignatureParams failed with: ', e.message); - throw e; - } -} - -async function getSnodeSignatureParams(params: { - pubkey: string; - namespace: number | null | 'all'; // 'all' can be used to clear all namespaces (during account deletion) - method: 'retrieve' | 'store' | 'delete_all'; -}): Promise { - const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); - - if (!ourEd25519Key) { - const err = `getSnodeSignatureParams "${params.method}": User has no getUserED25519KeyPair()`; - window.log.warn(err); - throw new Error(err); - } - const namespace = params.namespace || 0; - const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); - - const signatureTimestamp = GetNetworkTime.getNowWithNetworkOffset(); - - const withoutNamespace = `${params.method}${signatureTimestamp}`; - const withNamespace = `${params.method}${namespace}${signatureTimestamp}`; - const verificationData = - namespace === 0 - ? 
StringUtils.encode(withoutNamespace, 'utf8') - : StringUtils.encode(withNamespace, 'utf8'); - - const message = new Uint8Array(verificationData); - - const sodium = await getSodiumRenderer(); - try { - const signature = sodium.crypto_sign_detached(message, edKeyPrivBytes); - const signatureBase64 = fromUInt8ArrayToBase64(signature); - - return { - // sig_timestamp: signatureTimestamp, - timestamp: signatureTimestamp, - signature: signatureBase64, - pubkey_ed25519: ourEd25519Key.pubKey, - pubkey: params.pubkey, - }; - } catch (e) { - window.log.warn('getSnodeSignatureParams failed with: ', e.message); - throw e; - } -} - -/** - * NOTE if shortenOrExtend is an empty string it means we want to hardcode the expiry to a TTL value, otherwise it's to shorten or extend the TTL - */ -async function generateUpdateExpirySignature({ - shortenOrExtend, - timestamp, - messageHashes, -}: { - timestamp: number; - messageHashes: Array; -} & WithShortenOrExtend): Promise<{ signature: string; pubkey_ed25519: string } | null> { - const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); - - if (!ourEd25519Key) { - const err = 'getSnodeSignatureParams "expiry": User has no getUserED25519KeyPair()'; - window.log.warn(err); - throw new Error(err); - } - - const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); - - // ("expire" || ShortenOrExtend || expiry || messages[0] || ... || messages[N]) - const verificationString = `expire${shortenOrExtend}${timestamp}${messageHashes.join('')}`; - const verificationData = StringUtils.encode(verificationString, 'utf8'); - const message = new Uint8Array(verificationData); - - const sodium = await getSodiumRenderer(); - try { - const signature = sodium.crypto_sign_detached(message, edKeyPrivBytes); - const signatureBase64 = fromUInt8ArrayToBase64(signature); - - return { - signature: signatureBase64, - pubkey_ed25519: ourEd25519Key.pubKey, - }; - } catch (e) { - window.log.warn('getSnodeSignatureParams "expiry" failed with: ', e.message); - return null; - } -} - -async function generateGetExpiriesSignature({ - timestamp, - messageHashes, -}: { - timestamp: number; - messageHashes: Array; -}): Promise<{ signature: string; pubkey_ed25519: string } | null> { - const ourEd25519Key = await UserUtils.getUserED25519KeyPair(); - if (!ourEd25519Key) { - const err = 'getSnodeSignatureParams "get_expiries": User has no getUserED25519KeyPair()'; - window.log.warn(err); - throw new Error(err); - } - - const edKeyPrivBytes = fromHexToArray(ourEd25519Key?.privKey); - - // ("get_expiries" || timestamp || messages[0] || ... 
|| messages[N]) - const verificationString = `get_expiries${timestamp}${messageHashes.join('')}`; - const verificationData = StringUtils.encode(verificationString, 'utf8'); - const message = new Uint8Array(verificationData); - - const sodium = await getSodiumRenderer(); - try { - const signature = sodium.crypto_sign_detached(message, edKeyPrivBytes); - const signatureBase64 = fromUInt8ArrayToBase64(signature); - - return { - signature: signatureBase64, - pubkey_ed25519: ourEd25519Key.pubKey, - }; - } catch (e) { - window.log.warn('generateSignature "get_expiries" failed with: ', e.message); - return null; - } -} - -export const SnodeSignature = { - getSnodeSignatureParams, - getSnodeSignatureByHashesParams, - generateUpdateExpirySignature, - generateGetExpiriesSignature, -}; diff --git a/ts/session/apis/snode_api/storeMessage.ts b/ts/session/apis/snode_api/storeMessage.ts deleted file mode 100644 index 1328b8cb05..0000000000 --- a/ts/session/apis/snode_api/storeMessage.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { isEmpty } from 'lodash'; -import { Snode } from '../../../data/types'; -import { - DeleteByHashesFromNodeParams, - DeleteFromNodeSubRequest, - NotEmptyArrayOfBatchResults, - StoreOnNodeParams, - StoreOnNodeSubRequest, -} from './SnodeRequestTypes'; -import { doSnodeBatchRequest } from './batchRequest'; -import { GetNetworkTime } from './getNetworkTime'; - -function justStores(params: Array) { - return params.map(p => { - return { - method: 'store', - params: p, - } as StoreOnNodeSubRequest; - }); -} - -function buildStoreRequests( - params: Array, - toDeleteOnSequence: DeleteByHashesFromNodeParams | null -): Array { - if (!toDeleteOnSequence || isEmpty(toDeleteOnSequence)) { - return justStores(params); - } - return [...justStores(params), ...buildDeleteByHashesSubRequest(toDeleteOnSequence)]; -} - -function buildDeleteByHashesSubRequest( - params: DeleteByHashesFromNodeParams -): Array { - return [ - { - method: 'delete', - params, - }, - ]; -} - -/** - * Send a 'store' request to the specified targetNode, using params as argument - * @returns the Array of stored hashes if it is a success, or null - */ -async function storeOnNode( - targetNode: Snode, - params: Array, - toDeleteOnSequence: DeleteByHashesFromNodeParams | null -): Promise { - try { - const subRequests = buildStoreRequests(params, toDeleteOnSequence); - const result = await doSnodeBatchRequest( - subRequests, - targetNode, - 4000, - params[0].pubkey, - toDeleteOnSequence ? 
'sequence' : 'batch' - ); - - if (!result || !result.length) { - window?.log?.warn( - `SessionSnodeAPI::requestSnodesForPubkeyWithTargetNodeRetryable - sessionRpc on ${targetNode.ip}:${targetNode.port} returned falsish value`, - result - ); - throw new Error('requestSnodesForPubkeyWithTargetNodeRetryable: Invalid result'); - } - - const firstResult = result[0]; - - if (firstResult.code !== 200) { - window?.log?.warn('first result status is not 200 for storeOnNode but: ', firstResult.code); - throw new Error('storeOnNode: Invalid status code'); - } - - GetNetworkTime.handleTimestampOffsetFromNetwork('store', firstResult.body.t); - - return result; - } catch (e) { - window?.log?.warn('store - send error:', e, `destination ${targetNode.ip}:${targetNode.port}`); - throw e; - } -} - -export const SnodeAPIStore = { storeOnNode }; diff --git a/ts/session/apis/snode_api/swarmPolling.ts b/ts/session/apis/snode_api/swarmPolling.ts index 6a5976b0e6..bfdd22a81b 100644 --- a/ts/session/apis/snode_api/swarmPolling.ts +++ b/ts/session/apis/snode_api/swarmPolling.ts @@ -1,37 +1,65 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable more/no-then */ /* eslint-disable @typescript-eslint/no-misused-promises */ -import { compact, concat, flatten, isEmpty, last, sample, uniqBy } from 'lodash'; +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { z } from 'zod'; + +import { + compact, + concat, + flatten, + isArray, + isEmpty, + last, + omit, + sample, + toNumber, + uniqBy, +} from 'lodash'; +import { v4 } from 'uuid'; import { Data } from '../../../data/data'; -import { Snode } from '../../../data/types'; import { SignalService } from '../../../protobuf'; import * as Receiver from '../../../receiver/receiver'; import { PubKey } from '../../types'; import { ERROR_CODE_NO_CONNECT } from './SNodeAPI'; -import * as snodePool from './snodePool'; import { ConversationModel } from '../../../models/conversation'; -import { ConfigMessageHandler } from '../../../receiver/configMessage'; +import { LibsessionMessageHandler } from '../../../receiver/libsession/handleLibSessionMessage'; +import { EnvelopePlus } from '../../../receiver/types'; import { updateIsOnline } from '../../../state/ducks/onion'; -import { ReleasedFeatures } from '../../../util/releaseFeature'; +import { assertUnreachable } from '../../../types/sqlSharedTypes'; import { GenericWrapperActions, + MetaGroupWrapperActions, UserConfigWrapperActions, UserGroupsWrapperActions, } from '../../../webworker/workers/browser/libsession_worker_interface'; import { DURATION, SWARM_POLLING_TIMEOUT } from '../../constants'; -import { getConversationController } from '../../conversations'; +import { ConvoHub } from '../../conversations'; +import { getSodiumRenderer } from '../../crypto'; import { StringUtils, UserUtils } from '../../utils'; -import { ed25519Str } from '../../utils/String'; -import { NotFoundError } from '../../utils/errors'; +import { sleepFor } from '../../utils/Promise'; +import { ed25519Str, fromBase64ToArray, fromHexToArray } from '../../utils/String'; +import { NotFoundError, PreConditionFailed } from '../../utils/errors'; import { LibSessionUtil } from '../../utils/libsession/libsession_utils'; -import { SnodeNamespace, SnodeNamespaces } from './namespaces'; +import { MultiEncryptUtils } from '../../utils/libsession/libsession_utils_multi_encrypt'; +import { SnodeNamespace, SnodeNamespaces, SnodeNamespacesUserConfig } from './namespaces'; +import { PollForGroup, PollForLegacy, PollForUs } from './pollingTypes'; import { 
SnodeAPIRetrieve } from './retrieveRequest'; +import { SnodePool } from './snodePool'; +import { SwarmPollingGroupConfig } from './swarm_polling_config/SwarmPollingGroupConfig'; +import { SwarmPollingUserConfig } from './swarm_polling_config/SwarmPollingUserConfig'; import { RetrieveMessageItem, RetrieveMessageItemWithNamespace, RetrieveMessagesResultsBatched, + RetrieveRequestResult, } from './types'; +import { ConversationTypeEnum } from '../../../models/types'; +import { Snode } from '../../../data/types'; +import { isDevProd } from '../../../shared/env_vars'; + +const minMsgCountShouldRetry = 95; function extractWebSocketContent( message: string, @@ -58,7 +86,6 @@ function extractWebSocketContent( return null; } } - let instance: SwarmPolling | undefined; const timeouts: Array = []; @@ -69,8 +96,22 @@ export const getSwarmPollingInstance = () => { return instance; }; +type GroupPollingEntry = { + pubkey: PubKey; + lastPolledTimestamp: number; + callbackFirstPoll?: () => Promise; +}; + +function entryToKey(entry: GroupPollingEntry) { + return entry.pubkey.key; +} + export class SwarmPolling { - private groupPolling: Array<{ pubkey: PubKey; lastPolledTimestamp: number }>; + private groupPolling: Array; + + /** + * lastHashes[snode_edkey][pubkey_polled][namespace_polled] = last_hash + */ private readonly lastHashes: Record>>; private hasStarted = false; @@ -114,29 +155,33 @@ export class SwarmPolling { this.resetSwarmPolling(); } - public forcePolledTimestamp(pubkey: PubKey, lastPoll: number) { - this.groupPolling = this.groupPolling.map(group => { - if (PubKey.isEqual(pubkey, group.pubkey)) { - return { - ...group, - lastPolledTimestamp: lastPoll, - }; - } - return group; + public forcePolledTimestamp(pubkey: string, lastPoll: number) { + const foundAt = this.groupPolling.findIndex(group => { + return PubKey.isEqual(pubkey, group.pubkey); }); + + if (foundAt > -1) { + this.groupPolling[foundAt].lastPolledTimestamp = lastPoll; + } } - public addGroupId(pubkey: PubKey) { - if (this.groupPolling.findIndex(m => m.pubkey.key === pubkey.key) === -1) { - window?.log?.info('Swarm addGroupId: adding pubkey to polling', pubkey.key); - this.groupPolling.push({ pubkey, lastPolledTimestamp: 0 }); + public addGroupId(pubkey: PubKey | string, callbackFirstPoll?: () => Promise) { + const pk = PubKey.cast(pubkey); + if (this.groupPolling.findIndex(m => m.pubkey.key === pk.key) === -1) { + window?.log?.info('Swarm addGroupId: adding pubkey to polling', pk.key); + this.groupPolling.push({ pubkey: pk, lastPolledTimestamp: 0, callbackFirstPoll }); + } else if (callbackFirstPoll) { + // group is already polled. Hopefully we already have keys for it to decrypt messages? 
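The addGroupId logic here boils down to "register a group once; if it is already tracked, only defer the first-poll callback". A simplified stand-in (plain string pubkeys, setTimeout instead of the app's sleepFor helper) of that shape:

type PollingEntry = {
  pubkey: string;
  lastPolledTimestamp: number;
  callbackFirstPoll?: () => Promise<void>;
};

function addGroupToPoll(
  entries: Array<PollingEntry>,
  pubkey: string,
  callbackFirstPoll?: () => Promise<void>
) {
  const alreadyTracked = entries.some(e => e.pubkey === pubkey);
  if (!alreadyTracked) {
    // lastPolledTimestamp of 0 means "poll this group on the very next tick"
    entries.push({ pubkey, lastPolledTimestamp: 0, callbackFirstPoll });
    return;
  }
  if (callbackFirstPoll) {
    // already polled (or about to be): give the keys a moment to arrive, then
    // fire the callback instead of registering a duplicate entry
    setTimeout(() => void callbackFirstPoll(), 2000);
  }
}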
+ void sleepFor(2000).then(() => { + void callbackFirstPoll(); + }); } } - public removePubkey(pk: PubKey | string) { + public removePubkey(pk: PubKey | string, reason: string) { const pubkey = PubKey.cast(pk); if (this.groupPolling.some(group => pubkey.key === group.pubkey.key)) { - window?.log?.info('Swarm removePubkey: removing pubkey from polling', pubkey.key); + window?.log?.info(`SwarmPolling: removing ${ed25519Str(pubkey.key)} for reason: "${reason}"`); this.groupPolling = this.groupPolling.filter(group => !pubkey.isEqual(group.pubkey)); } } @@ -150,28 +195,94 @@ export class SwarmPolling { * -> an activeAt more than a week old is considered inactive, and not polled much (every 2 minutes) */ public getPollingTimeout(convoId: PubKey) { - const convo = getConversationController().get(convoId.key); + const convo = ConvoHub.use().get(convoId.key); if (!convo) { return SWARM_POLLING_TIMEOUT.INACTIVE; } - const activeAt = convo.get('active_at'); + const activeAt = convo.getActiveAt(); if (!activeAt) { return SWARM_POLLING_TIMEOUT.INACTIVE; } const currentTimestamp = Date.now(); + const diff = currentTimestamp - activeAt; // consider that this is an active group if activeAt is less than two days old - if (currentTimestamp - activeAt <= DURATION.DAYS * 2) { + if (diff <= DURATION.DAYS * 2) { return SWARM_POLLING_TIMEOUT.ACTIVE; } - if (currentTimestamp - activeAt <= DURATION.DAYS * 7) { + if (diff <= DURATION.DAYS * 7) { return SWARM_POLLING_TIMEOUT.MEDIUM_ACTIVE; } return SWARM_POLLING_TIMEOUT.INACTIVE; } + public shouldPollByTimeout(entry: GroupPollingEntry) { + const convoPollingTimeout = this.getPollingTimeout(entry.pubkey); + const diff = Date.now() - entry.lastPolledTimestamp; + return diff >= convoPollingTimeout; + } + + public async getPollingDetails(pollingEntries: Array) { + // Note: all of those checks are explicitly made only based on the libsession wrappers data, and NOT the DB. + // Eventually, we want to get rid of the duplication between the DB and libsession wrappers. + // If you need to add a check based on the DB, this is code smell. + let toPollDetails: Array = []; + const ourPubkey = UserUtils.getOurPubKeyStrFromCache(); + + if (pollingEntries.some(m => m.pubkey.key === ourPubkey)) { + throw new Error( + 'pollingEntries should only contain group swarm (legacy or not), but not ourself' + ); + } + + // First, make sure we do poll for our own swarm. 
Note: we always poll as often as possible for our swarm + toPollDetails.push([ourPubkey, ConversationTypeEnum.PRIVATE]); + + const allGroupsLegacyInWrapper = await UserGroupsWrapperActions.getAllLegacyGroups(); + const allGroupsInWrapper = await UserGroupsWrapperActions.getAllGroups(); + if (!isArray(allGroupsLegacyInWrapper) || !isArray(allGroupsInWrapper)) { + throw new Error('getAllLegacyGroups or getAllGroups returned unknown result'); + } + + // only groups NOT starting with 03 + const legacyGroups = pollingEntries.filter(m => !PubKey.is03Pubkey(m.pubkey.key)); + + // only groups starting with 03 + const groups = pollingEntries.filter(m => PubKey.is03Pubkey(m.pubkey.key)); + + // let's grab the groups and legacy groups which should be left as they are not in their corresponding wrapper + const legacyGroupsToLeave = legacyGroups + .filter(m => !allGroupsLegacyInWrapper.some(w => w.pubkeyHex === m.pubkey.key)) + .map(entryToKey); + const groupsToLeave = groups + .filter(m => !allGroupsInWrapper.some(w => w.pubkeyHex === m.pubkey.key)) + .map(entryToKey); + + const allLegacyGroupsTracked = legacyGroups + .filter(m => this.shouldPollByTimeout(m)) // should we poll from it depending on this group activity? + .filter(m => allGroupsLegacyInWrapper.some(w => w.pubkeyHex === m.pubkey.key)) // we don't poll from legacy groups which are not in the user group wrapper + .map(m => m.pubkey.key) // extract the pubkey + .map(m => [m, ConversationTypeEnum.GROUP] as PollForLegacy); // + toPollDetails = concat(toPollDetails, allLegacyGroupsTracked); + + const allGroupsTracked = groups + .filter(m => this.shouldPollByTimeout(m)) // should we poll from it depending on this group activity? + .filter(m => { + // We don't poll from groups which are not in the user group wrapper, and for those which are not marked as accepted + // We don't want to leave them, we just don't want to poll from them. + const found = allGroupsInWrapper.find(w => w.pubkeyHex === m.pubkey.key); + return found && !found.invitePending; + }) + .map(m => m.pubkey.key as GroupPubkeyType) // extract the pubkey + .map(m => [m, ConversationTypeEnum.GROUPV2] as PollForGroup); + + toPollDetails = concat(toPollDetails, allGroupsTracked); + + return { toPollDetails, legacyGroupsToLeave, groupsToLeave }; + } + /** * Only public for testing */ @@ -179,80 +290,152 @@ export class SwarmPolling { if (!window.getGlobalOnlineStatus()) { window?.log?.error('pollForAllKeys: offline'); // Very important to set up a new polling call so we do retry at some point - timeouts.push(setTimeout(this.pollForAllKeys.bind(this), SWARM_POLLING_TIMEOUT.ACTIVE)); + timeouts.push( + setTimeout(this.pollForAllKeys.bind(this), isDevProd() ? 
500 : SWARM_POLLING_TIMEOUT.ACTIVE) + ); return; } - // we always poll as often as possible for our pubkey - const ourPubkey = UserUtils.getOurPubKeyFromCache(); - const userNamespaces = await this.getUserNamespacesPolled(); - const directPromise = Promise.all([this.pollOnceForKey(ourPubkey, false, userNamespaces)]).then( - () => undefined - ); - - const now = Date.now(); - const groupPromises = this.groupPolling.map(async group => { - const convoPollingTimeout = this.getPollingTimeout(group.pubkey); - - const diff = now - group.lastPolledTimestamp; - - const loggingId = - getConversationController().get(group.pubkey.key)?.idForLogging() || group.pubkey.key; - if (diff >= convoPollingTimeout) { - window?.log?.debug( - `Polling for ${loggingId}; timeout: ${convoPollingTimeout}; diff: ${diff} ` - ); - return this.pollOnceForKey(group.pubkey, true, [SnodeNamespaces.ClosedGroupMessage]); - } - window?.log?.debug( - `Not polling for ${loggingId}; timeout: ${convoPollingTimeout} ; diff: ${diff}` - ); + const { toPollDetails, groupsToLeave, legacyGroupsToLeave } = await this.getPollingDetails( + this.groupPolling + ); + // first, leave anything which shouldn't be there anymore + await Promise.all( + concat(groupsToLeave, legacyGroupsToLeave).map(m => + this.notPollingForGroupAsNotInWrapper(m, 'not in wrapper before poll') + ) + ); - return Promise.resolve(); - }); try { - await Promise.all(concat([directPromise], groupPromises)); + await Promise.all(toPollDetails.map(toPoll => this.pollOnceForKey(toPoll))); } catch (e) { window?.log?.warn('pollForAllKeys exception: ', e); throw e; } finally { - timeouts.push(setTimeout(this.pollForAllKeys.bind(this), SWARM_POLLING_TIMEOUT.ACTIVE)); + timeouts.push( + setTimeout(this.pollForAllKeys.bind(this), isDevProd() ? 
500 : SWARM_POLLING_TIMEOUT.ACTIVE) + ); + } + } + + public async updateLastPollTimestampForPubkey({ + countMessages, + pubkey, + type, + }: { + type: ConversationTypeEnum; + countMessages: number; + pubkey: string; + }) { + // if all snodes returned an error (null), no need to update the lastPolledTimestamp + if (type === ConversationTypeEnum.GROUP || type === ConversationTypeEnum.GROUPV2) { + window?.log?.debug( + `Polled for group${ed25519Str(pubkey)} got ${countMessages} messages back.` + ); + let lastPolledTimestamp = Date.now(); + if (countMessages >= minMsgCountShouldRetry) { + // if we get `minMsgCountShouldRetry` messages or more back, it means there are probably more than this + // so make sure to retry the polling in the next 5sec by marking the last polled timestamp way before that it is really + // this is a kind of hack + lastPolledTimestamp = Date.now() - SWARM_POLLING_TIMEOUT.INACTIVE - 5 * 1000; + } // update the last fetched timestamp + + this.forcePolledTimestamp(pubkey, lastPolledTimestamp); + } + } + + public async handleUserOrGroupConfMessages({ + confMessages, + pubkey, + type, + }: { + type: ConversationTypeEnum; + pubkey: string; + confMessages: Array | null; + }) { + if (!confMessages) { + return; + } + + // first make sure to handle the shared user config message first + if (type === ConversationTypeEnum.PRIVATE && UserUtils.isUsFromCache(pubkey)) { + // this does not throw, no matter what happens + await SwarmPollingUserConfig.handleUserSharedConfigMessages(confMessages); + return; + } + if (type === ConversationTypeEnum.GROUPV2 && PubKey.is03Pubkey(pubkey)) { + await sleepFor(100); + await SwarmPollingGroupConfig.handleGroupSharedConfigMessages(confMessages, pubkey); + } + } + + public async handleRevokedMessages({ + revokedMessages, + groupPk, + type, + }: { + type: ConversationTypeEnum; + groupPk: string; + revokedMessages: Array | null; + }) { + if (!revokedMessages || isEmpty(revokedMessages)) { + return; + } + const sodium = await getSodiumRenderer(); + const userEd25519SecretKey = (await UserUtils.getUserED25519KeyPairBytes()).privKeyBytes; + const ourPk = UserUtils.getOurPubKeyStrFromCache(); + const senderEd25519Pubkey = fromHexToArray(groupPk.slice(2)); + + if (type === ConversationTypeEnum.GROUPV2 && PubKey.is03Pubkey(groupPk)) { + for (let index = 0; index < revokedMessages.length; index++) { + const revokedMessage = revokedMessages[index]; + const successWith = await MultiEncryptUtils.multiDecryptAnyEncryptionDomain({ + encoded: fromBase64ToArray(revokedMessage.data), + userEd25519SecretKey, + senderEd25519Pubkey, + }); + if (successWith && successWith.decrypted && !isEmpty(successWith.decrypted)) { + try { + await LibsessionMessageHandler.handleLibSessionMessage({ + decrypted: successWith.decrypted, + domain: successWith.domain, + groupPk, + ourPk, + sodium, + }); + } catch (e) { + window.log.warn('handleLibSessionMessage failed with:', e.message); + } + } + } } } /** * Only exposed as public for testing */ - public async pollOnceForKey( - pubkey: PubKey, - isGroup: boolean, - namespaces: Array - ) { - const polledPubkey = pubkey.key; + public async pollOnceForKey([pubkey, type]: PollForUs | PollForLegacy | PollForGroup) { + const namespaces = this.getNamespacesToPollFrom(type); + const swarmSnodes = await SnodePool.getSwarmFor(pubkey); let resultsFromAllNamespaces: RetrieveMessagesResultsBatched | null; - const swarmSnodes = await snodePool.getSwarmFor(polledPubkey); let toPollFrom: Snode | undefined; + try { toPollFrom = sample(swarmSnodes); if 
(!toPollFrom) { - throw new Error(`pollOnceForKey: no snode in swarm for ${ed25519Str(polledPubkey)}`); + throw new Error(`pollOnceForKey: no snode in swarm for ${ed25519Str(pubkey)}`); } // Note: always print something so we know if the polling is hanging window.log.info( - `about to pollNodeForKey of ${ed25519Str(pubkey.key)} from snode: ${ed25519Str(toPollFrom.pubkey_ed25519)} namespaces: ${namespaces} ` - ); - resultsFromAllNamespaces = await this.pollNodeForKey( - toPollFrom, - pubkey, - namespaces, - !isGroup + `about to pollNodeForKey of ${ed25519Str(pubkey)} from snode: ${ed25519Str(toPollFrom.pubkey_ed25519)} namespaces: ${namespaces} ` ); + resultsFromAllNamespaces = await this.pollNodeForKey(toPollFrom, pubkey, namespaces, type); // Note: always print something so we know if the polling is hanging window.log.info( - `pollNodeForKey of ${ed25519Str(pubkey.key)} from snode: ${ed25519Str(toPollFrom.pubkey_ed25519)} namespaces: ${namespaces} returned: ${resultsFromAllNamespaces?.length}` + `pollNodeForKey of ${ed25519Str(pubkey)} from snode: ${ed25519Str(toPollFrom.pubkey_ed25519)} namespaces: ${namespaces} returned: ${resultsFromAllNamespaces?.length}` ); } catch (e) { window.log.warn( @@ -261,256 +444,205 @@ export class SwarmPolling { resultsFromAllNamespaces = null; } - let allNamespacesWithoutUserConfigIfNeeded: Array = []; - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - // check if we just fetched the details from the config namespaces. - // If yes, merge them together and exclude them from the rest of the messages. - if (userConfigLibsession && resultsFromAllNamespaces) { - const userConfigMessages = resultsFromAllNamespaces.filter(m => - SnodeNamespace.isUserConfigNamespace(m.namespace) - ); - - const userConfigMessagesWithNamespace: Array> = - userConfigMessages.map(r => { - return (r.messages.messages || []).map(m => { - return { ...m, namespace: r.namespace }; - }); - }); - - allNamespacesWithoutUserConfigIfNeeded = flatten( - compact( - resultsFromAllNamespaces - .filter(m => !SnodeNamespace.isUserConfigNamespace(m.namespace)) - .map(r => r.messages.messages) - ) - ); - const userConfigMessagesMerged = flatten(compact(userConfigMessages)); - - if (!isGroup && userConfigMessagesMerged.length) { - window.log.info( - `received userConfigMessages count: ${userConfigMessagesMerged.length} for key ${pubkey.key}` - ); - try { - await this.handleSharedConfigMessages(flatten(userConfigMessagesWithNamespace)); - } catch (e) { - window.log.warn( - `handleSharedConfigMessages of ${userConfigMessagesMerged.length} failed with ${e.message}` - ); - // not rethrowing - } - } - - // first make sure to handle the shared user config message first - } else { - allNamespacesWithoutUserConfigIfNeeded = flatten( - compact(resultsFromAllNamespaces?.map(m => m.messages.messages)) - ); - } - if (allNamespacesWithoutUserConfigIfNeeded.length) { - window.log.debug( - `received allNamespacesWithoutUserConfigIfNeeded: ${allNamespacesWithoutUserConfigIfNeeded.length}` - ); + if (!resultsFromAllNamespaces?.length) { + // Not a single message from any of the polled namespace was retrieved. 
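For reference, the "was just polled" bookkeeping done by updateLastPollTimestampForPubkey can be summarised as the small helper below. The constants are stand-ins: 95 mirrors minMsgCountShouldRetry from this file, and the two-minute figure is the inactive polling period mentioned in the getPollingTimeout doc comment above.

// Sketch of the last-polled-timestamp trick: when a poll returns roughly a full
// page (95+ messages), backdate the timestamp so the next tick polls this pubkey
// again almost immediately and drains the backlog.
const INACTIVE_POLL_TIMEOUT_MS = 2 * 60 * 1000; // stand-in for SWARM_POLLING_TIMEOUT.INACTIVE
const FULL_PAGE_THRESHOLD = 95; // stand-in for minMsgCountShouldRetry

function nextLastPolledTimestamp(countMessages: number, now = Date.now()): number {
  if (countMessages >= FULL_PAGE_THRESHOLD) {
    return now - INACTIVE_POLL_TIMEOUT_MS - 5 * 1000; // looks "stale", so we re-poll soon
  }
  return now; // up to date: wait a full polling period before the next fetch
}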
+ // We must still mark the current pubkey as "was just polled" + await this.updateLastPollTimestampForPubkey({ + countMessages: 0, + pubkey, + type, + }); + return; } + const { confMessages, otherMessages, revokedMessages } = filterMessagesPerTypeOfConvo( + type, + resultsFromAllNamespaces + ); + window.log.debug( + `received confMessages:${confMessages?.length || 0}, revokedMessages:${revokedMessages?.length || 0}, , otherMessages:${otherMessages?.length || 0}, ` + ); + // We always handle the config messages first (for groups 03 or our own messages) + await this.handleUserOrGroupConfMessages({ confMessages, pubkey, type }); + await this.handleRevokedMessages({ revokedMessages, groupPk: pubkey, type }); // Merge results into one list of unique messages - const messages = uniqBy(allNamespacesWithoutUserConfigIfNeeded, x => x.hash); + const uniqOtherMsgs = uniqBy(otherMessages, x => x.hash); + if (uniqOtherMsgs.length) { + window.log.debug(`received uniqOtherMsgs: ${uniqOtherMsgs.length} for type: ${type}`); + } + await this.updateLastPollTimestampForPubkey({ + countMessages: uniqOtherMsgs.length, + pubkey, + type, + }); - // if all snodes returned an error (null), no need to update the lastPolledTimestamp - if (isGroup) { - window?.log?.debug( - `Polled for group(${ed25519Str(pubkey.key)}):, got ${messages.length} messages back.` + const shouldDiscardMessages = await this.shouldLeaveNotPolledGroup({ type, pubkey }); + if (shouldDiscardMessages) { + window.log.info( + `polled a pk which should not be polled anymore: ${ed25519Str( + pubkey + )}. Discarding polling result` ); - let lastPolledTimestamp = Date.now(); - if (messages.length >= 95) { - // if we get 95 messages or more back, it means there are probably more than this - // so make sure to retry the polling in the next 5sec by marking the last polled timestamp way before that it is really - // this is a kind of hack - lastPolledTimestamp = Date.now() - SWARM_POLLING_TIMEOUT.INACTIVE - 5 * 1000; - } - // update the last fetched timestamp - this.groupPolling = this.groupPolling.map(group => { - if (PubKey.isEqual(pubkey, group.pubkey)) { - return { - ...group, - lastPolledTimestamp, - }; - } - return group; - }); + return; } - const newMessages = await this.handleSeenMessages(messages); + const newMessages = await this.handleSeenMessages(uniqOtherMsgs); window.log.info( - `handleSeenMessages: ${newMessages.length} out of ${messages.length} are not seen yet. snode: ${toPollFrom ? ed25519Str(toPollFrom.pubkey_ed25519) : 'undefined'}` + `handleSeenMessages: ${newMessages.length} out of ${uniqOtherMsgs.length} are not seen yet. snode: ${toPollFrom ? ed25519Str(toPollFrom.pubkey_ed25519) : 'undefined'}` ); + if (type === ConversationTypeEnum.GROUPV2) { + if (!PubKey.is03Pubkey(pubkey)) { + throw new Error('groupv2 expects a 03 key'); + } + // groupv2 messages are not stored in the cache, so for each that we process, we also add it as seen message. + // this is to take care of a crash half way through processing messages. 
We'd get the same 100 messages back, and we'd skip up to the first not seen message + await handleMessagesForGroupV2(newMessages, pubkey); + // if a callback was registered for the first poll of that group pk, call it + const groupEntry = this.groupPolling.find(m => m.pubkey.key === pubkey); + if (groupEntry && groupEntry.callbackFirstPoll) { + void groupEntry.callbackFirstPoll(); + groupEntry.callbackFirstPoll = undefined; + } - // don't handle incoming messages from group swarms when using the userconfig and the group is not one of the tracked group - const isUserConfigReleaseLive = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - if ( - isUserConfigReleaseLive && - isGroup && - polledPubkey.startsWith('05') && - !(await UserGroupsWrapperActions.getLegacyGroup(polledPubkey)) // just check if a legacy group with that name exists - ) { - // that pubkey is not tracked in the wrapper anymore. Just discard those messages and make sure we are not polling - // TODOLATER we might need to do something like this for the new closed groups once released - getSwarmPollingInstance().removePubkey(polledPubkey); - } else { - // trigger the handling of all the other messages, not shared config related - newMessages.forEach(m => { - const content = extractWebSocketContent(m.data, m.hash); - if (!content) { - return; - } - - Receiver.handleRequest( - content.body, - isGroup ? polledPubkey : null, - content.messageHash, - m.expiration - ); - }); + return; } - } - private async handleSharedConfigMessages( - userConfigMessagesMerged: Array, - returnDisplayNameOnly?: boolean - ): Promise { - if (!userConfigMessagesMerged.length) { - return ''; - } - try { - window.log.info( - `handleConfigMessagesViaLibSession of "${userConfigMessagesMerged.length}" messages with libsession` - ); + // private and legacy groups are cached, so we can mark them as seen right away, they are still in the cache until processed correctly. + // at some point we should get rid of the cache completely, and do the same logic as for groupv2 above + await this.updateSeenMessages(newMessages); + // trigger the handling of all the other messages, not shared config related and not groupv2 encrypted + newMessages.forEach(m => { + const extracted = extractWebSocketContent(m.data, m.hash); - if (returnDisplayNameOnly) { - try { - const keypair = await UserUtils.getUserED25519KeyPairBytes(); - if (!keypair || !keypair.privKeyBytes) { - throw new Error('edkeypair not found for current user'); - } + if (!extracted || isEmpty(extracted)) { + return; + } - const privateKeyEd25519 = keypair.privKeyBytes; + Receiver.handleRequest( + extracted.body, + type === ConversationTypeEnum.GROUP ? pubkey : null, + extracted.messageHash, + m.expiration + ); + }); + } - // we take the lastest config message to create the wrapper in memory - const incomingConfigMessages = userConfigMessagesMerged.map(m => ({ - data: StringUtils.fromBase64ToArray(m.data), - hash: m.hash, - })); + private async shouldLeaveNotPolledGroup({ + pubkey, + type, + }: { + type: ConversationTypeEnum; + pubkey: string; + }) { + const correctlyTypedPk = PubKey.is03Pubkey(pubkey) || PubKey.is05Pubkey(pubkey) ? pubkey : null; + if (!correctlyTypedPk) { + return false; + } + const allLegacyGroupsInWrapper = await UserGroupsWrapperActions.getAllLegacyGroups(); + const allGroupsInWrapper = await UserGroupsWrapperActions.getAllGroups(); + + // don't handle incoming messages from group when the group is not tracked. 
+ // this can happen when a group is removed from the wrapper while we were polling + + const newGroupButNotInWrapper = + PubKey.is03Pubkey(correctlyTypedPk) && + !allGroupsInWrapper.some(m => m.pubkeyHex === correctlyTypedPk); + const legacyGroupButNoInWrapper = + type === ConversationTypeEnum.GROUP && + PubKey.is05Pubkey(correctlyTypedPk) && + !allLegacyGroupsInWrapper.some(m => m.pubkeyHex === pubkey); + + if (newGroupButNotInWrapper || legacyGroupButNoInWrapper) { + // not tracked anymore in the wrapper. Discard messages and stop polling + await this.notPollingForGroupAsNotInWrapper(correctlyTypedPk, 'not in wrapper after poll'); + return true; + } + return false; + } - await UserConfigWrapperActions.init(privateKeyEd25519, null); - await UserConfigWrapperActions.merge(incomingConfigMessages); + private async getHashesToBump( + type: ConversationTypeEnum, + pubkey: string + ): Promise> { + if (type === ConversationTypeEnum.PRIVATE) { + const configHashesToBump: Array = []; + for (let index = 0; index < LibSessionUtil.requiredUserVariants.length; index++) { + const variant = LibSessionUtil.requiredUserVariants[index]; + try { + const toBump = await GenericWrapperActions.currentHashes(variant); - const name = await UserConfigWrapperActions.getName(); - if (!name) { - throw new Error('UserInfo not found or name is empty'); + if (toBump?.length) { + configHashesToBump.push(...toBump); } - return name; } catch (e) { - window.log.warn('LibSessionUtil.initializeLibSessionUtilWrappers failed with', e.message); - } finally { - await UserConfigWrapperActions.free(); + window.log.warn(`failed to get currentHashes for user variant ${variant}`); } - - return ''; } - - await ConfigMessageHandler.handleConfigMessagesViaLibSession(userConfigMessagesMerged); - } catch (e) { - const allMessageHases = userConfigMessagesMerged.map(m => m.hash).join(','); - window.log.warn( - `failed to handle messages hashes "${allMessageHases}" with libsession. 
Error: "${e.message}"` - ); + window.log.debug(`configHashesToBump private count: ${configHashesToBump.length}`); + return configHashesToBump; } - return ''; + if (type === ConversationTypeEnum.GROUPV2 && PubKey.is03Pubkey(pubkey)) { + const toBump = await MetaGroupWrapperActions.currentHashes(pubkey); + window.log.debug(`configHashesToBump group(${ed25519Str(pubkey)}) count: ${toBump.length}`); + return toBump; + } + return []; } // Fetches messages for `pubkey` from `node` potentially updating // the lash hash record private async pollNodeForKey( node: Snode, - pubkey: PubKey, + pubkey: string, namespaces: Array, - isUs: boolean + type: ConversationTypeEnum ): Promise { const namespaceLength = namespaces.length; if (namespaceLength <= 0) { throw new Error(`invalid number of retrieve namespace provided: ${namespaceLength}`); } const snodeEdkey = node.pubkey_ed25519; - const pkStr = pubkey.key; try { - const prevHashes = await Promise.all( - namespaces.map(namespace => this.getLastHash(snodeEdkey, pkStr, namespace)) + const configHashesToBump = await this.getHashesToBump(type, pubkey); + const namespacesAndLastHashes = await Promise.all( + namespaces.map(async namespace => { + const lastHash = await this.getLastHash(snodeEdkey, pubkey, namespace); + return { namespace, lastHash }; + }) ); - const configHashesToBump: Array = []; - if (await ReleasedFeatures.checkIsUserConfigFeatureReleased()) { - // TODOLATER add the logic to take care of the closed groups too once we have a way to do it with the wrappers - if (isUs) { - for (let index = 0; index < LibSessionUtil.requiredUserVariants.length; index++) { - const variant = LibSessionUtil.requiredUserVariants[index]; - try { - const toBump = await GenericWrapperActions.currentHashes(variant); - - if (toBump?.length) { - configHashesToBump.push(...toBump); - } - } catch (e) { - window.log.warn(`failed to get currentHashes for user variant ${variant}`); - } - } - window.log.debug(`configHashesToBump: ${configHashesToBump}`); - } - } - - let results = await SnodeAPIRetrieve.retrieveNextMessages( + const allow401s = type === ConversationTypeEnum.GROUPV2; + const results = await SnodeAPIRetrieve.retrieveNextMessagesNoRetries( node, - prevHashes, - pkStr, - namespaces, + pubkey, + namespacesAndLastHashes, UserUtils.getOurPubKeyStrFromCache(), - configHashesToBump + configHashesToBump, + allow401s ); if (!results.length) { return []; } - // NOTE when we asked to extend the expiry of the config messages, exclude it from the list of results as we do not want to mess up the last hash tracking logic - if (configHashesToBump.length) { - try { - const lastResult = results[results.length - 1]; - if (lastResult?.code !== 200) { - // the update expiry of our config messages didn't work. - window.log.warn( - `the update expiry of our tracked config hashes didn't work: ${JSON.stringify( - lastResult - )}` - ); - } - } catch (e) { - // nothing to do I suppose here. 
- } - results = results.slice(0, results.length - 1); - } - const lastMessages = results.map(r => { return last(r.messages.messages); }); + const namespacesWithNewLastHashes = namespacesAndLastHashes.map((n, i) => { + const newHash = lastMessages[i]?.hash || ''; + const role = SnodeNamespace.toRole(n.namespace); + return `${role}:${newHash}`; + }); window.log.info( - `updating last hashes for ${ed25519Str(pubkey.key)}: ${ed25519Str(snodeEdkey)} ${lastMessages.map(m => m?.hash || '')}` + `updating last hashes for ${ed25519Str(pubkey)}: ${ed25519Str(snodeEdkey)} ${namespacesWithNewLastHashes.join(', ')}` ); await Promise.all( lastMessages.map(async (lastMessage, index) => { if (!lastMessage) { - return undefined; + return; } - return this.updateLastHash({ + await this.updateLastHash({ edkey: snodeEdkey, pubkey, namespace: namespaces[index], @@ -520,6 +652,10 @@ export class SwarmPolling { }) ); + if (!window.inboxStore?.getState().onionPaths.isOnline) { + window.inboxStore?.dispatch(updateIsOnline(true)); + } + return results; } catch (e) { if (e.message === ERROR_CODE_NO_CONNECT) { @@ -534,15 +670,39 @@ export class SwarmPolling { } } + private async notPollingForGroupAsNotInWrapper(pubkey: string, reason: string) { + if (!PubKey.is03Pubkey(pubkey) && !PubKey.is05Pubkey(pubkey)) { + return; + } + window.log.debug( + `notPollingForGroupAsNotInWrapper ${ed25519Str(pubkey)} with reason:"${reason}"` + ); + if (PubKey.is05Pubkey(pubkey)) { + await ConvoHub.use().deleteLegacyGroup(pubkey, { + fromSyncMessage: true, + sendLeaveMessage: false, + }); + } else if (PubKey.is03Pubkey(pubkey)) { + await ConvoHub.use().deleteGroup(pubkey, { + fromSyncMessage: true, + sendLeaveMessage: false, + deletionType: 'doNotKeep', + deleteAllMessagesOnSwarm: false, + forceDestroyForAllMembers: false, + }); + } + } + private loadGroupIds() { - const convos = getConversationController().getConversations(); + const convos = ConvoHub.use().getConversations(); const closedGroupsOnly = convos.filter( (c: ConversationModel) => - c.isClosedGroup() && !c.isBlocked() && !c.get('isKickedFromGroup') && !c.get('left') + (c.isClosedGroupV2() && !c.isBlocked() && !c.isKickedFromGroup() && c.isApproved()) || + (c.isClosedGroup() && !c.isBlocked() && !c.isKickedFromGroup()) ); - closedGroupsOnly.forEach((c: any) => { + closedGroupsOnly.forEach(c => { this.addGroupId(new PubKey(c.id)); }); } @@ -559,28 +719,48 @@ export class SwarmPolling { const dupHashes = await Data.getSeenMessagesByHashList(incomingHashes); const newMessages = messages.filter((m: RetrieveMessageItem) => !dupHashes.includes(m.hash)); - if (newMessages.length) { - // NOTE setting expiresAt will trigger the global function destroyExpiredMessages() on it's next interval - const newHashes = newMessages.map((m: RetrieveMessageItem) => ({ + return newMessages; + } + + private async updateSeenMessages(processedMessages: Array) { + if (processedMessages.length) { + const newHashes = processedMessages.map((m: RetrieveMessageItem) => ({ + // NOTE setting expiresAt will trigger the global function destroyExpiredMessages() on it's next interval expiresAt: m.expiration, hash: m.hash, })); await Data.saveSeenMessageHashes(newHashes); } - return newMessages; } - private async getUserNamespacesPolled() { - const isUserConfigRelease = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - return isUserConfigRelease - ? 
[ - SnodeNamespaces.UserMessages, - SnodeNamespaces.UserProfile, - SnodeNamespaces.UserContacts, - SnodeNamespaces.UserGroups, - SnodeNamespaces.ConvoInfoVolatile, - ] - : [SnodeNamespaces.UserMessages]; + // eslint-disable-next-line consistent-return + public getNamespacesToPollFrom(type: ConversationTypeEnum) { + if (type === ConversationTypeEnum.PRIVATE) { + const toRet: Array = [ + SnodeNamespaces.Default, + SnodeNamespaces.UserProfile, + SnodeNamespaces.UserContacts, + SnodeNamespaces.UserGroups, + SnodeNamespaces.ConvoInfoVolatile, + ]; + return toRet; + } + if (type === ConversationTypeEnum.GROUP) { + return [SnodeNamespaces.LegacyClosedGroup]; + } + if (type === ConversationTypeEnum.GROUPV2) { + return [ + SnodeNamespaces.ClosedGroupRevokedRetrievableMessages, // if we are kicked from the group, this will still return a 200, other namespaces will be 401/403 + SnodeNamespaces.ClosedGroupMessages, + SnodeNamespaces.ClosedGroupInfo, + SnodeNamespaces.ClosedGroupMembers, + SnodeNamespaces.ClosedGroupKeys, // keys are fetched last to avoid race conditions when someone deposits them + ]; + } + assertUnreachable( + type, + `getNamespacesToPollFrom case should have been unreachable: type:${type}` + ); } private async updateLastHash({ @@ -591,17 +771,16 @@ export class SwarmPolling { pubkey, }: { edkey: string; - pubkey: PubKey; + pubkey: string; namespace: number; hash: string; expiration: number; }): Promise { - const pkStr = pubkey.key; - const cached = await this.getLastHash(edkey, pubkey.key, namespace); + const cached = await this.getLastHash(edkey, pubkey, namespace); if (!cached || cached !== hash) { await Data.updateLastHash({ - convoId: pkStr, + convoId: pubkey, snode: edkey, hash, expiresAt: expiration, @@ -612,10 +791,10 @@ export class SwarmPolling { if (!this.lastHashes[edkey]) { this.lastHashes[edkey] = {}; } - if (!this.lastHashes[edkey][pkStr]) { - this.lastHashes[edkey][pkStr] = {}; + if (!this.lastHashes[edkey][pubkey]) { + this.lastHashes[edkey][pubkey] = {}; } - this.lastHashes[edkey][pkStr][namespace] = hash; + this.lastHashes[edkey][pubkey][namespace] = hash; } private async getLastHash(nodeEdKey: string, pubkey: string, namespace: number): Promise { @@ -643,7 +822,7 @@ export class SwarmPolling { const pubkey = UserUtils.getOurPubKeyFromCache(); - const swarmSnodes = await snodePool.getSwarmFor(pubkey.key); + const swarmSnodes = await SnodePool.getSwarmFor(pubkey.key); const toPollFrom = sample(swarmSnodes); if (!toPollFrom) { @@ -663,13 +842,13 @@ export class SwarmPolling { `[onboarding] about to pollOnceForOurDisplayName of ${ed25519Str(pubkey.key)} from snode: ${ed25519Str(toPollFrom.pubkey_ed25519)} namespaces: ${[SnodeNamespaces.UserProfile]} ` ); - const resultsFromUserProfile = await SnodeAPIRetrieve.retrieveNextMessages( + const resultsFromUserProfile = await SnodeAPIRetrieve.retrieveNextMessagesNoRetries( toPollFrom, - [''], pubkey.key, - [SnodeNamespaces.UserProfile], + [{ lastHash: '', namespace: SnodeNamespaces.UserProfile }], pubkey.key, - null + null, + false ); // Note: always print something so we know if the polling is hanging @@ -702,14 +881,214 @@ export class SwarmPolling { '[pollOnceForOurDisplayName] after merging there are no user config messages' ); } - const displayName = await this.handleSharedConfigMessages(userConfigMessagesMerged, true); + let displayNameFound: string | undefined; + try { + const keypair = await UserUtils.getUserED25519KeyPairBytes(); + if (!keypair || !keypair.privKeyBytes) { + throw new Error('edkeypair not found for 
current user'); + } + + const privateKeyEd25519 = keypair.privKeyBytes; + + // we take the latest config message to create the wrapper in memory + const incomingConfigMessages = userConfigMessagesMerged.map(m => ({ + data: StringUtils.fromBase64ToArray(m.data), + hash: m.hash, + })); + + await UserConfigWrapperActions.init(privateKeyEd25519, null); + await UserConfigWrapperActions.merge(incomingConfigMessages); + + const foundName = await UserConfigWrapperActions.getName(); + if (!foundName) { + throw new Error('UserInfo not found or name is empty'); + } + displayNameFound = foundName; + } catch (e) { + window.log.warn('LibSessionUtil.initializeLibSessionUtilWrappers failed with', e.message); + } finally { + await UserConfigWrapperActions.free(); + } - if (isEmpty(displayName)) { + if (!displayNameFound || isEmpty(displayNameFound)) { throw new NotFoundError( '[pollOnceForOurDisplayName] Got a config message from network but without a displayName...' ); } - return displayName; + return displayNameFound; + } +} + +// zod schema for retrieve items as returned by the snodes +const retrieveItemSchema = z.object({ + hash: z.string(), + data: z.string(), + expiration: z.number(), + timestamp: z.number(), +}); + +function retrieveItemWithNamespace( + results: Array +): Array { + return flatten( + compact( + results.map(result => + result.messages.messages?.map(r => { + // throws if the result is not expected + const parsedItem = retrieveItemSchema.parse(r); + return { + ...omit(parsedItem, 'timestamp'), + namespace: result.namespace, + storedAt: parsedItem.timestamp, + }; + }) + ) + ) + ); +} + +function filterMessagesPerTypeOfConvo( + type: T, + retrieveResults: RetrieveMessagesResultsBatched +): { + confMessages: Array | null; + revokedMessages: Array | null; + otherMessages: Array; +} { + switch (type) { + case ConversationTypeEnum.PRIVATE: { + const userConfs = retrieveResults.filter(m => + SnodeNamespace.isUserConfigNamespace(m.namespace) + ); + const userOthers = retrieveResults.filter( + m => !SnodeNamespace.isUserConfigNamespace(m.namespace) + ); + + const confMessages = retrieveItemWithNamespace(userConfs); + const otherMessages = retrieveItemWithNamespace(userOthers); + + return { + confMessages, + revokedMessages: null, + otherMessages: uniqBy(otherMessages, x => x.hash), + }; + } + + case ConversationTypeEnum.GROUP: + return { + confMessages: null, + otherMessages: retrieveItemWithNamespace(retrieveResults), + revokedMessages: null, + }; + + case ConversationTypeEnum.GROUPV2: { + const groupConfs = retrieveResults.filter(m => + SnodeNamespace.isGroupConfigNamespace(m.namespace) + ); + const groupRevoked = retrieveResults.filter( + m => m.namespace === SnodeNamespaces.ClosedGroupRevokedRetrievableMessages + ); + const groupOthers = retrieveResults.filter( + m => + !SnodeNamespace.isGroupConfigNamespace(m.namespace) && + m.namespace !== SnodeNamespaces.ClosedGroupRevokedRetrievableMessages + ); + + const groupConfMessages = retrieveItemWithNamespace(groupConfs); + const groupOtherMessages = retrieveItemWithNamespace(groupOthers); + const revokedMessages = retrieveItemWithNamespace(groupRevoked); + + return { + confMessages: groupConfMessages, + otherMessages: uniqBy(groupOtherMessages, x => x.hash), + revokedMessages, + }; + } + + default: + return { confMessages: null, otherMessages: [], revokedMessages: null }; } } + +async function decryptForGroupV2(retrieveResult: { + groupPk: string; + content: Uint8Array; +}): Promise { + window?.log?.info('received closed group message v2'); + 
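+    // Rough shape of the steps below:
+    //  1. require an 03-prefixed groupPk (a PreConditionFailed is thrown otherwise);
+    //  2. MetaGroupWrapperActions.decryptMessage() returns the plaintext plus the pubkeyHex of the author;
+    //  3. the plaintext is decoded as a SignalService.Envelope, and its content is decoded as a
+    //     SignalService.Content purely as a validation step (both decodes throw on malformed input);
+    //  4. the result is returned as a CLOSED_GROUP_MESSAGE envelope whose senderIdentity is that pubkeyHex,
+    //     since the receiving pipeline uses that field to attribute the author;
+    //  5. any failure is logged as a warning and null is returned so the caller can skip that message.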
try { + const groupPk = retrieveResult.groupPk; + if (!PubKey.is03Pubkey(groupPk)) { + throw new PreConditionFailed('decryptForGroupV2: not a 03 prefixed group'); + } + + const decrypted = await MetaGroupWrapperActions.decryptMessage(groupPk, retrieveResult.content); + // just try to parse what we have, it should be a protobuf content decrypted already + const parsedEnvelope = SignalService.Envelope.decode(new Uint8Array(decrypted.plaintext)); + + // not doing anything, just enforcing that the content is indeed a protobuf object of type Content, or throws + SignalService.Content.decode(parsedEnvelope.content); + + // the receiving pipeline relies on the envelope.senderIdentity field to know who is the author of a message + return { + id: v4(), + senderIdentity: decrypted.pubkeyHex, + receivedAt: Date.now(), + content: parsedEnvelope.content, + source: groupPk, + type: SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE, + timestamp: parsedEnvelope.timestamp, + }; + } catch (e) { + window.log.warn('failed to decrypt message with error: ', e.message); + return null; + } +} + +async function handleMessagesForGroupV2( + newMessages: Array, + groupPk: GroupPubkeyType +) { + for (let index = 0; index < newMessages.length; index++) { + const msg = newMessages[index]; + const retrieveResult = new Uint8Array(StringUtils.encode(msg.data, 'base64')); + try { + const envelopePlus = await decryptForGroupV2({ + content: retrieveResult, + groupPk, + }); + if (!envelopePlus) { + throw new Error('decryptForGroupV2 returned empty envelope'); + } + + // this is the processing of the message itself, which can be long. + // We allow 1 minute per message at most, which should be plenty + await Receiver.handleSwarmContentDecryptedWithTimeout({ + envelope: envelopePlus, + contentDecrypted: envelopePlus.content, + messageHash: msg.hash, + sentAtTimestamp: toNumber(envelopePlus.timestamp), + messageExpirationFromRetrieve: msg.expiration, + }); + } catch (e) { + window.log.warn('failed to handle groupv2 otherMessage because of: ', e.message); + } finally { + // that message was processed, add it to the seen messages list + try { + await Data.saveSeenMessageHashes([ + { + hash: msg.hash, + expiresAt: msg.expiration, + }, + ]); + } catch (e) { + window.log.warn('failed saveSeenMessageHashes: ', e.message); + } + } + } + + // make sure that all the message above are indeed seen (extra check as everything should already be marked as seen in the loop above) + await Data.saveSeenMessageHashes( + newMessages.map(m => ({ hash: m.hash, expiresAt: m.expiration })) + ); +} diff --git a/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts b/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts new file mode 100644 index 0000000000..36207386a2 --- /dev/null +++ b/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts @@ -0,0 +1,250 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { isEmpty, isFinite, isNumber } from 'lodash'; +import { to_hex } from 'libsodium-wrappers-sumo'; +import { Data } from '../../../../data/data'; +import { messagesExpired } from '../../../../state/ducks/conversations'; +import { groupInfoActions } from '../../../../state/ducks/metaGroups'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { ed25519Str, fromBase64ToArray } from '../../../utils/String'; +import { GroupPendingRemovals } from 
'../../../utils/job_runners/jobs/GroupPendingRemovalsJob'; +import { LibSessionUtil } from '../../../utils/libsession/libsession_utils'; +import { SnodeNamespaces } from '../namespaces'; +import { RetrieveMessageItemWithNamespace } from '../types'; +import { ConvoHub } from '../../../conversations'; +import { ProfileManager } from '../../../profile_manager/ProfileManager'; +import { UserUtils } from '../../../utils'; +import { GroupSync } from '../../../utils/job_runners/jobs/GroupSyncJob'; +import { destroyMessagesAndUpdateRedux } from '../../../disappearing_messages'; + +/** + * This is a basic optimization to avoid running the logic when the `deleteBeforeSeconds` + * and the `deleteAttachBeforeSeconds` does not change between each polls. + * Essentially, when the `deleteBeforeSeconds` is set in the group info config, + * - on start that map will be empty so we will run the logic to delete any messages sent before that. + * - after each poll, we will only rerun the logic if the new `deleteBeforeSeconds` is higher than the current setting. + * + */ +const lastAppliedRemoveMsgSentBeforeSeconds = new Map(); +const lastAppliedRemoveAttachmentSentBeforeSeconds = new Map(); + +async function handleMetaMergeResults(groupPk: GroupPubkeyType) { + const infos = await MetaGroupWrapperActions.infoGet(groupPk); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + const dumps = await MetaGroupWrapperActions.metaMakeDump(groupPk); + window.log.info( + `pushChangesToGroupSwarmIfNeeded: current meta dump: ${ed25519Str(groupPk)}:`, + to_hex(dumps) + ); + } + if (infos.isDestroyed) { + window.log.info(`${ed25519Str(groupPk)} is marked as destroyed after merge. Removing it.`); + await ConvoHub.use().deleteGroup(groupPk, { + sendLeaveMessage: false, + fromSyncMessage: false, + deletionType: 'keepAsDestroyed', // we just got something from the group's swarm, so it is not pendingInvite + deleteAllMessagesOnSwarm: false, + forceDestroyForAllMembers: false, + }); + } else { + if ( + isNumber(infos.deleteBeforeSeconds) && + isFinite(infos.deleteBeforeSeconds) && + infos.deleteBeforeSeconds > 0 && + (lastAppliedRemoveMsgSentBeforeSeconds.get(groupPk) || 0) < infos.deleteBeforeSeconds + ) { + // delete any messages in this conversation sent before that timestamp (in seconds) + const deletedMsgIds = await Data.removeAllMessagesInConversationSentBefore({ + deleteBeforeSeconds: infos.deleteBeforeSeconds, + conversationId: groupPk, + }); + window.log.info( + `removeAllMessagesInConversationSentBefore of ${ed25519Str(groupPk)} before ${infos.deleteBeforeSeconds}: `, + deletedMsgIds + ); + window.inboxStore?.dispatch( + messagesExpired(deletedMsgIds.map(messageId => ({ conversationKey: groupPk, messageId }))) + ); + lastAppliedRemoveMsgSentBeforeSeconds.set(groupPk, infos.deleteBeforeSeconds); + } + + if ( + isNumber(infos.deleteAttachBeforeSeconds) && + isFinite(infos.deleteAttachBeforeSeconds) && + infos.deleteAttachBeforeSeconds > 0 && + (lastAppliedRemoveAttachmentSentBeforeSeconds.get(groupPk) || 0) < + infos.deleteAttachBeforeSeconds + ) { + // delete any attachments in this conversation sent before that timestamp (in seconds) + const impactedMsgModels = await Data.getAllMessagesWithAttachmentsInConversationSentBefore({ + deleteAttachBeforeSeconds: infos.deleteAttachBeforeSeconds, + conversationId: groupPk, + }); + window.log.info( + `getAllMessagesWithAttachmentsInConversationSentBefore of ${ed25519Str(groupPk)} before ${infos.deleteAttachBeforeSeconds}: impactedMsgModelsIds `, + 
impactedMsgModels.map(m => m.id) + ); + + await destroyMessagesAndUpdateRedux( + impactedMsgModels.map(m => ({ conversationKey: groupPk, messageId: m.id })) + ); + + lastAppliedRemoveAttachmentSentBeforeSeconds.set(groupPk, infos.deleteAttachBeforeSeconds); + } + } + const membersWithPendingRemovals = + await MetaGroupWrapperActions.memberGetAllPendingRemovals(groupPk); + if (membersWithPendingRemovals.length) { + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (group && group.secretKey && !isEmpty(group.secretKey)) { + await GroupPendingRemovals.addJob({ groupPk }); + } + } + + const us = UserUtils.getOurPubKeyStrFromCache(); + const usMember = await MetaGroupWrapperActions.memberGet(groupPk, us); + let keysAlreadyHaveAdmin = await MetaGroupWrapperActions.keysAdmin(groupPk); + const secretKeyInUserWrapper = (await UserGroupsWrapperActions.getGroup(groupPk))?.secretKey; + + // load admin keys if needed + if ( + usMember && + secretKeyInUserWrapper && + !isEmpty(secretKeyInUserWrapper) && + !keysAlreadyHaveAdmin + ) { + try { + await MetaGroupWrapperActions.loadAdminKeys(groupPk, secretKeyInUserWrapper); + keysAlreadyHaveAdmin = await MetaGroupWrapperActions.keysAdmin(groupPk); + } catch (e) { + window.log.warn( + `tried to update our adminKeys/state for group ${ed25519Str(groupPk)} but failed with, ${e.message}` + ); + } + } + // mark ourselves as accepting the invite if needed + if (usMember?.memberStatus === 'INVITE_SENT' && keysAlreadyHaveAdmin) { + await MetaGroupWrapperActions.memberSetAccepted(groupPk, us); + } + // mark ourselves as accepting the promotion if needed + if (usMember?.memberStatus === 'PROMOTION_SENT' && keysAlreadyHaveAdmin) { + await MetaGroupWrapperActions.memberSetPromotionAccepted(groupPk, us); + } + // this won't do anything if there is no need for a sync, so we can safely plan one + await GroupSync.queueNewJobIfNeeded(groupPk); + + const convo = ConvoHub.use().get(groupPk); + const refreshedInfos = await MetaGroupWrapperActions.infoGet(groupPk); + + if (convo) { + let changes = false; + if (refreshedInfos.name !== convo.get('displayNameInProfile')) { + convo.set({ displayNameInProfile: refreshedInfos.name || undefined }); + changes = true; + } + const expectedMode = refreshedInfos.expirySeconds ? 'deleteAfterSend' : 'off'; + if ( + refreshedInfos.expirySeconds !== convo.get('expireTimer') || + expectedMode !== convo.get('expirationMode') + ) { + convo.set({ + expireTimer: refreshedInfos.expirySeconds || undefined, + expirationMode: expectedMode, + }); + changes = true; + } + if (changes) { + await convo.commit(); + } + } + + const members = await MetaGroupWrapperActions.memberGetAll(groupPk); + for (let index = 0; index < members.length; index++) { + const member = members[index]; + // if our DB doesn't have details about this user, set them. Otherwise we don't want to overwrite our changes with those + // because they are most likely out of date from what we get from the user himself. 
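+      // Concretely: members we have no conversation for are skipped, and the wrapper's name/avatar
+      // details are only pushed through ProfileManager.updateProfileOfContact when the member has a
+      // name set and it differs from the username we already hold for that contact.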
+ const memberConvo = ConvoHub.use().get(member.pubkeyHex); + if (!memberConvo) { + continue; + } + if (member.name && member.name !== memberConvo.getRealSessionUsername()) { + // eslint-disable-next-line no-await-in-loop + await ProfileManager.updateProfileOfContact( + member.pubkeyHex, + member.name, + member.profilePicture?.url || null, + member.profilePicture?.key || null + ); + } + } +} + +async function handleGroupSharedConfigMessages( + groupConfigMessages: Array, + groupPk: GroupPubkeyType +) { + try { + window.log.info( + `received groupConfigMessages count: ${groupConfigMessages.length} for groupPk:${ed25519Str( + groupPk + )}` + ); + + if (groupConfigMessages.find(m => !m.storedAt)) { + throw new Error('all incoming group config message should have a timestamp'); + } + const infos = groupConfigMessages + .filter(m => m.namespace === SnodeNamespaces.ClosedGroupInfo) + .map(info => { + return { data: fromBase64ToArray(info.data), hash: info.hash }; + }); + const members = groupConfigMessages + .filter(m => m.namespace === SnodeNamespaces.ClosedGroupMembers) + .map(info => { + return { data: fromBase64ToArray(info.data), hash: info.hash }; + }); + const keys = groupConfigMessages + .filter(m => m.namespace === SnodeNamespaces.ClosedGroupKeys) + .map(info => { + return { + data: fromBase64ToArray(info.data), + hash: info.hash, + timestampMs: info.storedAt, + }; + }); + const toMerge = { + groupInfo: infos, + groupKeys: keys, + groupMember: members, + }; + + window.log.info( + `received keys:${toMerge.groupKeys.length}, infos:${toMerge.groupInfo.length}, members:${ + toMerge.groupMember.length + } for groupPk:${ed25519Str(groupPk)}` + ); + // do the merge with our current state + await MetaGroupWrapperActions.metaMerge(groupPk, toMerge); + + await handleMetaMergeResults(groupPk); + + // save updated dumps to the DB right away + await LibSessionUtil.saveDumpsToDb(groupPk); + + // refresh the redux slice with the merged result + window.inboxStore?.dispatch( + groupInfoActions.refreshGroupDetailsFromWrapper({ + groupPk, + }) as any + ); + } catch (e) { + window.log.warn( + `handleGroupSharedConfigMessages of ${groupConfigMessages.length} failed with ${e.message}` + ); + // not rethrowing + } +} + +export const SwarmPollingGroupConfig = { handleGroupSharedConfigMessages }; diff --git a/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingUserConfig.ts b/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingUserConfig.ts new file mode 100644 index 0000000000..36a2e1c3a4 --- /dev/null +++ b/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingUserConfig.ts @@ -0,0 +1,33 @@ +import { ConfigMessageHandler } from '../../../../receiver/configMessage'; +import { RetrieveMessageItemWithNamespace } from '../types'; + +async function handleUserSharedConfigMessages( + userConfigMessagesMerged: Array +) { + try { + if (userConfigMessagesMerged.length) { + window.log.info( + `received userConfigMessagesMerged count: ${userConfigMessagesMerged.length}` + ); + + try { + window.log.info( + `handleConfigMessagesViaLibSession of "${userConfigMessagesMerged.length}" messages with libsession` + ); + await ConfigMessageHandler.handleUserConfigMessagesViaLibSession(userConfigMessagesMerged); + } catch (e) { + const allMessageHashes = userConfigMessagesMerged.map(m => m.hash).join(','); + window.log.warn( + `failed to handle messages hashes "${allMessageHashes}" with libsession. 
Error: "${e.message}"` + ); + } + } + } catch (e) { + window.log.warn( + `handleSharedConfigMessages of ${userConfigMessagesMerged.length} failed with ${e.message}` + ); + // not rethrowing + } +} + +export const SwarmPollingUserConfig = { handleUserSharedConfigMessages }; diff --git a/ts/session/apis/snode_api/types.ts b/ts/session/apis/snode_api/types.ts index 9a7925775e..e3eeae5873 100644 --- a/ts/session/apis/snode_api/types.ts +++ b/ts/session/apis/snode_api/types.ts @@ -1,13 +1,19 @@ +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; + import { SnodeNamespaces } from './namespaces'; +import { SubaccountRevokeSubRequest, SubaccountUnrevokeSubRequest } from './SnodeRequestTypes'; +import { WithSignature, WithTimestamp } from '../../types/with'; export type RetrieveMessageItem = { hash: string; expiration: number; data: string; // base64 encrypted content of the message - timestamp: number; + storedAt: number; // **not** the envelope timestamp, but when the message was effectively stored on the snode }; -export type RetrieveMessageItemWithNamespace = RetrieveMessageItem & { namespace: number }; +export type RetrieveMessageItemWithNamespace = RetrieveMessageItem & { + namespace: SnodeNamespaces; // the namespace from which this message was fetched +}; export type RetrieveMessagesResultsContent = { hf?: Array; @@ -21,11 +27,32 @@ export type RetrieveRequestResult = { messages: RetrieveMessagesResultsContent; namespace: SnodeNamespaces; }; +export type WithMessagesHashes = { messagesHashes: Array }; export type RetrieveMessagesResultsBatched = Array; +export type ShortenOrExtend = 'extend' | 'shorten' | ''; +export type WithShortenOrExtend = { shortenOrExtend: ShortenOrExtend }; + +export type WithRevokeSubRequest = { + revokeSubRequest?: SubaccountRevokeSubRequest; + unrevokeSubRequest?: SubaccountUnrevokeSubRequest; +}; + +export type SignedHashesParams = WithSignature & { + pubkey: PubkeyType; + pubkey_ed25519: PubkeyType; + messages: Array; +}; + +export type SignedGroupHashesParams = WithTimestamp & + WithSignature & { + pubkey: GroupPubkeyType; + messages: Array; + }; + /** inherits from https://api.oxen.io/storage-rpc/#/recursive?id=recursive but we only care about these values */ -export type ExpireMessageResultItem = { +export type ExpireMessageResultItem = WithSignature & { /** the expiry timestamp that was applied (which might be different from the request expiry */ expiry: number; /** ( PUBKEY_HEX || EXPIRY || RMSGs... || UMSGs... || CMSG_EXPs... ) @@ -34,7 +61,6 @@ export type ExpireMessageResultItem = { CMSG_EXPs are (HASH || EXPIRY) values, ascii-sorted by hash, for the unchanged message hashes included in the "unchanged" field. The signature uses the node's ed25519 pubkey. */ - signature: string; /** Record of , but did not get updated due to "shorten"/"extend" in the request. This field is only included when "shorten /extend" is explicitly given. 
*/ unchanged?: Record; /** ascii-sorted list of hashes that had their expiries changed (messages that were not found, and messages excluded by the shorten/extend options, are not included) */ diff --git a/ts/session/constants.ts b/ts/session/constants.ts index bcd5acef3c..6f03bf9fd8 100644 --- a/ts/session/constants.ts +++ b/ts/session/constants.ts @@ -92,6 +92,10 @@ export const VALIDATION = { export const DEFAULT_RECENT_REACTS = ['😂', '🥰', '😢', '😡', '😮', '😈']; export const REACT_LIMIT = 6; +export const MAX_USERNAME_BYTES = 64; + +export const UPDATER_INTERVAL_MS = 10 * DURATION.MINUTES; + export const FEATURE_RELEASE_TIMESTAMPS = { DISAPPEARING_MESSAGES_V2: 1710284400000, // 13/03/2024 10:00 Melbourne time USER_CONFIG: 1690761600000, // Monday July 31st at 10am Melbourne time diff --git a/ts/session/conversations/ConversationController.ts b/ts/session/conversations/ConversationController.ts index 73ef72d5b0..a11f372ad2 100644 --- a/ts/session/conversations/ConversationController.ts +++ b/ts/session/conversations/ConversationController.ts @@ -1,6 +1,6 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable more/no-then */ -import { ConvoVolatileType } from 'libsession_util_nodejs'; +import { ConvoVolatileType, GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; import { isEmpty, isNil } from 'lodash'; import { Data } from '../../data/data'; @@ -12,48 +12,61 @@ import { } from '../../state/ducks/conversations'; import { BlockedNumberController } from '../../util'; import { getOpenGroupManager } from '../apis/open_group_api/opengroupV2/OpenGroupManagerV2'; -import { getSwarmFor } from '../apis/snode_api/snodePool'; import { PubKey } from '../types'; -import { getMessageQueue } from '..'; +import { ConfigDumpData } from '../../data/configDump/configDump'; import { deleteAllMessagesByConvoIdNoConfirmation } from '../../interactions/conversationInteractions'; import { removeAllClosedGroupEncryptionKeyPairs } from '../../receiver/closedGroups'; +import { groupInfoActions } from '../../state/ducks/metaGroups'; import { getCurrentlySelectedConversationOutsideRedux } from '../../state/selectors/conversations'; import { assertUnreachable } from '../../types/sqlSharedTypes'; -import { UserGroupsWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../webworker/workers/browser/libsession_worker_interface'; import { OpenGroupUtils } from '../apis/open_group_api/utils'; import { getSwarmPollingInstance } from '../apis/snode_api'; -import { GetNetworkTime } from '../apis/snode_api/getNetworkTime'; +import { DeleteAllFromGroupMsgNodeSubRequest } from '../apis/snode_api/SnodeRequestTypes'; import { SnodeNamespaces } from '../apis/snode_api/namespaces'; import { ClosedGroupMemberLeftMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupMemberLeftMessage'; +import { GroupUpdateMemberLeftMessage } from '../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftMessage'; +import { GroupUpdateMemberLeftNotificationMessage } from '../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftNotificationMessage'; +import { MessageQueue, MessageSender } from '../sending'; import { UserUtils } from '../utils'; -import { ConfigurationSync } from '../utils/job_runners/jobs/ConfigurationSyncJob'; +import { ed25519Str } from '../utils/String'; +import { PreConditionFailed } from '../utils/errors'; +import { RunJobResult } from 
'../utils/job_runners/PersistedJob'; +import { GroupSync } from '../utils/job_runners/jobs/GroupSyncJob'; +import { UserSync } from '../utils/job_runners/jobs/UserSyncJob'; import { LibSessionUtil } from '../utils/libsession/libsession_utils'; import { SessionUtilContact } from '../utils/libsession/libsession_utils_contacts'; import { SessionUtilConvoInfoVolatile } from '../utils/libsession/libsession_utils_convo_info_volatile'; import { SessionUtilUserGroups } from '../utils/libsession/libsession_utils_user_groups'; +import { DisappearingMessages } from '../disappearing_messages'; +import { StoreGroupRequestFactory } from '../apis/snode_api/factories/StoreGroupRequestFactory'; import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../../models/types'; +import { NetworkTime } from '../../util/NetworkTime'; -let instance: ConversationController | null; +let instance: ConvoController | null; -export const getConversationController = () => { +const getConvoHub = () => { if (instance) { return instance; } - instance = new ConversationController(); + instance = new ConvoController(); return instance; }; type DeleteOptions = { fromSyncMessage: boolean }; -export class ConversationController { +class ConvoController { private readonly conversations: ConversationCollection; private _initialFetchComplete: boolean = false; - private _initialPromise?: Promise; + private _convoHubInitialPromise?: Promise; /** - * Do not call this constructor. You get the ConversationController through getConversationController() only + * Do not call this constructor. You get the ConvoHub through ConvoHub.use() only */ constructor() { this.conversations = new ConversationCollection(); @@ -62,7 +75,7 @@ export class ConversationController { // FIXME this could return | undefined public get(id: string): ConversationModel { if (!this._initialFetchComplete) { - throw new Error('getConversationController().get() needs complete initial fetch'); + throw new Error('ConvoHub.use().get() needs complete initial fetch'); } return this.conversations.get(id); @@ -70,7 +83,7 @@ export class ConversationController { public getOrThrow(id: string): ConversationModel { if (!this._initialFetchComplete) { - throw new Error('getConversationController().get() needs complete initial fetch'); + throw new Error('ConvoHub.use().get() needs complete initial fetch'); } const convo = this.conversations.get(id); @@ -78,7 +91,7 @@ export class ConversationController { if (convo) { return convo; } - throw new Error(`Conversation ${id} does not exist on getConversationController().get()`); + throw new Error(`Conversation ${id} does not exist on ConvoHub.use().get()`); } // Needed for some model setup which happens during the initial fetch() call below public getUnsafe(id: string): ConversationModel | undefined { @@ -93,19 +106,19 @@ export class ConversationController { if ( type !== ConversationTypeEnum.PRIVATE && type !== ConversationTypeEnum.GROUP && - type !== ConversationTypeEnum.GROUPV3 + type !== ConversationTypeEnum.GROUPV2 ) { - throw new TypeError(`'type' must be 'private' or 'group' or 'groupv3' but got: '${type}'`); + throw new TypeError(`'type' must be 'private' or 'group' or 'groupv2' but got: '${type}'`); } - if (type === ConversationTypeEnum.GROUPV3 && !PubKey.isClosedGroupV3(id)) { + if (type === ConversationTypeEnum.GROUPV2 && !PubKey.is03Pubkey(id)) { throw new Error( - 'required v3 closed group` ` but the pubkey does not match the 03 prefix for them' + 'required v3 closed group but the pubkey does not match the 03 prefix for 
them' ); } if (!this._initialFetchComplete) { - throw new Error('getConversationController().get() needs complete initial fetch'); + throw new Error('ConvoHub.use().get() needs complete initial fetch'); } if (this.conversations.get(id)) { @@ -139,11 +152,6 @@ export class ConversationController { }) ); - if (!conversation.isPublic() && conversation.isActive()) { - // NOTE: we request snodes updating the cache, but ignore the result - - void getSwarmFor(id); - } return conversation; }; @@ -153,7 +161,7 @@ export class ConversationController { } public getContactProfileNameOrShortenedPubKey(pubKey: string): string { - const conversation = getConversationController().get(pubKey); + const conversation = ConvoHub.use().get(pubKey); if (!conversation) { return pubKey; } @@ -164,21 +172,21 @@ export class ConversationController { id: string | PubKey, type: ConversationTypeEnum ): Promise { - const initialPromise = - this._initialPromise !== undefined ? this._initialPromise : Promise.resolve(); - return initialPromise.then(() => { - if (!id) { - return Promise.reject(new Error('getOrCreateAndWait: invalid id passed.')); - } - const pubkey = id && (id as any).key ? (id as any).key : id; - const conversation = this.getOrCreate(pubkey, type); + const convoHubInitialPromise = + this._convoHubInitialPromise !== undefined ? this._convoHubInitialPromise : Promise.resolve(); + await convoHubInitialPromise; - if (conversation) { - return conversation.initialPromise.then(() => conversation); - } + if (!id) { + throw new Error('getOrCreateAndWait: invalid id passed.'); + } + const pubkey = id && (id as any).key ? (id as any).key : id; + const conversation = this.getOrCreate(pubkey, type); - return Promise.reject(new Error('getOrCreateAndWait: did not get conversation')); - }); + if (conversation) { + return conversation.initialPromise.then(() => conversation); + } + + return Promise.reject(new Error('getOrCreateAndWait: did not get conversation')); } /** @@ -188,9 +196,7 @@ export class ConversationController { */ public async deleteBlindedContact(blindedId: string) { if (!this._initialFetchComplete) { - throw new Error( - 'getConversationController().deleteBlindedContact() needs complete initial fetch' - ); + throw new Error('ConvoHub.use().deleteBlindedContact() needs complete initial fetch'); } if (!PubKey.isBlinded(blindedId)) { throw new Error('deleteBlindedContact allow accepts blinded id'); @@ -210,40 +216,174 @@ export class ConversationController { await conversation.commit(); } - public async deleteClosedGroup( - groupId: string, - options: DeleteOptions & { sendLeaveMessage: boolean; forceDeleteLocal?: boolean } + public async deleteLegacyGroup( + groupPk: PubkeyType, + { sendLeaveMessage, fromSyncMessage }: DeleteOptions & { sendLeaveMessage: boolean } ) { - const conversation = await this.deleteConvoInitialChecks(groupId, 'LegacyGroup'); + if (!PubKey.is05Pubkey(groupPk)) { + throw new PreConditionFailed('deleteLegacyGroup excepts a 05 group'); + } + + window.log.info( + `deleteLegacyGroup: ${ed25519Str(groupPk)}, sendLeaveMessage:${sendLeaveMessage}, fromSyncMessage:${fromSyncMessage}` + ); + + // this deletes all messages in the conversation + const conversation = await this.deleteConvoInitialChecks(groupPk, 'LegacyGroup', false); if (!conversation || !conversation.isClosedGroup()) { return; } - window.log.info(`deleteClosedGroup: ${groupId}, sendLeaveMessage?:${options.sendLeaveMessage}`); - getSwarmPollingInstance().removePubkey(groupId); // we don't need to keep polling anymore. 
+ // we don't need to keep polling anymore. + getSwarmPollingInstance().removePubkey(groupPk, 'deleteLegacyGroup'); - if (!options.forceDeleteLocal) { - await leaveClosedGroup(groupId, options.fromSyncMessage); - window.log.info( - `deleteClosedGroup: ${groupId}, sendLeaveMessage?:${options.sendLeaveMessage}` - ); + // send the leave message before we delete everything for this group (including the key!) + if (sendLeaveMessage) { + await leaveClosedGroup(groupPk, fromSyncMessage); + } - if (options.sendLeaveMessage) { - await leaveClosedGroup(groupId, options.fromSyncMessage); + await removeLegacyGroupFromWrappers(groupPk); + + // we never keep a left legacy group. Only fully remove it. + await this.removeGroupOrCommunityFromDBAndRedux(groupPk); + await UserSync.queueNewJobIfNeeded(); + } + + public async deleteGroup( + groupPk: GroupPubkeyType, + { + sendLeaveMessage, + fromSyncMessage, + deletionType, + deleteAllMessagesOnSwarm, + forceDestroyForAllMembers, + }: DeleteOptions & { + sendLeaveMessage: boolean; + deletionType: 'doNotKeep' | 'keepAsKicked' | 'keepAsDestroyed'; + deleteAllMessagesOnSwarm: boolean; + forceDestroyForAllMembers: boolean; + } + ) { + if (!PubKey.is03Pubkey(groupPk)) { + throw new PreConditionFailed('deleteGroup excepts a 03-group'); + } + + window.log.info( + `deleteGroup: ${ed25519Str(groupPk)}, sendLeaveMessage:${sendLeaveMessage}, fromSyncMessage:${fromSyncMessage}, deletionType:${deletionType}, deleteAllMessagesOnSwarm:${deleteAllMessagesOnSwarm}, forceDestroyForAllMembers:${forceDestroyForAllMembers}` + ); + + // this deletes all messages in the conversation + const conversation = await this.deleteConvoInitialChecks(groupPk, 'Group', false); + if (!conversation || !conversation.isClosedGroup()) { + return; + } + // we don't need to keep polling anymore. + getSwarmPollingInstance().removePubkey(groupPk, 'deleteGroup'); + + const groupInUserGroup = await UserGroupsWrapperActions.getGroup(groupPk); + + // send the leave message before we delete everything for this group (including the key!) + // Note: if we were kicked, we already lost the authData/secretKey for it, so no need to try to send our message. + if (sendLeaveMessage && !groupInUserGroup?.kicked) { + const failedToSendLeaveMessage = await leaveClosedGroup(groupPk, fromSyncMessage); + if (PubKey.is03Pubkey(groupPk) && failedToSendLeaveMessage) { + // this is caught and is adding an interaction notification message + throw new Error('Failed to send our leaving message to 03 group'); } } + // a group 03 can be removed fully or kept empty as kicked. + // when it was pendingInvite, we delete it fully, + // when it was not, we empty the group but keep it with the "you have been kicked" message + // Note: the pendingInvite=true case cannot really happen as we wouldn't be polling from that group (and so, not get the message kicking us) + if (deletionType === 'keepAsKicked' || deletionType === 'keepAsDestroyed') { + // delete the secretKey/authData if we had it. If we need it for something, it has to be done before this call. + if (groupInUserGroup) { + groupInUserGroup.authData = null; + groupInUserGroup.secretKey = null; + groupInUserGroup.disappearingTimerSeconds = undefined; + + // we want to update the groupName in user group with whatever is in the groupInfo, + // so even if the group is not polled anymore, we have an up to date name on restore. 
+ let nameInMetaGroup: string | undefined; + try { + const metaGroup = await MetaGroupWrapperActions.infoGet(groupPk); + if (metaGroup && metaGroup.name && !isEmpty(metaGroup.name)) { + nameInMetaGroup = metaGroup.name; + } + } catch (e) { + // nothing to do + } + if (groupInUserGroup && nameInMetaGroup && groupInUserGroup.name !== nameInMetaGroup) { + groupInUserGroup.name = nameInMetaGroup; + } + await UserGroupsWrapperActions.setGroup(groupInUserGroup); + if (deletionType === 'keepAsKicked') { + await UserGroupsWrapperActions.markGroupKicked(groupPk); + } else { + await UserGroupsWrapperActions.markGroupDestroyed(groupPk); + } + } + } else { + try { + const us = UserUtils.getOurPubKeyStrFromCache(); + const allMembers = await MetaGroupWrapperActions.memberGetAll(groupPk); + const otherAdminsCount = allMembers + .filter(m => m.nominatedAdmin) + .filter(m => m.pubkeyHex !== us).length; + const weAreLastAdmin = otherAdminsCount === 0; + const infos = await MetaGroupWrapperActions.infoGet(groupPk); + const fromUserGroup = await UserGroupsWrapperActions.getGroup(groupPk); + if (!infos || !fromUserGroup || isEmpty(infos) || isEmpty(fromUserGroup)) { + throw new Error('deleteGroup: some required data not present'); + } + const { secretKey } = fromUserGroup; + + // check if we are the last admin + if (secretKey && !isEmpty(secretKey) && (weAreLastAdmin || forceDestroyForAllMembers)) { + const deleteAllMessagesSubRequest = deleteAllMessagesOnSwarm + ? new DeleteAllFromGroupMsgNodeSubRequest({ + groupPk, + secretKey, + }) + : undefined; + + // this marks the group info as deleted. We need to push those details + await MetaGroupWrapperActions.infoDestroy(groupPk); + const lastPushResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + deleteAllMessagesSubRequest, + extraStoreRequests: [], + }); + if (lastPushResult !== RunJobResult.Success) { + throw new Error(`Failed to destroyGroupDetails for pk ${ed25519Str(groupPk)}`); + } + } + } catch (e) { + // if that group was already freed this will happen. + // we still want to delete it entirely though + window.log.warn(`deleteGroup: MetaGroupWrapperActions failed with: ${e.message}`); + } - // if we were kicked or sent our left message, we have nothing to do more with that group. - // Just delete everything related to it, not trying to add update message or send a left message. - await this.removeGroupOrCommunityFromDBAndRedux(groupId); - await removeLegacyGroupFromWrappers(groupId); + // this deletes the secretKey if we had it. If we need it for something, it has to be done before this call. + await UserGroupsWrapperActions.eraseGroup(groupPk); - if (!options.fromSyncMessage) { - await ConfigurationSync.queueNewJobIfNeeded(); + // we are on the emptyGroupButKeepAsKicked=false case, so we remove it all + await this.removeGroupOrCommunityFromDBAndRedux(groupPk); } + + await SessionUtilConvoInfoVolatile.removeGroupFromWrapper(groupPk); + // release the memory (and the current meta-dumps in memory for that group) + window.log.info(`freeing meta group wrapper: ${ed25519Str(groupPk)}`); + await MetaGroupWrapperActions.free(groupPk); + // delete the dumps from the meta group state only, not the details in the UserGroups wrapper itself. 
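+      // (at this point the UserGroups entry has already been erased, or marked kicked/destroyed,
+      // and the in-memory meta group wrapper was freed just above)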
+ await ConfigDumpData.deleteDumpFor(groupPk); + getSwarmPollingInstance().removePubkey(groupPk, 'deleteGroup'); + + window.inboxStore?.dispatch(groupInfoActions.removeGroupDetailsFromSlice({ groupPk })); + await UserSync.queueNewJobIfNeeded(); } public async deleteCommunity(convoId: string, options: DeleteOptions) { - const conversation = await this.deleteConvoInitialChecks(convoId, 'Community'); + const conversation = await this.deleteConvoInitialChecks(convoId, 'Community', false); if (!conversation || !conversation.isPublic()) { return; } @@ -257,15 +397,15 @@ export class ConversationController { await this.removeGroupOrCommunityFromDBAndRedux(conversation.id); if (!options.fromSyncMessage) { - await ConfigurationSync.queueNewJobIfNeeded(); + await UserSync.queueNewJobIfNeeded(); } } public async delete1o1( id: string, - options: DeleteOptions & { justHidePrivate?: boolean; keepMessages?: boolean } + options: DeleteOptions & { justHidePrivate?: boolean; keepMessages: boolean } ) { - const conversation = await this.deleteConvoInitialChecks(id, '1o1', options?.keepMessages); + const conversation = await this.deleteConvoInitialChecks(id, '1o1', options.keepMessages); if (!conversation || !conversation.isPrivate()) { return; @@ -294,7 +434,7 @@ export class ConversationController { } if (conversation.id.startsWith('05')) { // make sure to filter blinded contacts as it will throw otherwise - await SessionUtilContact.removeContactFromWrapper(conversation.id); // then remove the entry alltogether from the wrapper + await SessionUtilContact.removeContactFromWrapper(conversation.id); // then remove the entry altogether from the wrapper await SessionUtilConvoInfoVolatile.removeContactFromWrapper(conversation.id); } if (getCurrentlySelectedConversationOutsideRedux() === conversation.id) { @@ -303,7 +443,7 @@ export class ConversationController { } if (!options.fromSyncMessage) { - await ConfigurationSync.queueNewJobIfNeeded(); + await UserSync.queueNewJobIfNeeded(); } } @@ -311,7 +451,7 @@ export class ConversationController { * * @returns the reference of the list of conversations stored. * Warning: You should not edit things directly from that list. This must only be used for reading things. 
- * If you need to make a change, do the usual getConversationControler().get('the id you want to edit') + * If you need to make a change, do the usual ConvoHub.use().get('the id you want to edit') */ public getConversations(): Array { return this.conversations.models; @@ -379,19 +519,18 @@ export class ConversationController { throw error; } }; - await BlockedNumberController.load(); - this._initialPromise = load(); + this._convoHubInitialPromise = load(); - return this._initialPromise; + return this._convoHubInitialPromise; } public loadPromise() { - return this._initialPromise; + return this._convoHubInitialPromise; } public reset() { - this._initialPromise = Promise.resolve(); + this._convoHubInitialPromise = Promise.resolve(); this._initialFetchComplete = false; if (window?.inboxStore) { window.inboxStore?.dispatch(conversationActions.removeAllConversations()); @@ -402,37 +541,37 @@ export class ConversationController { private async deleteConvoInitialChecks( convoId: string, deleteType: ConvoVolatileType, - keepMessages?: boolean + keepMessages: boolean ) { if (!this._initialFetchComplete) { - throw new Error(`getConversationController.${deleteType} needs to complete initial fetch`); + throw new Error(`ConvoHub.${deleteType} needs complete initial fetch`); } - window.log.info(`${deleteType} with ${convoId}`); + window.log.info(`${deleteType} with ${ed25519Str(convoId)}`); const conversation = this.conversations.get(convoId); if (!conversation) { - window.log.warn(`${deleteType} no such convo ${convoId}`); + window.log.warn(`${deleteType} no such convo ${ed25519Str(convoId)}`); return null; } // Note in some cases (hiding a conversation) we don't want to delete the messages if (!keepMessages) { // those are the stuff to do for all conversation types - window.log.info(`${deleteType} destroyingMessages: ${convoId}`); + window.log.info(`${deleteType} destroyingMessages: ${ed25519Str(convoId)}`); await deleteAllMessagesByConvoIdNoConfirmation(convoId); - window.log.info(`${deleteType} messages destroyed: ${convoId}`); + window.log.info(`${deleteType} messages destroyed: ${ed25519Str(convoId)}`); } return conversation; } private async removeGroupOrCommunityFromDBAndRedux(convoId: string) { - window.log.info(`cleanUpGroupConversation, removing convo from DB: ${convoId}`); + window.log.info(`cleanUpGroupConversation, removing convo from DB: ${ed25519Str(convoId)}`); // not a private conversation, so not a contact for the ContactWrapper await Data.removeConversation(convoId); - // remove the data from the opengrouprooms table too if needed + // remove the data from the opengroup rooms table too if needed if (convoId && OpenGroupUtils.isOpenGroupV2(convoId)) { // remove the roomInfos locally for this open group room including the pubkey try { @@ -442,7 +581,7 @@ export class ConversationController { } } - window.log.info(`cleanUpGroupConversation, convo removed from DB: ${convoId}`); + window.log.info(`cleanUpGroupConversation, convo removed from DB: ${ed25519Str(convoId)}`); const conversation = this.conversations.get(convoId); if (conversation) { @@ -454,26 +593,30 @@ export class ConversationController { } window.inboxStore?.dispatch(conversationActions.conversationRemoved(convoId)); - window.log.info(`cleanUpGroupConversation, convo removed from store: ${convoId}`); + window.log.info(`cleanUpGroupConversation, convo removed from store: ${ed25519Str(convoId)}`); } } /** - * You most likely don't want to call this function directly, but instead use the deleteLegacyGroup() from the 
ConversationController as it will take care of more cleaningup. + * You most likely don't want to call this function directly, but instead use the deleteLegacyGroup() + * from the ConversationController as it will take care of more cleaning up. + * This throws if a leaveMessage needs to be sent, but fails to be sent. * * Note: `fromSyncMessage` is used to know if we need to send a leave group message to the group first. * So if the user made the action on this device, fromSyncMessage should be false, but if it happened from a linked device polled update, set this to true. + * + * @returns true if the message failed to be sent. */ -async function leaveClosedGroup(groupId: string, fromSyncMessage: boolean) { - const convo = getConversationController().get(groupId); +async function leaveClosedGroup(groupPk: PubkeyType | GroupPubkeyType, fromSyncMessage: boolean) { + const convo = ConvoHub.use().get(groupPk); if (!convo || !convo.isClosedGroup()) { window?.log?.error('Cannot leave non-existing group'); - return; + return false; } const ourNumber = UserUtils.getOurPubKeyStrFromCache(); - const isCurrentUserAdmin = convo.get('groupAdmins')?.includes(ourNumber); + const isCurrentUserAdmin = convo.weAreAdminUnblinded(); let members: Array = []; let admins: Array = []; @@ -487,60 +630,121 @@ async function leaveClosedGroup(groupId: string, fromSyncMessage: boolean) { } else { // otherwise, just the exclude ourself from the members and trigger an update with this convo.set({ left: true }); - members = (convo.get('members') || []).filter((m: string) => m !== ourNumber); - admins = convo.get('groupAdmins') || []; + members = (convo.getGroupMembers() || []).filter((m: string) => m !== ourNumber); + admins = convo.getGroupAdmins(); } convo.set({ members }); await convo.updateGroupAdmins(admins, false); await convo.commit(); - const networkTimestamp = GetNetworkTime.getNowWithNetworkOffset(); - - getSwarmPollingInstance().removePubkey(groupId); + getSwarmPollingInstance().removePubkey(groupPk, 'leaveClosedGroup'); if (fromSyncMessage) { // no need to send our leave message as our other device should already have sent it. - return; + return false; } - const keypair = await Data.getLatestClosedGroupEncryptionKeyPair(groupId); - if (!keypair || isEmpty(keypair) || isEmpty(keypair.publicHex) || isEmpty(keypair.privateHex)) { - // if we do not have a keypair, we won't be able to send our leaving message neither, so just skip sending it. - // this can happen when getting a group from a broken libsession usergroup wrapper, but not only. - return; + if (PubKey.is03Pubkey(groupPk)) { + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || (!group.secretKey && !group.authData)) { + throw new Error('leaveClosedGroup: group from UserGroupsWrapperActions is null '); + } + const createAtNetworkTimestamp = NetworkTime.now(); + // Send the update to the 03 group + const ourLeavingMessage = new GroupUpdateMemberLeftMessage({ + createAtNetworkTimestamp, + groupPk, + expirationType: null, // we keep that one **not** expiring + expireTimer: null, + }); + + const ourLeavingNotificationMessage = new GroupUpdateMemberLeftNotificationMessage({ + createAtNetworkTimestamp, + groupPk, + ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp), // this one should be expiring with the convo expiring details + }); + + window?.log?.info( + `We are leaving the group ${ed25519Str(groupPk)}. 
Sending our leaving messages.` + ); + let failedToSent03LeaveMessage = false; + // We might not be able to send our leaving messages (no encryption key pair, we were already removed, no network, etc). + // If that happens, we should just remove everything from our current user. + try { + const storeRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [ourLeavingNotificationMessage, ourLeavingMessage], + { + authData: group.authData, + secretKey: group.secretKey, + } + ); + const results = await MessageSender.sendEncryptedDataToSnode({ + destination: groupPk, + sortedSubRequests: storeRequests, + method: 'sequence', + }); + + if (results?.[0].code !== 200) { + throw new Error( + `Even with the retries, leaving message for group ${ed25519Str( + groupPk + )} failed to be sent...` + ); + } + } catch (e) { + window?.log?.warn( + `failed to send our leaving messages for ${ed25519Str(groupPk)}:${e.message}` + ); + failedToSent03LeaveMessage = true; + } + + // the rest of the cleaning of that conversation is done in the `deleteClosedGroup()` + + return failedToSent03LeaveMessage; + } + + // TODO remove legacy group support + const keyPair = await Data.getLatestClosedGroupEncryptionKeyPair(groupPk); + if (!keyPair || isEmpty(keyPair) || isEmpty(keyPair.publicHex) || isEmpty(keyPair.privateHex)) { + // if we do not have a keyPair, we won't be able to send our leaving message neither, so just skip sending it. + // this can happen when getting a group from a broken libsession user group wrapper, but not only. + return false; } // Send the update to the group const ourLeavingMessage = new ClosedGroupMemberLeftMessage({ - timestamp: networkTimestamp, - groupId, + createAtNetworkTimestamp: NetworkTime.now(), + groupId: groupPk, expirationType: null, // we keep that one **not** expiring expireTimer: null, }); - window?.log?.info(`We are leaving the group ${groupId}. Sending our leaving message.`); - // if we do not have a keypair for that group, we can't send our leave message, so just skip the message sending part - const wasSent = await getMessageQueue().sendToPubKeyNonDurably({ + window?.log?.info(`We are leaving the legacy group ${groupPk}. Sending our leaving message.`); + + // if we do not have a keyPair for that group, we can't send our leave message, so just skip the message sending part + const wasSent = await MessageQueue.use().sendToLegacyGroupNonDurably({ message: ourLeavingMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, - pubkey: PubKey.cast(groupId), + namespace: SnodeNamespaces.LegacyClosedGroup, + destination: groupPk, }); - // TODO our leaving message might fail to be sent for some specific reason we want to still delete the group. - // for instance, if we do not have the encryption keypair anymore, we cannot send our left message, but we should still delete it's content + // The leaving message might fail to be sent for some specific reason we want to still delete the group. + // For instance, if we do not have the encryption keyPair anymore, we cannot send our left message, but we should still delete its content if (wasSent) { window?.log?.info( - `Leaving message sent ${groupId}. Removing everything related to this group.` + `Leaving message sent ${ed25519Str(groupPk)}. Removing everything related to this group.` ); } else { window?.log?.info( - `Leaving message failed to be sent for ${groupId}. But still removing everything related to this group....` + `Leaving message failed to be sent for ${ed25519Str( + groupPk + )}. 
But still removing everything related to this group....` ); } - // the rest of the cleaning of that conversation is done in the `deleteClosedGroup()` + return wasSent; } async function removeLegacyGroupFromWrappers(groupId: string) { - getSwarmPollingInstance().removePubkey(groupId); + getSwarmPollingInstance().removePubkey(groupId, 'removeLegacyGroupFromWrappers'); await UserGroupsWrapperActions.eraseLegacyGroup(groupId); await SessionUtilConvoInfoVolatile.removeLegacyGroupFromWrapper(groupId); @@ -570,3 +774,5 @@ async function removeCommunityFromWrappers(conversationId: string) { window?.log?.info('SessionUtilUserGroups.removeCommunityFromWrapper failed:', e.message); } } + +export const ConvoHub = { use: getConvoHub }; diff --git a/ts/session/conversations/createClosedGroup.ts b/ts/session/conversations/createClosedGroup.ts index 250ba55ea9..dc9378ccfc 100644 --- a/ts/session/conversations/createClosedGroup.ts +++ b/ts/session/conversations/createClosedGroup.ts @@ -1,6 +1,4 @@ -import _ from 'lodash'; -import { ClosedGroup, getMessageQueue } from '..'; -import { ConversationTypeEnum } from '../../models/types'; +import _, { isFinite, isNumber } from 'lodash'; import { addKeyPairToCacheAndDBIfNeeded } from '../../receiver/closedGroups'; import { ECKeyPair } from '../../receiver/keypairs'; import { openConversationWithMessages } from '../../state/ducks/conversations'; @@ -8,6 +6,7 @@ import { updateConfirmModal } from '../../state/ducks/modalDialog'; import { getSwarmPollingInstance } from '../apis/snode_api'; import { SnodeNamespaces } from '../apis/snode_api/namespaces'; import { generateClosedGroupPublicKey, generateCurve25519KeyPairWithoutPrefix } from '../crypto'; +import { ClosedGroup, GroupInfo } from '../group/closed-group'; import { ClosedGroupNewMessage, ClosedGroupNewMessageParams, @@ -15,17 +14,22 @@ import { import { PubKey } from '../types'; import { UserUtils } from '../utils'; import { forceSyncConfigurationNowIfNeeded } from '../utils/sync/syncUtils'; -import { getConversationController } from './ConversationController'; - -export async function createClosedGroup(groupName: string, members: Array, isV3: boolean) { - const setOfMembers = new Set(members); +import { ConvoHub } from './ConversationController'; +import { ConversationTypeEnum } from '../../models/types'; +import { NetworkTime } from '../../util/NetworkTime'; +import { MessageQueue } from '../sending'; - if (isV3) { - throw new Error('groupv3 is not supported yet'); - } +/** + * Creates a brand new closed group from user supplied details. This function generates a new identityKeyPair so cannot be used to restore a closed group. + * @param groupName the name of this closed group + * @param members the initial members of this closed group + */ +export async function createClosedGroup(groupName: string, members: Array) { + // this is all legacy group logic. 
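[Editorial example, not part of the patch] A minimal usage sketch of the updated helper: the `isV3` flag is gone, so this path now only ever creates a legacy group. The import path is taken from the diff header above and the member ids are placeholders.

```ts
import { createClosedGroup } from '../../session/conversations/createClosedGroup';

async function createLegacyGroupExample() {
  // placeholder 05-prefixed session ids of the initial members
  const members = ['05aaaa...', '05bbbb...'];
  // generates the group public key and encryption key pair, then sends the invites
  await createClosedGroup('Book club', members);
}
```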
+ // TODO: To be removed + const setOfMembers = new Set(members); const us = UserUtils.getOurPubKeyStrFromCache(); - const groupPublicKey = await generateClosedGroupPublicKey(); const encryptionKeyPair = await generateCurve25519KeyPairWithoutPrefix(); @@ -34,11 +38,10 @@ export async function createClosedGroup(groupName: string, members: Array inviteResult !== false); + const allInvitesSent = _.every(inviteResults, inviteResult => { + return isNumber(inviteResult) && isFinite(inviteResult); + }); if (allInvitesSent) { if (isRetry) { @@ -169,14 +174,12 @@ async function sendToGroupMembers( inviteResults.forEach((result, index) => { const member = listOfMembers[index]; // group invite must always contain the admin member. - if (result !== true || admins.includes(member)) { + if (!result || admins.includes(member)) { membersToResend.push(member); } }); const namesOfMembersToResend = membersToResend.map( - m => - getConversationController().get(m)?.getNicknameOrRealUsernameOrPlaceholder() || - window.i18n('unknown') + m => ConvoHub.use().get(m)?.getNicknameOrRealUsernameOrPlaceholder() || window.i18n('unknown') ); if (membersToResend.length < 1) { @@ -217,6 +220,8 @@ function createInvitePromises( admins: Array, encryptionKeyPair: ECKeyPair ) { + const createAtNetworkTimestamp = NetworkTime.now(); + return listOfMembers.map(async m => { const messageParams: ClosedGroupNewMessageParams = { groupId: groupPublicKey, @@ -224,15 +229,15 @@ function createInvitePromises( members: listOfMembers, admins, keypair: encryptionKeyPair, - timestamp: Date.now(), + createAtNetworkTimestamp, expirationType: null, // we keep that one **not** expiring expireTimer: 0, }; const message = new ClosedGroupNewMessage(messageParams); - return getMessageQueue().sendToPubKeyNonDurably({ + return MessageQueue.use().sendTo1o1NonDurably({ pubkey: PubKey.cast(m), message, - namespace: SnodeNamespaces.UserMessages, + namespace: SnodeNamespaces.Default, }); }); } diff --git a/ts/session/conversations/index.ts b/ts/session/conversations/index.ts index fd48f2bf8a..a338ab1b7f 100644 --- a/ts/session/conversations/index.ts +++ b/ts/session/conversations/index.ts @@ -1,3 +1,3 @@ -import { getConversationController } from './ConversationController'; +import { ConvoHub } from './ConversationController'; -export { getConversationController }; +export { ConvoHub }; diff --git a/ts/session/crypto/DecryptedAttachmentsManager.ts b/ts/session/crypto/DecryptedAttachmentsManager.ts index abf63ad60a..698603c79b 100644 --- a/ts/session/crypto/DecryptedAttachmentsManager.ts +++ b/ts/session/crypto/DecryptedAttachmentsManager.ts @@ -21,13 +21,13 @@ import { } from '../../types/MessageAttachment'; import { decryptAttachmentBufferRenderer } from '../../util/local_attachments_encrypter'; -export const urlToDecryptedBlobMap = new Map< +const urlToDecryptedBlobMap = new Map< string, { decrypted: string; lastAccessTimestamp: number; forceRetain: boolean } >(); -export const urlToDecryptingPromise = new Map>(); +const urlToDecryptingPromise = new Map>(); -export const cleanUpOldDecryptedMedias = () => { +const cleanUpOldDecryptedMedias = () => { const currentTimestamp = Date.now(); let countCleaned = 0; let countKept = 0; @@ -56,19 +56,19 @@ export const cleanUpOldDecryptedMedias = () => { ); }; -export const getLocalAttachmentPath = () => { +const getLocalAttachmentPath = () => { return getAttachmentPath(); }; -export const getAbsoluteAttachmentPath = (url: string) => { +const getAbsoluteAttachmentPath = (url: string) => { return 
msgGetAbsoluteAttachmentPath(url); }; -export const readFileContent = async (url: string) => { +const readFileContent = async (url: string) => { return fse.readFile(url); }; -export const getDecryptedMediaUrl = async ( +const getDecryptedMediaUrl = async ( url: string, contentType: string, isAvatar: boolean @@ -84,9 +84,9 @@ export const getDecryptedMediaUrl = async ( if ( (isAbsolute && - exports.getLocalAttachmentPath && - url.startsWith(exports.getLocalAttachmentPath())) || - fse.pathExistsSync(exports.getAbsoluteAttachmentPath(url)) + DecryptedAttachmentsManager.getLocalAttachmentPath && + url.startsWith(DecryptedAttachmentsManager.getLocalAttachmentPath())) || + fse.pathExistsSync(DecryptedAttachmentsManager.getAbsoluteAttachmentPath(url)) ) { // this is a file encoded by session on our current attachments path. // we consider the file is encrypted. @@ -117,10 +117,11 @@ export const getDecryptedMediaUrl = async ( // window.log.debug('about to read and decrypt file :', url, path.isAbsolute(url)); try { const absUrl = path.isAbsolute(url) ? url : getAbsoluteAttachmentPath(url); - const encryptedFileContent = await readFileContent(absUrl); + const encryptedFileContent = await DecryptedAttachmentsManager.readFileContent(absUrl); const decryptedContent = await decryptAttachmentBufferRenderer( encryptedFileContent.buffer ); + if (decryptedContent?.length) { const arrayBuffer = decryptedContent.buffer; const obj = makeObjectUrl(arrayBuffer, contentType); @@ -159,14 +160,17 @@ export const getDecryptedMediaUrl = async ( * * Returns the already decrypted URL or null */ -export const getAlreadyDecryptedMediaUrl = (url: string): string | null => { +const getAlreadyDecryptedMediaUrl = (url: string): string | null => { if (!url) { return null; } if (url.startsWith('blob:')) { return url; } - if (exports.getLocalAttachmentPath() && url.startsWith(exports.getLocalAttachmentPath())) { + if ( + DecryptedAttachmentsManager.getLocalAttachmentPath() && + url.startsWith(DecryptedAttachmentsManager.getLocalAttachmentPath()) + ) { if (urlToDecryptedBlobMap.has(url)) { const existing = urlToDecryptedBlobMap.get(url); @@ -182,7 +186,7 @@ export const getAlreadyDecryptedMediaUrl = (url: string): string | null => { return null; }; -export const getDecryptedBlob = async (url: string, contentType: string): Promise => { +const getDecryptedBlob = async (url: string, contentType: string): Promise => { const decryptedUrl = await getDecryptedMediaUrl(url, contentType, false); return urlToBlob(decryptedUrl); }; @@ -190,7 +194,18 @@ export const getDecryptedBlob = async (url: string, contentType: string): Promis /** * This function should only be used for testing purpose */ -export const resetDecryptedUrlForTesting = () => { +const resetDecryptedUrlForTesting = () => { urlToDecryptedBlobMap.clear(); urlToDecryptingPromise.clear(); }; + +export const DecryptedAttachmentsManager = { + resetDecryptedUrlForTesting, + getDecryptedBlob, + getAlreadyDecryptedMediaUrl, + getLocalAttachmentPath, + getAbsoluteAttachmentPath, + cleanUpOldDecryptedMedias, + getDecryptedMediaUrl, + readFileContent, +}; diff --git a/ts/session/crypto/MessageEncrypter.ts b/ts/session/crypto/MessageEncrypter.ts index 66eff65173..98a53a4d6d 100644 --- a/ts/session/crypto/MessageEncrypter.ts +++ b/ts/session/crypto/MessageEncrypter.ts @@ -1,69 +1,94 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { isEmpty } from 'lodash'; +import { concatUInt8Array, getSodiumRenderer } from '.'; +import { Data } from '../../data/data'; import { 
SignalService } from '../../protobuf'; +import { assertUnreachable } from '../../types/sqlSharedTypes'; +import { MetaGroupWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; import { PubKey } from '../types'; -import { concatUInt8Array, getSodiumRenderer, MessageEncrypter } from '.'; -import { fromHexToArray } from '../utils/String'; -import { Data } from '../../data/data'; import { UserUtils } from '../utils'; +import { fromHexToArray } from '../utils/String'; +import { SigningFailed } from '../utils/errors'; import { addMessagePadding } from './BufferPadding'; -export { concatUInt8Array, getSodiumRenderer }; - type EncryptResult = { envelopeType: SignalService.Envelope.Type; cipherText: Uint8Array; }; +async function encryptWithLibSession(destination: GroupPubkeyType, plainText: Uint8Array) { + try { + return (await MetaGroupWrapperActions.encryptMessages(destination, [plainText]))[0]; + } catch (e) { + window.log.warn('encrypt message for group failed with', e.message); + throw new SigningFailed(e.message); + } +} + +async function encryptForLegacyGroup(destination: PubKey, plainText: Uint8Array) { + const hexEncryptionKeyPair = await Data.getLatestClosedGroupEncryptionKeyPair(destination.key); + if (!hexEncryptionKeyPair) { + window?.log?.warn("Couldn't get key pair for closed group during encryption"); + throw new Error("Couldn't get key pair for closed group"); + } + + const destinationX25519Pk = PubKey.cast(hexEncryptionKeyPair.publicHex); + + const cipherTextClosedGroup = await MessageEncrypter.encryptUsingSessionProtocol( + destinationX25519Pk, + plainText + ); + + return { + envelopeType: SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE, + cipherText: cipherTextClosedGroup, + }; +} + /** - * Encrypt `plainTextBuffer` with given `encryptionType` for `device`. + * Encrypt `plainTextBuffer` with given `encryptionType` for `destination`. * - * @param device The device `PubKey` to encrypt for. + * @param destination The device `PubKey` to encrypt for. * @param plainTextBuffer The unpadded plaintext buffer. It will be padded * @param encryptionType The type of encryption. 
* @returns The envelope type and the base64 encoded cipher text */ -export async function encrypt( - device: PubKey, +// eslint-disable-next-line consistent-return +async function encrypt( + destination: PubKey, plainTextBuffer: Uint8Array, encryptionType: SignalService.Envelope.Type ): Promise { const { CLOSED_GROUP_MESSAGE, SESSION_MESSAGE } = SignalService.Envelope.Type; - - if (encryptionType !== CLOSED_GROUP_MESSAGE && encryptionType !== SESSION_MESSAGE) { - throw new Error(`Invalid encryption type:${encryptionType}`); - } - - const encryptForClosedGroup = encryptionType === CLOSED_GROUP_MESSAGE; - const plainText = addMessagePadding(plainTextBuffer); - - if (encryptForClosedGroup) { - // window?.log?.info( - // 'Encrypting message with SessionProtocol and envelope type is CLOSED_GROUP_MESSAGE' - // ); - const hexEncryptionKeyPair = await Data.getLatestClosedGroupEncryptionKeyPair(device.key); - if (!hexEncryptionKeyPair) { - window?.log?.warn("Couldn't get key pair for closed group during encryption"); - throw new Error("Couldn't get key pair for closed group"); + const plainTextPadded = addMessagePadding(plainTextBuffer); + + switch (encryptionType) { + case SESSION_MESSAGE: { + const cipherText = await MessageEncrypter.encryptUsingSessionProtocol( + PubKey.cast(destination.key), + plainTextPadded + ); + return { envelopeType: SESSION_MESSAGE, cipherText }; } - const hexPubFromECKeyPair = PubKey.cast(hexEncryptionKeyPair.publicHex); - - const cipherTextClosedGroup = await MessageEncrypter.encryptUsingSessionProtocol( - hexPubFromECKeyPair, - plainText - ); + case CLOSED_GROUP_MESSAGE: { + const groupPk = destination.key; + if (PubKey.is03Pubkey(groupPk)) { + return { + envelopeType: CLOSED_GROUP_MESSAGE, + cipherText: await encryptWithLibSession(groupPk, plainTextBuffer), + }; + } - return { - envelopeType: CLOSED_GROUP_MESSAGE, - cipherText: cipherTextClosedGroup, - }; + return encryptForLegacyGroup(destination, plainTextPadded); // not padding it again, it is already done by libsession + } + default: + assertUnreachable(encryptionType, 'MessageEncrypter encrypt unreachable case'); } - const cipherText = await MessageEncrypter.encryptUsingSessionProtocol(device, plainText); - - return { envelopeType: SESSION_MESSAGE, cipherText }; } -export async function encryptUsingSessionProtocol( - recipientHexEncodedX25519PublicKey: PubKey, +async function encryptUsingSessionProtocol( + destinationX25519Pk: PubKey, plaintext: Uint8Array ): Promise { const userED25519KeyPairHex = await UserUtils.getUserED25519KeyPair(); @@ -76,9 +101,9 @@ export async function encryptUsingSessionProtocol( } const sodium = await getSodiumRenderer(); - // window?.log?.info('encryptUsingSessionProtocol for ', recipientHexEncodedX25519PublicKey.key); - - const recipientX25519PublicKey = recipientHexEncodedX25519PublicKey.withoutPrefixToArray(); + const recipientX25519PublicKey = fromHexToArray( + PubKey.removePrefixIfNeeded(destinationX25519Pk.key) + ); const userED25519PubKeyBytes = fromHexToArray(userED25519KeyPairHex.pubKey); const userED25519SecretKeyBytes = fromHexToArray(userED25519KeyPairHex.privKey); @@ -90,15 +115,20 @@ export async function encryptUsingSessionProtocol( ); const signature = sodium.crypto_sign_detached(verificationData, userED25519SecretKeyBytes); - if (!signature || signature.length === 0) { + if (isEmpty(signature)) { throw new Error("Couldn't sign message"); } const plaintextWithMetadata = concatUInt8Array(plaintext, userED25519PubKeyBytes, signature); const ciphertext = 
sodium.crypto_box_seal(plaintextWithMetadata, recipientX25519PublicKey); - if (!ciphertext) { + if (isEmpty(ciphertext)) { throw new Error("Couldn't encrypt message."); } return ciphertext; } + +export const MessageEncrypter = { + encryptUsingSessionProtocol, + encrypt, +}; diff --git a/ts/session/crypto/index.ts b/ts/session/crypto/index.ts index 6581fe5442..be2d461f0d 100644 --- a/ts/session/crypto/index.ts +++ b/ts/session/crypto/index.ts @@ -1,16 +1,15 @@ import crypto from 'crypto'; import libsodiumwrappers from 'libsodium-wrappers-sumo'; -import * as DecryptedAttachmentsManager from './DecryptedAttachmentsManager'; -import * as MessageEncrypter from './MessageEncrypter'; - import { ECKeyPair } from '../../receiver/keypairs'; import { toHex } from '../utils/String'; -export { DecryptedAttachmentsManager, MessageEncrypter }; - export type LibSodiumWrappers = typeof libsodiumwrappers; +export type WithLibSodiumWrappers = { + sodium: LibSodiumWrappers; +}; + export async function getSodiumRenderer(): Promise { await libsodiumwrappers.ready; return libsodiumwrappers; @@ -52,21 +51,6 @@ export async function generateClosedGroupPublicKey() { return toHex(prependedX25519PublicKey); } -/** - * Returns a generated ed25519 hex with a public key being of length 66 and starting with 03. - */ -export async function generateGroupV3Keypair() { - const sodium = await getSodiumRenderer(); - const ed25519KeyPair = sodium.crypto_sign_keypair(); - - const publicKey = new Uint8Array(ed25519KeyPair.publicKey); - const preprendedPubkey = new Uint8Array(33); - preprendedPubkey.set(publicKey, 1); - preprendedPubkey[0] = 3; - - return { pubkey: toHex(preprendedPubkey), privateKey: toHex(ed25519KeyPair.privateKey) }; -} - /** * Returns a generated curve25519 keypair without the prefix on the public key. 
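[Editorial illustration, not part of the patch] The sealed payload produced by `encryptUsingSessionProtocol()` above is `plaintext || senderEd25519Pk (32 bytes) || signature (64 bytes)`, with the signature covering `plaintext || senderEd25519Pk || recipientX25519Pk`. A sketch of the matching verification step a recipient would run after `crypto_box_seal_open`, assuming `sodium.ready` has already resolved:

```ts
import sodium from 'libsodium-wrappers-sumo';

// `unsealed` is the output of crypto_box_seal_open() on the recipient side.
function verifyUnsealedSessionProtocolPayload(
  unsealed: Uint8Array,
  recipientX25519Pk: Uint8Array
): Uint8Array {
  const signature = unsealed.slice(unsealed.length - 64);
  const senderEd25519Pk = unsealed.slice(unsealed.length - 96, unsealed.length - 64);
  const plaintext = unsealed.slice(0, unsealed.length - 96);

  // the signature was made over plaintext || senderEd25519Pk || recipientX25519Pk
  const verificationData = new Uint8Array(plaintext.length + 32 + recipientX25519Pk.length);
  verificationData.set(plaintext, 0);
  verificationData.set(senderEd25519Pk, plaintext.length);
  verificationData.set(recipientX25519Pk, plaintext.length + 32);

  if (!sodium.crypto_sign_verify_detached(signature, verificationData, senderEd25519Pk)) {
    throw new Error('invalid signature in unsealed payload');
  }
  return plaintext;
}
```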
* This should be used for the generation of encryption keypairs for a closed group diff --git a/ts/session/disappearing_messages/index.ts b/ts/session/disappearing_messages/index.ts index 54ecf51014..24de18e061 100644 --- a/ts/session/disappearing_messages/index.ts +++ b/ts/session/disappearing_messages/index.ts @@ -9,8 +9,7 @@ import { MessageModel } from '../../models/message'; import { SignalService } from '../../protobuf'; import { ReleasedFeatures } from '../../util/releaseFeature'; import { ExpiringDetails, expireMessagesOnSnode } from '../apis/snode_api/expireRequest'; -import { GetNetworkTime } from '../apis/snode_api/getNetworkTime'; -import { getConversationController } from '../conversations'; +import { ConvoHub } from '../conversations'; import { isValidUnixTimestamp } from '../utils/Timestamps'; import { UpdateMsgExpirySwarm } from '../utils/job_runners/jobs/UpdateMsgExpirySwarmJob'; import { @@ -24,8 +23,10 @@ import { DisappearingMessageUpdate, ReadyToDisappearMsgUpdate, } from './types'; +import { PubKey } from '../types'; +import { NetworkTime } from '../../util/NetworkTime'; -async function destroyMessagesAndUpdateRedux( +export async function destroyMessagesAndUpdateRedux( messages: Array<{ conversationKey: string; messageId: string; @@ -59,7 +60,7 @@ async function destroyMessagesAndUpdateRedux( // trigger a refresh the last message for all those uniq conversation conversationWithChanges.forEach(convoIdToUpdate => { - getConversationController().get(convoIdToUpdate)?.updateLastMessage(); + ConvoHub.use().get(convoIdToUpdate)?.updateLastMessage(); }); } @@ -89,8 +90,8 @@ async function destroyExpiredMessages() { window.log.info('destroyExpiredMessages: convosToRefresh:', convosToRefresh); await Promise.all( convosToRefresh.map(async c => { - getConversationController().get(c)?.updateLastMessage(); - return getConversationController().get(c)?.refreshInMemoryDetails(); + ConvoHub.use().get(c)?.updateLastMessage(); + return ConvoHub.use().get(c)?.refreshInMemoryDetails(); }) ); } catch (error) { @@ -171,7 +172,7 @@ function setExpirationStartTimestamp( callLocation?: string, messageId?: string ): number | undefined { - let expirationStartTimestamp: number | undefined = GetNetworkTime.getNowWithNetworkOffset(); + let expirationStartTimestamp: number | undefined = NetworkTime.now(); if (callLocation) { // window.log.debug( @@ -388,7 +389,7 @@ async function checkForExpireUpdateInContentMessage( messageExpirationFromRetrieve, }; - // NOTE some platforms do not include the diappearing message values in the Data Message for sent messages so we have to trust the conversation settings until v2 is released + // NOTE some platforms do not include the disappearing message values in the Data Message for sent messages so we have to trust the conversation settings until v2 is released if ( !isDisappearingMessagesV2Released && !isLegacyConversationSettingMessage && @@ -456,7 +457,7 @@ function checkForExpiringOutgoingMessage(message: MessageModel, location?: strin expirationType && expireTimer > 0 && !message.getExpirationStartTimestamp() && - !(isGroupConvo && isControlMessage) + !(isGroupConvo && isControlMessage && !PubKey.is03Pubkey(convo.id)) ) { const expirationMode = changeToDisappearingConversationMode(convo, expirationType, expireTimer); @@ -543,12 +544,12 @@ function getMessageReadyToDisappear( ) { /** * Edge case: when we send a message before we poll for a message sent earlier, our convo volatile update will - * mark that incoming message as read right away (because it was 
sent earlier than our latest convolatile lastRead). + * mark that incoming message as read right away (because it was sent earlier than our latest convo volatile lastRead). * To take care of this case, we need to check if an incoming DaR message is in a read state but its expiration has not been updated yet. * The way we do it, is by checking that the swarm expiration is before (now + expireTimer). * If it looks like this expiration was not updated yet, we need to trigger a UpdateExpiryJob for that message. */ - const now = GetNetworkTime.getNowWithNetworkOffset(); + const now = NetworkTime.now(); const expirationNowPlusTimer = now + expireTimer * 1000; const msgExpirationWasAlreadyUpdated = messageExpirationFromRetrieve <= expirationNowPlusTimer; // Note: a message might be added even when it expired, but the periodic cleaning of expired message will pick it up and remove it soon enough @@ -700,12 +701,32 @@ async function updateMessageExpiriesOnSwarm(messages: Array) { } } +function getExpireDetailsForOutgoingMessage( + convo: ConversationModel, + createAtNetworkTimestamp: number +) { + const expireTimer = convo.getExpireTimer(); + const expireDetails = { + expirationType: DisappearingMessages.changeToDisappearingMessageType( + convo, + expireTimer, + convo.getExpirationMode() + ), + expireTimer, + expirationTimer: expireTimer, + messageExpirationFromRetrieve: expireTimer > 0 ? createAtNetworkTimestamp + expireTimer : null, + }; + + return expireDetails; +} + export const DisappearingMessages = { destroyMessagesAndUpdateRedux, initExpiringMessageListener, updateExpiringMessagesCheck, setExpirationStartTimestamp, changeToDisappearingMessageType, + getExpireDetailsForOutgoingMessage, changeToDisappearingConversationMode, forcedDeleteAfterReadMsgSetting, forcedDeleteAfterSendMsgSetting, diff --git a/ts/session/disappearing_messages/timerOptions.ts b/ts/session/disappearing_messages/timerOptions.ts index a8a9e307d6..856c95f6bc 100644 --- a/ts/session/disappearing_messages/timerOptions.ts +++ b/ts/session/disappearing_messages/timerOptions.ts @@ -3,39 +3,54 @@ import { formatAbbreviatedExpireTimer, formatNonAbbreviatedExpireTimer, } from '../../util/i18n/formatting/expirationTimer'; -import { DURATION_SECONDS } from '../constants'; -type TimerOptionsEntry = { name: string; value: number }; +type TimerSeconds = + | 0 + | 5 + | 10 + | 30 + | 60 + | 300 + | 1800 + | 3600 + | 21600 + | 43200 + | 86400 + | 604800 + | 1209600; + +type TimerOptionsEntry = { name: string; value: TimerSeconds }; export type TimerOptionsArray = Array; -const VALUES: Array = [ +// prettier-ignore +const VALUES: Array = [ /** off */ 0, /** 5 seconds */ - 5 * DURATION_SECONDS.SECONDS, + 5, /** 10 seconds */ - 10 * DURATION_SECONDS.SECONDS, + 10, /** 30 seconds */ - 30 * DURATION_SECONDS.SECONDS, + 30, /** 1 minute */ - 1 * DURATION_SECONDS.MINUTES, + 60, /** 5 minutes */ - 5 * DURATION_SECONDS.MINUTES, + 300, /** 30 minutes */ - 30 * DURATION_SECONDS.MINUTES, + 1800, /** 1 hour */ - 1 * DURATION_SECONDS.HOURS, + 3600, /** 6 hours */ - 6 * DURATION_SECONDS.HOURS, + 21600, /** 12 hours */ - 12 * DURATION_SECONDS.HOURS, + 43200, /** 1 day */ - 1 * DURATION_SECONDS.DAYS, + 86400, /** 1 week */ - 1 * DURATION_SECONDS.WEEKS, + 604800, /** 2 weeks */ - 2 * DURATION_SECONDS.WEEKS, -]; + 1209600, +] as const; function getName(seconds = 0) { if (seconds === 0) { diff --git a/ts/session/disappearing_messages/types.ts b/ts/session/disappearing_messages/types.ts index 39c5574cd7..da671c52e2 100644 --- 
a/ts/session/disappearing_messages/types.ts +++ b/ts/session/disappearing_messages/types.ts @@ -36,6 +36,8 @@ export type DisappearingMessageUpdate = { messageExpirationFromRetrieve: number | null; }; +export type WithDisappearingMessageUpdate = { expireUpdate: DisappearingMessageUpdate | null }; + export type ReadyToDisappearMsgUpdate = Pick< DisappearingMessageUpdate, 'expirationType' | 'expirationTimer' | 'messageExpirationFromRetrieve' diff --git a/ts/session/group/closed-group.ts b/ts/session/group/closed-group.ts index dd81856062..bf89c4a5be 100644 --- a/ts/session/group/closed-group.ts +++ b/ts/session/group/closed-group.ts @@ -1,25 +1,27 @@ import _ from 'lodash'; import { v4 as uuidv4 } from 'uuid'; -import { getMessageQueue } from '..'; import { Data } from '../../data/data'; import { ConversationModel } from '../../models/conversation'; import { ConversationAttributes } from '../../models/conversationAttributes'; import { MessageModel } from '../../models/message'; -import { MessageAttributesOptionals } from '../../models/messageType'; +import { MessageAttributesOptionals, MessageGroupUpdate } from '../../models/messageType'; import { SignalService } from '../../protobuf'; import { addKeyPairToCacheAndDBIfNeeded, distributingClosedGroupEncryptionKeyPairs, } from '../../receiver/closedGroups'; import { ECKeyPair } from '../../receiver/keypairs'; -import { GetNetworkTime } from '../apis/snode_api/getNetworkTime'; +import { PropsForGroupUpdateType } from '../../state/ducks/conversations'; import { SnodeNamespaces } from '../apis/snode_api/namespaces'; -import { getConversationController } from '../conversations'; +import { ConvoHub } from '../conversations'; import { generateCurve25519KeyPairWithoutPrefix } from '../crypto'; -import { encryptUsingSessionProtocol } from '../crypto/MessageEncrypter'; +import { MessageEncrypter } from '../crypto/MessageEncrypter'; import { DisappearingMessages } from '../disappearing_messages'; -import { DisappearAfterSendOnly, DisappearingMessageUpdate } from '../disappearing_messages/types'; +import { + DisappearAfterSendOnly, + WithDisappearingMessageUpdate, +} from '../disappearing_messages/types'; import { ClosedGroupAddedMembersMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupAddedMembersMessage'; import { ClosedGroupEncryptionPairMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairMessage'; import { ClosedGroupNameChangeMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNameChangeMessage'; @@ -28,7 +30,10 @@ import { ClosedGroupRemovedMembersMessage } from '../messages/outgoing/controlMe import { PubKey } from '../types'; import { UserUtils } from '../utils'; import { fromHexToArray, toHex } from '../utils/String'; +import { PreConditionFailed } from '../utils/errors'; import { ConversationTypeEnum } from '../../models/types'; +import { NetworkTime } from '../../util/NetworkTime'; +import { MessageQueue } from '../sending'; export type GroupInfo = { id: string; @@ -41,15 +46,7 @@ export type GroupInfo = { admins?: Array; }; -export interface GroupDiff extends MemberChanges { - newName?: string; -} - -export interface MemberChanges { - joiningMembers?: Array; - leavingMembers?: Array; - kickedMembers?: Array; -} +export type GroupDiff = PropsForGroupUpdateType; /** * This function is only called when the local user makes a change to a group. 
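[Editorial example, not part of the patch] A small usage sketch, assuming the `ClosedGroup` export object declared at the end of this file: renaming a legacy group while leaving the member list untouched by passing `members: null`, which makes the update fall back to `convo.getGroupMembers()`.

```ts
import { ClosedGroup } from '../../session/group/closed-group';

async function renameLegacyGroupExample(groupId: string, newName: string) {
  // throws a PreConditionFailed for 03 groups; those updates go through the groupv2 code paths
  await ClosedGroup.initiateClosedGroupUpdate(groupId, newName, null);
}
```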
@@ -60,16 +57,16 @@ export interface MemberChanges { * @param members the new members (or just pass the old one if nothing changed) * @returns nothing */ -export async function initiateClosedGroupUpdate( +async function initiateClosedGroupUpdate( groupId: string, groupName: string, - members: Array + members: Array | null ) { - const isGroupV3 = PubKey.isClosedGroupV3(groupId); - const convo = await getConversationController().getOrCreateAndWait( - groupId, - isGroupV3 ? ConversationTypeEnum.GROUPV3 : ConversationTypeEnum.GROUP - ); + const isGroupV2 = PubKey.is03Pubkey(groupId); + if (isGroupV2) { + throw new PreConditionFailed('initiateClosedGroupUpdate does not handle closed groupv2'); + } + const convo = await ConvoHub.use().getOrCreateAndWait(groupId, ConversationTypeEnum.GROUP); const expirationType = DisappearingMessages.changeToDisappearingMessageType( convo, @@ -83,28 +80,37 @@ export async function initiateClosedGroupUpdate( throw new Error(`Groups cannot be deleteAfterRead`); } + const updatedMembers = members === null ? convo.getGroupMembers() : members; + // do not give an admins field here. We don't want to be able to update admins and // updateOrCreateClosedGroup() will update them if given the choice. const groupDetails: GroupInfo = { id: groupId, name: groupName, - members, + members: updatedMembers, // remove from the zombies list the zombies not which are not in the group anymore - zombies: convo.get('zombies')?.filter(z => members.includes(z)), + zombies: convo.getGroupZombies()?.filter(z => updatedMembers.includes(z)), activeAt: Date.now(), expirationType, expireTimer, }; - const diff = buildGroupDiff(convo, groupDetails); - + const diff = buildGroupV1Diff(convo, groupDetails); await updateOrCreateClosedGroup(groupDetails); + if (!diff) { + window.log.warn('buildGroupV1Diff returned null'); + await convo.commit(); + + return; + } + const updateObj: GroupInfo = { id: groupId, name: groupName, - members, - admins: convo.get('groupAdmins'), + members: updatedMembers, + admins: convo.getGroupAdmins(), + expireTimer: convo.get('expireTimer'), }; const sharedDetails = { @@ -113,10 +119,11 @@ export async function initiateClosedGroupUpdate( // Note: we agreed that legacy group control messages do not expire expireUpdate: null, convo, + markAlreadySent: false, }; - if (diff.newName?.length) { - const nameOnlyDiff: GroupDiff = _.pick(diff, 'newName'); + if (diff.type === 'name' && diff.newName?.length) { + const nameOnlyDiff: GroupDiff = _.pick(diff, ['type', 'newName']); const dbMessageName = await addUpdateMessage({ diff: nameOnlyDiff, @@ -125,29 +132,24 @@ export async function initiateClosedGroupUpdate( await sendNewName(convo, diff.newName, dbMessageName.id as string); } - if (diff.joiningMembers?.length) { - const joiningOnlyDiff: GroupDiff = _.pick(diff, 'joiningMembers'); + if (diff.type === 'add' && diff.added?.length) { + const joiningOnlyDiff: GroupDiff = _.pick(diff, ['type', 'added', 'withHistory']); const dbMessageAdded = await addUpdateMessage({ diff: joiningOnlyDiff, ...sharedDetails, }); - await sendAddedMembers(convo, diff.joiningMembers, dbMessageAdded.id as string, updateObj); + await sendAddedMembers(convo, diff.added, dbMessageAdded.id as string, updateObj); } - if (diff.leavingMembers?.length) { - const leavingOnlyDiff: GroupDiff = { kickedMembers: diff.leavingMembers }; + if (diff.type === 'kicked' && diff.kicked?.length) { + const leavingOnlyDiff: GroupDiff = _.pick(diff, ['type', 'kicked']); + const dbMessageLeaving = await addUpdateMessage({ diff: 
leavingOnlyDiff, ...sharedDetails, }); - const stillMembers = members; - await sendRemovedMembers( - convo, - diff.leavingMembers, - stillMembers, - dbMessageLeaving.id as string - ); + await sendRemovedMembers(convo, diff.kicked, updatedMembers, dbMessageLeaving.id as string); } await convo.commit(); } @@ -158,33 +160,38 @@ export async function addUpdateMessage({ sender, sentAt, expireUpdate, + markAlreadySent, }: { convo: ConversationModel; diff: GroupDiff; sender: string; sentAt: number; - expireUpdate: DisappearingMessageUpdate | null; -}): Promise { - const groupUpdate: any = {}; + markAlreadySent: boolean; +} & WithDisappearingMessageUpdate): Promise { + const groupUpdate: MessageGroupUpdate = {}; - if (diff.newName) { + if (diff.type === 'name' && diff.newName) { groupUpdate.name = diff.newName; - } - - if (diff.joiningMembers) { - groupUpdate.joined = diff.joiningMembers; - } - - if (diff.leavingMembers) { - groupUpdate.left = diff.leavingMembers; - } - - if (diff.kickedMembers) { - groupUpdate.kicked = diff.kickedMembers; + } else if (diff.type === 'add' && diff.added) { + if (diff.withHistory) { + groupUpdate.joinedWithHistory = diff.added; + } else { + groupUpdate.joined = diff.added; + } + } else if (diff.type === 'left' && diff.left) { + groupUpdate.left = diff.left; + } else if (diff.type === 'kicked' && diff.kicked) { + groupUpdate.kicked = diff.kicked; + } else if (diff.type === 'promoted' && diff.promoted) { + groupUpdate.promoted = diff.promoted; + } else if (diff.type === 'avatarChange') { + groupUpdate.avatarChange = true; + } else { + throw new Error('addUpdateMessage with unknown type of change'); } const isUs = UserUtils.isUsFromCache(sender); - const msgModel: MessageAttributesOptionals = { + const msgAttrs: MessageAttributesOptionals = { sent_at: sentAt, group_update: groupUpdate, source: sender, @@ -192,16 +199,26 @@ export async function addUpdateMessage({ type: isUs ? 'outgoing' : 'incoming', }; + /** + * When we receive an update from our linked device, it is an outgoing message + * but which was obviously already synced (as we got it). + * When that's the case we need to mark the message as sent right away, + * so the MessageStatus 'sending' state is not shown for the last message in the left pane. + */ + if (msgAttrs.type === 'outgoing' && markAlreadySent) { + msgAttrs.sent = true; + } + if (convo && expireUpdate && expireUpdate.expirationType && expireUpdate.expirationTimer > 0) { const { expirationTimer, expirationType, isLegacyDataMessage } = expireUpdate; - msgModel.expirationType = expirationType === 'deleteAfterSend' ? 'deleteAfterSend' : 'unknown'; - msgModel.expireTimer = msgModel.expirationType === 'deleteAfterSend' ? expirationTimer : 0; + msgAttrs.expirationType = expirationType === 'deleteAfterSend' ? 'deleteAfterSend' : 'unknown'; + msgAttrs.expireTimer = msgAttrs.expirationType === 'deleteAfterSend' ? expirationTimer : 0; // NOTE Triggers disappearing for an incoming groupUpdate message // TODO legacy messages support will be removed in a future release if (isLegacyDataMessage || expirationType === 'deleteAfterSend') { - msgModel.expirationStartTimestamp = DisappearingMessages.setExpirationStartTimestamp( + msgAttrs.expirationStartTimestamp = DisappearingMessages.setExpirationStartTimestamp( isLegacyDataMessage ? 'legacy' : expirationType === 'unknown' ? 'off' : expirationType, sentAt, 'addUpdateMessage' @@ -210,46 +227,50 @@ export async function addUpdateMessage({ } return isUs - ? convo.addSingleOutgoingMessage(msgModel) + ? 
convo.addSingleOutgoingMessage(msgAttrs) : convo.addSingleIncomingMessage({ - ...msgModel, + ...msgAttrs, source: sender, }); } -function buildGroupDiff(convo: ConversationModel, update: GroupInfo): GroupDiff { - const groupDiff: GroupDiff = {}; - - if (convo.get('displayNameInProfile') !== update.name) { - groupDiff.newName = update.name; +function buildGroupV1Diff(convo: ConversationModel, update: GroupInfo): GroupDiff | null { + if (convo.getRealSessionUsername() !== update.name) { + return { type: 'name', newName: update.name }; } - const oldMembers = convo.get('members'); - const oldZombies = convo.get('zombies'); + const oldMembers = convo.getGroupMembers(); + const oldZombies = convo.getGroupZombies(); const oldMembersWithZombies = _.uniq(oldMembers.concat(oldZombies)); const newMembersWithZombiesLeft = _.uniq(update.members.concat(update.zombies || [])); - const addedMembers = _.difference(newMembersWithZombiesLeft, oldMembersWithZombies); - if (addedMembers.length > 0) { - groupDiff.joiningMembers = addedMembers; + const added = _.difference(newMembersWithZombiesLeft, oldMembersWithZombies).filter( + PubKey.is05Pubkey + ); + if (added.length > 0) { + return { type: 'add', added, withHistory: false }; } // Check if anyone got kicked: - const removedMembers = _.difference(oldMembersWithZombies, newMembersWithZombiesLeft); + const removedMembers = _.difference(oldMembersWithZombies, newMembersWithZombiesLeft).filter( + PubKey.is05Pubkey + ); if (removedMembers.length > 0) { - groupDiff.leavingMembers = removedMembers; + return { type: 'kicked', kicked: removedMembers }; } - return groupDiff; + return null; } export async function updateOrCreateClosedGroup(details: GroupInfo) { + // const { id, expireTimer } = details; + const { id } = details; + if (PubKey.is03Pubkey(id)) { + throw new Error('updateOrCreateClosedGroup is only for legacy groups, not 03 groups'); + } - const conversation = await getConversationController().getOrCreateAndWait( - id, - ConversationTypeEnum.GROUP - ); + const conversation = await ConvoHub.use().getOrCreateAndWait(id, ConversationTypeEnum.GROUP); const updates: Pick< ConversationAttributes, @@ -257,7 +278,6 @@ export async function updateOrCreateClosedGroup(details: GroupInfo) { > = { displayNameInProfile: details.name, members: details.members, - // Note: legacy group to not support change of admins. type: ConversationTypeEnum.GROUP, active_at: details.activeAt ? 
details.activeAt : 0, left: !details.activeAt, @@ -283,16 +303,16 @@ async function sendNewName(convo: ConversationModel, name: string, messageId: st // Send the update to the group const nameChangeMessage = new ClosedGroupNameChangeMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), groupId, identifier: messageId, name, expirationType: null, // we keep that one **not** expiring expireTimer: 0, }); - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: nameChangeMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); } @@ -319,21 +339,21 @@ async function sendAddedMembers( const encryptionKeyPair = ECKeyPair.fromHexKeyPair(hexEncryptionKeyPair); // Send the Added Members message to the group (only members already in the group will get it) const closedGroupControlMessage = new ClosedGroupAddedMembersMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), groupId, addedMembers, identifier: messageId, expirationType: null, // we keep that one **not** expiring expireTimer: 0, }); - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: closedGroupControlMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); // Send closed group update messages to any new members individually const newClosedGroupUpdate = new ClosedGroupNewMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), name: groupName, groupId, admins, @@ -345,18 +365,18 @@ async function sendAddedMembers( }); const promises = addedMembers.map(async m => { - await getConversationController().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE); + await ConvoHub.use().getOrCreateAndWait(m, ConversationTypeEnum.PRIVATE); const memberPubKey = PubKey.cast(m); - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendToPubKey( memberPubKey, newClosedGroupUpdate, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); }); await Promise.all(promises); } -export async function sendRemovedMembers( +async function sendRemovedMembers( convo: ConversationModel, removedMembers: Array, stillMembers: Array, @@ -367,7 +387,7 @@ export async function sendRemovedMembers( return; } const ourNumber = UserUtils.getOurPubKeyFromCache(); - const admins = convo.get('groupAdmins') || []; + const admins = convo.getGroupAdmins() || []; const groupId = convo.get('id'); const isCurrentUserAdmin = admins.includes(ourNumber.key); @@ -380,7 +400,7 @@ export async function sendRemovedMembers( } // Send the update to the group and generate + distribute a new encryption key pair if needed const mainClosedGroupControlMessage = new ClosedGroupRemovedMembersMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), groupId, removedMembers, identifier: messageId, @@ -388,9 +408,9 @@ export async function sendRemovedMembers( expireTimer: 0, }); // Send the group update, and only once sent, generate and distribute a new encryption key pair if needed - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: mainClosedGroupControlMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, sentCb: async () => { if (isCurrentUserAdmin) { // we send the new encryption key only to members already here before the update @@ -408,7 +428,7 @@ async function generateAndSendNewEncryptionKeyPair( groupPublicKey: string, 
targetMembers: Array ) { - const groupConvo = getConversationController().get(groupPublicKey); + const groupConvo = ConvoHub.use().get(groupPublicKey); const groupId = fromHexToArray(groupPublicKey); if (!groupConvo) { @@ -427,7 +447,7 @@ async function generateAndSendNewEncryptionKeyPair( } const ourNumber = UserUtils.getOurPubKeyStrFromCache(); - if (!groupConvo.get('groupAdmins')?.includes(ourNumber)) { + if (!groupConvo.getGroupAdmins().includes(ourNumber)) { window?.log?.warn('generateAndSendNewEncryptionKeyPair: cannot send it as a non admin'); return; } @@ -444,7 +464,7 @@ async function generateAndSendNewEncryptionKeyPair( const keypairsMessage = new ClosedGroupEncryptionPairMessage({ groupId: toHex(groupId), - timestamp: GetNetworkTime.getNowWithNetworkOffset(), + createAtNetworkTimestamp: NetworkTime.now(), encryptedKeyPairs: wrappers, expirationType: null, // we keep that one **not** expiring expireTimer: 0, @@ -460,18 +480,18 @@ async function generateAndSendNewEncryptionKeyPair( distributingClosedGroupEncryptionKeyPairs.delete(toHex(groupId)); await addKeyPairToCacheAndDBIfNeeded(toHex(groupId), newKeyPair.toHexKeyPair()); - await groupConvo?.commit(); // this makes sure to include the new encryption keypair in the libsession usergroup wrapper + await groupConvo?.commit(); // this makes sure to include the new encryption key pair in the libsession user group wrapper }; // this is to be sent to the group pubkey address - await getMessageQueue().sendToGroup({ + await MessageQueue.use().sendToGroup({ message: keypairsMessage, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, sentCb: messageSentCallback, }); } -export async function buildEncryptionKeyPairWrappers( +async function buildEncryptionKeyPairWrappers( targetMembers: Array, encryptionKeyPair: ECKeyPair ) { @@ -491,7 +511,10 @@ export async function buildEncryptionKeyPairWrappers( const wrappers = await Promise.all( targetMembers.map(async pubkey => { - const ciphertext = await encryptUsingSessionProtocol(PubKey.cast(pubkey), plaintext); + const ciphertext = await MessageEncrypter.encryptUsingSessionProtocol( + PubKey.cast(pubkey), + plaintext + ); return new SignalService.DataMessage.ClosedGroupControlMessage.KeyPairWrapper({ encryptedKeyPair: ciphertext, publicKey: fromHexToArray(pubkey), @@ -500,3 +523,10 @@ export async function buildEncryptionKeyPairWrappers( ); return wrappers; } + +export const ClosedGroup = { + addUpdateMessage, + initiateClosedGroupUpdate, + updateOrCreateClosedGroup, + buildEncryptionKeyPairWrappers, +}; diff --git a/ts/session/group/open-group.ts b/ts/session/group/open-group.ts index 57ac056100..3026af5d53 100644 --- a/ts/session/group/open-group.ts +++ b/ts/session/group/open-group.ts @@ -4,7 +4,7 @@ import { MIME } from '../../types'; import { urlToBlob } from '../../types/attachments/VisualAttachment'; import { processNewAttachment } from '../../types/MessageAttachment'; import { uploadImageForRoomSogsV3 } from '../apis/open_group_api/sogsv3/sogsV3RoomImage'; -import { getConversationController } from '../conversations'; +import { ConvoHub } from '../conversations'; export type OpenGroupUpdateAvatar = { objectUrl: string | null }; @@ -19,7 +19,7 @@ export async function initiateOpenGroupUpdate( ) { // we actually do not change the groupName just yet here, serverSide. This is just done client side. Maybe something to allow in a later release. // For now, the UI is actually not allowing changing the room name so we do not care. 
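[Editorial sketch, not part of the patch] The "commit only after send" pattern used by `generateAndSendNewEncryptionKeyPair()` just above, reduced to its skeleton: local state (here, the freshly generated encryption key pair) is only persisted from `sentCb`, so a failed send never leaves the group pointing at a key pair that was never distributed. The queue API and namespace come from the hunks above; the message type and import paths are assumptions.

```ts
import { MessageQueue } from '../../session/sending';
import { SnodeNamespaces } from '../../session/apis/snode_api/namespaces';
import { ClosedGroupEncryptionPairMessage } from '../../session/messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairMessage';

async function sendThenCommit(keypairsMessage: ClosedGroupEncryptionPairMessage) {
  await MessageQueue.use().sendToGroup({
    message: keypairsMessage,
    namespace: SnodeNamespaces.LegacyClosedGroup,
    sentCb: async () => {
      // only reached once the swarm accepted the message: safe to persist the new key pair
      window?.log?.info('encryption key pair message sent, persisting the matching local state');
    },
  });
}
```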
- const convo = getConversationController().get(groupId); + const convo = ConvoHub.use().get(groupId); if (!convo?.isPublic()) { throw new Error('initiateOpenGroupUpdate can only be used for communities'); @@ -68,7 +68,7 @@ export async function initiateOpenGroupUpdate( contentType: MIME.IMAGE_UNKNOWN, // contentType is mostly used to generate previews and screenshot. We do not care for those in this case. }); await convo.setSessionProfile({ - displayName: groupName || convo.get('displayNameInProfile') || window.i18n('unknown'), + displayName: groupName || convo.getRealSessionUsername() || window.i18n('unknown'), avatarPath: upgraded.path, avatarImageId, }); diff --git a/ts/session/index.ts b/ts/session/index.ts index 3804544466..20ac68fbe4 100644 --- a/ts/session/index.ts +++ b/ts/session/index.ts @@ -6,6 +6,4 @@ import * as Sending from './sending'; import * as Constants from './constants'; import * as ClosedGroup from './group/closed-group'; -const getMessageQueue = Sending.getMessageQueue; - -export { Conversations, Messages, Utils, Types, Sending, Constants, ClosedGroup, getMessageQueue }; +export { Conversations, Messages, Utils, Types, Sending, Constants, ClosedGroup }; diff --git a/ts/session/messages/incoming/IncomingMessage.ts b/ts/session/messages/incoming/IncomingMessage.ts index 4e08f1d55b..d7b042a33f 100644 --- a/ts/session/messages/incoming/IncomingMessage.ts +++ b/ts/session/messages/incoming/IncomingMessage.ts @@ -6,11 +6,9 @@ type IncomingMessageAvailableTypes = | SignalService.CallMessage | SignalService.ReceiptMessage | SignalService.TypingMessage - | SignalService.ConfigurationMessage | SignalService.DataExtractionNotification | SignalService.Unsend - | SignalService.MessageRequestResponse - | SignalService.ISharedConfigMessage; + | SignalService.MessageRequestResponse; export class IncomingMessage { public readonly envelopeTimestamp: number; diff --git a/ts/session/messages/message_factory/group/groupUpdateMessageFactory.ts b/ts/session/messages/message_factory/group/groupUpdateMessageFactory.ts new file mode 100644 index 0000000000..97da4af52d --- /dev/null +++ b/ts/session/messages/message_factory/group/groupUpdateMessageFactory.ts @@ -0,0 +1,155 @@ +import { Uint8ArrayLen64, WithGroupPubkey } from 'libsession_util_nodejs'; +import { getSodiumRenderer } from '../../../crypto'; +import { DisappearingMessages } from '../../../disappearing_messages'; + +import { GroupUpdateMemberChangeMessage } from '../../outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage'; +import { ConversationModel } from '../../../../models/conversation'; +import { + WithAddWithHistoryMembers, + WithAddWithoutHistoryMembers, + WithFromMemberLeftMessage, + WithPromotedMembers, + WithRemoveMembers, +} from '../../../types/with'; + +/** + * Return the control messages to be pushed to the group's swarm. + * Those are not going to change the state, they are just here as a "notification". + * i.e. 
"Alice was removed from the group" + */ +async function getRemovedControlMessage({ + convo, + groupPk, + removed, + adminSecretKey, + createAtNetworkTimestamp, + fromMemberLeftMessage, + dbMsgIdentifier, +}: WithFromMemberLeftMessage & + WithRemoveMembers & + WithGroupPubkey & { + convo: ConversationModel; + adminSecretKey: Uint8ArrayLen64; + createAtNetworkTimestamp: number; + dbMsgIdentifier: string; + }) { + const sodium = await getSodiumRenderer(); + + if (fromMemberLeftMessage || !removed.length) { + return null; + } + + return new GroupUpdateMemberChangeMessage({ + identifier: dbMsgIdentifier, + removed, + groupPk, + typeOfChange: 'removed', + createAtNetworkTimestamp, + secretKey: adminSecretKey, + sodium, + ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp), + }); +} + +async function getWithoutHistoryControlMessage({ + convo, + withoutHistory, + groupPk, + adminSecretKey, + createAtNetworkTimestamp, + dbMsgIdentifier, +}: WithAddWithoutHistoryMembers & + WithGroupPubkey & { + dbMsgIdentifier: string; + convo: ConversationModel; + adminSecretKey: Uint8ArrayLen64; + createAtNetworkTimestamp: number; + }) { + const sodium = await getSodiumRenderer(); + + if (!withoutHistory.length) { + return null; + } + + return new GroupUpdateMemberChangeMessage({ + identifier: dbMsgIdentifier, + added: withoutHistory, + groupPk, + typeOfChange: 'added', + createAtNetworkTimestamp, + secretKey: adminSecretKey, + sodium, + ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp), + }); +} + +async function getWithHistoryControlMessage({ + convo, + withHistory, + groupPk, + adminSecretKey, + createAtNetworkTimestamp, + dbMsgIdentifier, +}: WithAddWithHistoryMembers & + WithGroupPubkey & { + dbMsgIdentifier: string; + convo: ConversationModel; + adminSecretKey: Uint8ArrayLen64; + createAtNetworkTimestamp: number; + }) { + const sodium = await getSodiumRenderer(); + + if (!withHistory.length) { + return null; + } + + return new GroupUpdateMemberChangeMessage({ + identifier: dbMsgIdentifier, + added: withHistory, + groupPk, + typeOfChange: 'addedWithHistory', + createAtNetworkTimestamp, + secretKey: adminSecretKey, + sodium, + ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp), + }); +} + +async function getPromotedControlMessage({ + convo, + promoted, + groupPk, + adminSecretKey, + createAtNetworkTimestamp, + dbMsgIdentifier, +}: WithPromotedMembers & + WithGroupPubkey & { + dbMsgIdentifier: string; + convo: ConversationModel; + adminSecretKey: Uint8ArrayLen64; + createAtNetworkTimestamp: number; + }) { + const sodium = await getSodiumRenderer(); + + if (!promoted.length) { + return null; + } + + return new GroupUpdateMemberChangeMessage({ + identifier: dbMsgIdentifier, + promoted, + groupPk, + typeOfChange: 'promoted', + createAtNetworkTimestamp, + secretKey: adminSecretKey, + sodium, + ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp), + }); +} + +export const GroupUpdateMessageFactory = { + getRemovedControlMessage, + getWithoutHistoryControlMessage, + getWithHistoryControlMessage, + getPromotedControlMessage, +}; diff --git a/ts/session/messages/outgoing/DataMessage.ts b/ts/session/messages/outgoing/DataMessage.ts index 33ef1ef64e..6cefd035f8 100644 --- a/ts/session/messages/outgoing/DataMessage.ts +++ b/ts/session/messages/outgoing/DataMessage.ts @@ -8,4 +8,6 @@ export abstract class DataMessage extends ExpirableMessage { dataMessage: 
this.dataProto(), }); } + + public abstract dataProto(): SignalService.DataMessage; } diff --git a/ts/session/messages/outgoing/ExpirableMessage.ts b/ts/session/messages/outgoing/ExpirableMessage.ts index dd9538e2c8..a6de8ec140 100644 --- a/ts/session/messages/outgoing/ExpirableMessage.ts +++ b/ts/session/messages/outgoing/ExpirableMessage.ts @@ -16,7 +16,7 @@ export class ExpirableMessage extends ContentMessage { constructor(params: ExpirableMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, }); this.expirationType = params.expirationType; @@ -38,17 +38,8 @@ export class ExpirableMessage extends ContentMessage { }); } - public dataProto(): SignalService.DataMessage { - return new SignalService.DataMessage({ - // TODO legacy messages support will be removed in a future release - expireTimer: - (this.expirationType === 'unknown' || !this.expirationType) && - this.expireTimer && - this.expireTimer > -1 - ? this.expireTimer - : undefined, - }); - } + // Note: dataProto() or anything else must be implemented in the child classes + // public dataProto() public getDisappearingMessageType(): DisappearingMessageType | undefined { return this.expirationType || undefined; diff --git a/ts/session/messages/outgoing/Message.ts b/ts/session/messages/outgoing/Message.ts index 98716b8b88..16a4e7fcb9 100644 --- a/ts/session/messages/outgoing/Message.ts +++ b/ts/session/messages/outgoing/Message.ts @@ -1,21 +1,27 @@ import { v4 as uuid } from 'uuid'; export interface MessageParams { - timestamp: number; + createAtNetworkTimestamp: number; identifier?: string; } export abstract class Message { - public readonly timestamp: number; + /** + * This is the network timestamp when this message was created (and so, potentially signed). + * This must be used as the envelope timestamp, as other devices are going to use it to verify messages. + * There is also the stored_at/effectiveTimestamp which we get back once we sent a message to the recipient's swarm, but that's not included here. 
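For clarity on the renamed field, a minimal construction sketch (not part of the diff): it assumes the NetworkTime.now() helper already used elsewhere in this changeset, and the import paths are indicative only.

import { NetworkTime } from '../../../util/NetworkTime'; // indicative path
import { TypingMessage } from './controlMessage/TypingMessage'; // indicative path

// the network-adjusted "now" becomes the envelope timestamp that other devices verify
const typingStarted = new TypingMessage({
  createAtNetworkTimestamp: NetworkTime.now(),
  isTyping: true,
});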
+ */ + public readonly createAtNetworkTimestamp: number; public readonly identifier: string; - constructor({ timestamp, identifier }: MessageParams) { - this.timestamp = timestamp; + constructor({ createAtNetworkTimestamp, identifier }: MessageParams) { + this.createAtNetworkTimestamp = createAtNetworkTimestamp; if (identifier && identifier.length === 0) { throw new Error('Cannot set empty identifier'); } - if (!timestamp) { - throw new Error('Cannot set undefined timestamp'); + + if (!createAtNetworkTimestamp || createAtNetworkTimestamp <= 0) { + throw new Error('Cannot set undefined createAtNetworkTimestamp or <=0'); } this.identifier = identifier || uuid(); } diff --git a/ts/session/messages/outgoing/controlMessage/CallMessage.ts b/ts/session/messages/outgoing/controlMessage/CallMessage.ts index d8bb2831ed..f621ad8d17 100644 --- a/ts/session/messages/outgoing/controlMessage/CallMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/CallMessage.ts @@ -41,7 +41,7 @@ export class CallMessage extends ExpirableMessage { public contentProto(): SignalService.Content { const content = super.contentProto(); - content.callMessage = this.dataCallProto(); + content.callMessage = this.callProto(); return content; } @@ -49,7 +49,7 @@ export class CallMessage extends ExpirableMessage { return TTL_DEFAULT.CALL_MESSAGE; } - private dataCallProto(): SignalService.CallMessage { + private callProto(): SignalService.CallMessage { return new SignalService.CallMessage({ type: this.type, sdpMLineIndexes: this.sdpMLineIndexes, diff --git a/ts/session/messages/outgoing/controlMessage/ConfigurationMessage.ts b/ts/session/messages/outgoing/controlMessage/ConfigurationMessage.ts deleted file mode 100644 index 00ec330c3f..0000000000 --- a/ts/session/messages/outgoing/controlMessage/ConfigurationMessage.ts +++ /dev/null @@ -1,217 +0,0 @@ -// this is not a very good name, but a configuration message is a message sent to our other devices so sync our current public and closed groups - -import { SignalService } from '../../../../protobuf'; -import { MessageParams } from '../Message'; -import { ECKeyPair } from '../../../../receiver/keypairs'; -import { fromHexToArray } from '../../../utils/String'; -import { PubKey } from '../../../types'; -import { ContentMessage } from '..'; - -interface ConfigurationMessageParams extends MessageParams { - activeClosedGroups: Array; - activeOpenGroups: Array; - displayName: string; - profilePicture?: string; - profileKey?: Uint8Array; - contacts: Array; -} - -export class ConfigurationMessage extends ContentMessage { - public readonly activeClosedGroups: Array; - public readonly activeOpenGroups: Array; - public readonly displayName: string; - public readonly profilePicture?: string; - public readonly profileKey?: Uint8Array; - public readonly contacts: Array; - - constructor(params: ConfigurationMessageParams) { - super({ timestamp: params.timestamp, identifier: params.identifier }); - this.activeClosedGroups = params.activeClosedGroups; - this.activeOpenGroups = params.activeOpenGroups; - this.displayName = params.displayName; - this.profilePicture = params.profilePicture; - this.profileKey = params.profileKey; - this.contacts = params.contacts; - - if (!this.activeClosedGroups) { - throw new Error('closed group must be set'); - } - - if (!this.activeOpenGroups) { - throw new Error('open group must be set'); - } - - if (!this.displayName || !this.displayName?.length) { - throw new Error('displayName must be set'); - } - - if (this.profilePicture && typeof this.profilePicture 
!== 'string') { - throw new Error('profilePicture set but not an Uin8Array'); - } - - if (this.profileKey && !(this.profileKey instanceof Uint8Array)) { - throw new Error('profileKey set but not an Uin8Array'); - } - - if (!this.contacts) { - throw new Error('contacts must be set'); - } - } - - public contentProto(): SignalService.Content { - return new SignalService.Content({ - configurationMessage: this.configurationProto(), - }); - } - - protected configurationProto(): SignalService.ConfigurationMessage { - return new SignalService.ConfigurationMessage({ - closedGroups: this.mapClosedGroupsObjectToProto(this.activeClosedGroups), - openGroups: this.activeOpenGroups, - displayName: this.displayName, - profilePicture: this.profilePicture, - profileKey: this.profileKey, - contacts: this.mapContactsObjectToProto(this.contacts), - }); - } - - private mapClosedGroupsObjectToProto( - closedGroups: Array - ): Array { - return (closedGroups || []).map(m => m.toProto()); - } - - private mapContactsObjectToProto( - contacts: Array - ): Array { - return (contacts || []).map(m => m.toProto()); - } -} - -export class ConfigurationMessageContact { - public publicKey: string; - public displayName: string; - public profilePictureURL?: string; - public profileKey?: Uint8Array; - public isApproved?: boolean; - public isBlocked?: boolean; - public didApproveMe?: boolean; - - public constructor({ - publicKey, - displayName, - profilePictureURL, - profileKey, - isApproved, - isBlocked, - didApproveMe, - }: { - publicKey: string; - displayName: string; - profilePictureURL?: string; - profileKey?: Uint8Array; - isApproved?: boolean; - isBlocked?: boolean; - didApproveMe?: boolean; - }) { - this.publicKey = publicKey; - this.displayName = displayName; - this.profilePictureURL = profilePictureURL; - this.profileKey = profileKey; - this.isApproved = isApproved; - this.isBlocked = isBlocked; - this.didApproveMe = didApproveMe; - - // will throw if public key is invalid - PubKey.cast(publicKey); - - if (this.displayName?.length === 0) { - throw new Error('displayName must be set or undefined'); - } - - if (this.profilePictureURL !== undefined && this.profilePictureURL?.length === 0) { - throw new Error('profilePictureURL must either undefined or not empty'); - } - if (this.profileKey !== undefined && this.profileKey?.length === 0) { - throw new Error('profileKey must either undefined or not empty'); - } - } - - public toProto(): SignalService.ConfigurationMessage.Contact { - return new SignalService.ConfigurationMessage.Contact({ - publicKey: fromHexToArray(this.publicKey), - name: this.displayName, - profilePicture: this.profilePictureURL, - profileKey: this.profileKey, - isApproved: this.isApproved, - isBlocked: this.isBlocked, - didApproveMe: this.didApproveMe, - }); - } -} - -export class ConfigurationMessageClosedGroup { - public publicKey: string; - public name: string; - public encryptionKeyPair: ECKeyPair; - public members: Array; - public admins: Array; - - public constructor({ - publicKey, - name, - encryptionKeyPair, - members, - admins, - }: { - publicKey: string; - name: string; - encryptionKeyPair: ECKeyPair; - members: Array; - admins: Array; - }) { - this.publicKey = publicKey; - this.name = name; - this.encryptionKeyPair = encryptionKeyPair; - this.members = members; - this.admins = admins; - - // will throw if publik key is invalid - PubKey.cast(publicKey); - - if ( - !encryptionKeyPair?.privateKeyData?.byteLength || - !encryptionKeyPair?.publicKeyData?.byteLength - ) { - throw new 
Error('Encryption key pair looks invalid'); - } - - if (!this.name?.length) { - throw new Error('name must be set'); - } - - if (!this.members?.length) { - throw new Error('members must be set'); - } - if (!this.admins?.length) { - throw new Error('admins must be set'); - } - - if (this.admins.some(a => !this.members.includes(a))) { - throw new Error('some admins are not members'); - } - } - - public toProto(): SignalService.ConfigurationMessage.ClosedGroup { - return new SignalService.ConfigurationMessage.ClosedGroup({ - publicKey: fromHexToArray(this.publicKey), - name: this.name, - encryptionKeyPair: { - publicKey: this.encryptionKeyPair.publicKeyData, - privateKey: this.encryptionKeyPair.privateKeyData, - }, - members: this.members.map(fromHexToArray), - admins: this.admins.map(fromHexToArray), - }); - } -} diff --git a/ts/session/messages/outgoing/controlMessage/DataExtractionNotificationMessage.ts b/ts/session/messages/outgoing/controlMessage/DataExtractionNotificationMessage.ts index e569d813b0..c2cc384e4e 100644 --- a/ts/session/messages/outgoing/controlMessage/DataExtractionNotificationMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/DataExtractionNotificationMessage.ts @@ -1,11 +1,14 @@ -import { getMessageQueue } from '../../..'; +import { v4 as uuid } from 'uuid'; + import { SignalService } from '../../../../protobuf'; import { SnodeNamespaces } from '../../../apis/snode_api/namespaces'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { DisappearingMessages } from '../../../disappearing_messages'; import { PubKey } from '../../../types'; import { UserUtils } from '../../../utils'; import { ExpirableMessage, ExpirableMessageParams } from '../ExpirableMessage'; +import { NetworkTime } from '../../../../util/NetworkTime'; +import { MessageQueue } from '../../../sending'; interface DataExtractionNotificationMessageParams extends ExpirableMessageParams { referencedAttachmentTimestamp: number; @@ -25,11 +28,11 @@ export class DataExtractionNotificationMessage extends ExpirableMessage { public contentProto(): SignalService.Content { const content = super.contentProto(); - content.dataExtractionNotification = this.dataExtractionProto(); + content.dataExtractionNotification = this.extractionProto(); return content; } - protected dataExtractionProto(): SignalService.DataExtractionNotification { + protected extractionProto(): SignalService.DataExtractionNotification { const ACTION_ENUM = SignalService.DataExtractionNotification.Type; const action = ACTION_ENUM.MEDIA_SAVED; // we cannot know when user screenshots, so it can only be a media saved on desktop @@ -49,7 +52,7 @@ export const sendDataExtractionNotification = async ( attachmentSender: string, referencedAttachmentTimestamp: number ) => { - const convo = getConversationController().get(conversationId); + const convo = ConvoHub.use().get(conversationId); if (!convo || !convo.isPrivate() || convo.isMe() || UserUtils.isUsFromCache(attachmentSender)) { window.log.warn('Not sending saving attachment notification for', attachmentSender); return; @@ -61,7 +64,8 @@ export const sendDataExtractionNotification = async ( // but also expire on the recipient's side (and synced) once read. 
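The expirationType and expireTimer passed to the constructor just below come from lines elided in this hunk; one plausible derivation, mirroring the group update factories earlier in this changeset (an assumption, not something this diff shows), would be:

// assumes the helper returns at least { expirationType, expireTimer } for this convo,
// as its usage in the group factories above suggests
const { expirationType, expireTimer } = DisappearingMessages.getExpireDetailsForOutgoingMessage(
  convo,
  NetworkTime.now()
);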
const dataExtractionNotificationMessage = new DataExtractionNotificationMessage({ referencedAttachmentTimestamp, - timestamp: Date.now(), + identifier: uuid(), + createAtNetworkTimestamp: NetworkTime.now(), expirationType, expireTimer, }); @@ -72,11 +76,11 @@ export const sendDataExtractionNotification = async ( ); try { - await getMessageQueue().sendToPubKey( + await MessageQueue.use().sendTo1o1NonDurably({ pubkey, - dataExtractionNotificationMessage, - SnodeNamespaces.UserMessages - ); + message: dataExtractionNotificationMessage, + namespace: SnodeNamespaces.Default, + }); } catch (e) { window.log.warn('failed to send data extraction notification', e); } diff --git a/ts/session/messages/outgoing/controlMessage/ExpirationTimerUpdateMessage.ts b/ts/session/messages/outgoing/controlMessage/ExpirationTimerUpdateMessage.ts index c9bb43820b..5fe234ced6 100644 --- a/ts/session/messages/outgoing/controlMessage/ExpirationTimerUpdateMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/ExpirationTimerUpdateMessage.ts @@ -19,7 +19,7 @@ export class ExpirationTimerUpdateMessage extends DataMessage { constructor(params: ExpirationTimerUpdateMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, expirationType: params.expirationType, expireTimer: params.expireTimer, @@ -38,7 +38,7 @@ export class ExpirationTimerUpdateMessage extends DataMessage { } public dataProto(): SignalService.DataMessage { - const data = super.dataProto(); + const data = new SignalService.DataMessage({}); data.flags = SignalService.DataMessage.Flags.EXPIRATION_TIMER_UPDATE; diff --git a/ts/session/messages/outgoing/controlMessage/MessageRequestResponse.ts b/ts/session/messages/outgoing/controlMessage/MessageRequestResponse.ts index 8b427831a9..ddfbf370ee 100644 --- a/ts/session/messages/outgoing/controlMessage/MessageRequestResponse.ts +++ b/ts/session/messages/outgoing/controlMessage/MessageRequestResponse.ts @@ -17,7 +17,7 @@ export class MessageRequestResponse extends ContentMessage { constructor(params: MessageRequestResponseParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, } as MessageRequestResponseParams); const profile = buildProfileForOutgoingMessage(params); diff --git a/ts/session/messages/outgoing/controlMessage/SharedConfigMessage.ts b/ts/session/messages/outgoing/controlMessage/SharedConfigMessage.ts deleted file mode 100644 index 5718b5a403..0000000000 --- a/ts/session/messages/outgoing/controlMessage/SharedConfigMessage.ts +++ /dev/null @@ -1,34 +0,0 @@ -// this is not a very good name, but a configuration message is a message sent to our other devices so sync our current public and closed groups -import Long from 'long'; - -import { ContentMessage } from '..'; -import { SignalService } from '../../../../protobuf'; -import { TTL_DEFAULT } from '../../../constants'; -import { MessageParams } from '../Message'; - -interface SharedConfigParams extends MessageParams { - seqno: Long; - kind: SignalService.SharedConfigMessage.Kind; - readyToSendData: Uint8Array; -} - -export class SharedConfigMessage extends ContentMessage { - public readonly seqno: Long; - public readonly kind: SignalService.SharedConfigMessage.Kind; - public readonly readyToSendData: Uint8Array; - - constructor(params: SharedConfigParams) { - super({ timestamp: params.timestamp, identifier: params.identifier }); - this.readyToSendData = params.readyToSendData; - this.kind = params.kind; - 
this.seqno = params.seqno; - } - - public contentProto(): SignalService.Content { - throw new Error('SharedConfigMessage must not be sent wrapped anymore'); - } - - public ttl(): number { - return TTL_DEFAULT.CONFIG_MESSAGE; - } -} diff --git a/ts/session/messages/outgoing/controlMessage/TypingMessage.ts b/ts/session/messages/outgoing/controlMessage/TypingMessage.ts index b5e7ee39f1..8b8132b510 100644 --- a/ts/session/messages/outgoing/controlMessage/TypingMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/TypingMessage.ts @@ -5,17 +5,17 @@ import { MessageParams } from '../Message'; interface TypingMessageParams extends MessageParams { isTyping: boolean; - typingTimestamp?: number; } export class TypingMessage extends ContentMessage { public readonly isTyping: boolean; - public readonly typingTimestamp?: number; constructor(params: TypingMessageParams) { - super({ timestamp: params.timestamp, identifier: params.identifier }); + super({ + createAtNetworkTimestamp: params.createAtNetworkTimestamp, + identifier: params.identifier, + }); this.isTyping = params.isTyping; - this.typingTimestamp = params.typingTimestamp; } public ttl(): number { @@ -29,14 +29,13 @@ export class TypingMessage extends ContentMessage { } protected typingProto(): SignalService.TypingMessage { - const ACTION_ENUM = SignalService.TypingMessage.Action; - - const action = this.isTyping ? ACTION_ENUM.STARTED : ACTION_ENUM.STOPPED; - const finalTimestamp = this.typingTimestamp || Date.now(); + const action = this.isTyping + ? SignalService.TypingMessage.Action.STARTED + : SignalService.TypingMessage.Action.STOPPED; const typingMessage = new SignalService.TypingMessage(); typingMessage.action = action; - typingMessage.timestamp = finalTimestamp; + typingMessage.timestamp = this.createAtNetworkTimestamp; return typingMessage; } diff --git a/ts/session/messages/outgoing/controlMessage/UnsendMessage.ts b/ts/session/messages/outgoing/controlMessage/UnsendMessage.ts index 2fc93ab7a6..ca5323f744 100644 --- a/ts/session/messages/outgoing/controlMessage/UnsendMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/UnsendMessage.ts @@ -3,7 +3,6 @@ import { ContentMessage } from '../ContentMessage'; import { MessageParams } from '../Message'; interface UnsendMessageParams extends MessageParams { - timestamp: number; author: string; } @@ -11,7 +10,10 @@ export class UnsendMessage extends ContentMessage { private readonly author: string; constructor(params: UnsendMessageParams) { - super({ timestamp: params.timestamp, author: params.author } as MessageParams); + super({ + createAtNetworkTimestamp: params.createAtNetworkTimestamp, + author: params.author, + } as MessageParams); this.author = params.author; } @@ -23,7 +25,7 @@ export class UnsendMessage extends ContentMessage { public unsendProto(): SignalService.Unsend { return new SignalService.Unsend({ - timestamp: this.timestamp, + timestamp: this.createAtNetworkTimestamp, author: this.author, }); } diff --git a/ts/session/messages/outgoing/controlMessage/group/ClosedGroupMessage.ts b/ts/session/messages/outgoing/controlMessage/group/ClosedGroupMessage.ts index 60699cf5e0..fa8ae6c0c8 100644 --- a/ts/session/messages/outgoing/controlMessage/group/ClosedGroupMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/group/ClosedGroupMessage.ts @@ -1,17 +1,18 @@ import { SignalService } from '../../../../../protobuf'; import { PubKey } from '../../../../types'; -import { ExpirableMessage, ExpirableMessageParams } from '../../ExpirableMessage'; +import { DataMessage } 
from '../../DataMessage'; +import { ExpirableMessageParams } from '../../ExpirableMessage'; export interface ClosedGroupMessageParams extends ExpirableMessageParams { groupId: string | PubKey; } -export abstract class ClosedGroupMessage extends ExpirableMessage { +export abstract class ClosedGroupMessage extends DataMessage { public readonly groupId: PubKey; constructor(params: ClosedGroupMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, expirationType: params.expirationType, expireTimer: params.expireTimer, @@ -41,7 +42,7 @@ export abstract class ClosedGroupMessage extends ExpirableMessage { } public dataProto(): SignalService.DataMessage { - const dataMessage = super.dataProto(); + const dataMessage = new SignalService.DataMessage({}); dataMessage.closedGroupControlMessage = new SignalService.DataMessage.ClosedGroupControlMessage(); diff --git a/ts/session/messages/outgoing/controlMessage/group/ClosedGroupNewMessage.ts b/ts/session/messages/outgoing/controlMessage/group/ClosedGroupNewMessage.ts index 53b0a76c41..5f77171777 100644 --- a/ts/session/messages/outgoing/controlMessage/group/ClosedGroupNewMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/group/ClosedGroupNewMessage.ts @@ -19,7 +19,7 @@ export class ClosedGroupNewMessage extends ClosedGroupMessage { constructor(params: ClosedGroupNewMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, groupId: params.groupId, expirationType: params.expirationType, diff --git a/ts/session/messages/outgoing/controlMessage/group/ClosedGroupRemovedMembersMessage.ts b/ts/session/messages/outgoing/controlMessage/group/ClosedGroupRemovedMembersMessage.ts index 3d3fb5d933..d97667dcc3 100644 --- a/ts/session/messages/outgoing/controlMessage/group/ClosedGroupRemovedMembersMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/group/ClosedGroupRemovedMembersMessage.ts @@ -11,7 +11,7 @@ export class ClosedGroupRemovedMembersMessage extends ClosedGroupMessage { constructor(params: ClosedGroupRemovedMembersMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, groupId: params.groupId, expirationType: params.expirationType, diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/GroupUpdateMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/GroupUpdateMessage.ts new file mode 100644 index 0000000000..7429c7c16e --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/GroupUpdateMessage.ts @@ -0,0 +1,32 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { LibSodiumWrappers } from '../../../../crypto'; +import { DataMessage } from '../../DataMessage'; +import { ExpirableMessageParams } from '../../ExpirableMessage'; + +export type AdminSigDetails = { + secretKey: Uint8Array; + sodium: LibSodiumWrappers; +}; + +export interface GroupUpdateMessageParams extends ExpirableMessageParams { + groupPk: GroupPubkeyType; +} + +export abstract class GroupUpdateMessage extends DataMessage { + public readonly destination: GroupUpdateMessageParams['groupPk']; + + constructor(params: GroupUpdateMessageParams) { + super(params); + + this.destination = params.groupPk; + if (!this.destination || this.destination.length === 0) { + throw new Error('destination must be set to the groupPubkey'); + } + } + + // do not override 
the dataProto here, we want it to be defined in the child classes + // public abstract dataProto(): SignalService.DataMessage; + + public abstract isFor1o1Swarm(): boolean; + public abstract isForGroupSwarm(): boolean; +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage.ts new file mode 100644 index 0000000000..91ac703065 --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage.ts @@ -0,0 +1,81 @@ +import { PubkeyType } from 'libsession_util_nodejs'; +import _, { isEmpty } from 'lodash'; +import { SignalService } from '../../../../../../protobuf'; +import { SnodeNamespaces } from '../../../../../apis/snode_api/namespaces'; +import { stringToUint8Array } from '../../../../../utils/String'; +import { Preconditions } from '../../../preconditions'; +import { + AdminSigDetails, + GroupUpdateMessage, + GroupUpdateMessageParams, +} from '../GroupUpdateMessage'; + +// Note: `Partial` because that message can also be sent as a non-admin and we always give sodium but not always the secretKey +type Params = GroupUpdateMessageParams & + Partial> & + Omit & { + memberSessionIds: Array; + messageHashes: Array; + }; + +/** + * GroupUpdateDeleteMemberContentMessage is sent as a message to group's swarm. + */ +export class GroupUpdateDeleteMemberContentMessage extends GroupUpdateMessage { + public readonly createAtNetworkTimestamp: Params['createAtNetworkTimestamp']; + public readonly memberSessionIds: Params['memberSessionIds']; + public readonly messageHashes: Params['messageHashes']; + public readonly secretKey: Params['secretKey']; + public readonly sodium: Params['sodium']; + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + + constructor(params: Params) { + super(params); + + this.memberSessionIds = params.memberSessionIds; + this.messageHashes = params.messageHashes; + this.secretKey = params.secretKey; + this.createAtNetworkTimestamp = params.createAtNetworkTimestamp; + this.sodium = params.sodium; + + if (isEmpty(this.memberSessionIds) && isEmpty(this.messageHashes)) { + throw new Error( + 'GroupUpdateDeleteMemberContentMessage needs members or messageHashes to be filled' + ); + } + + Preconditions.checkArrayHaveOnly05Pubkeys({ + arr: this.memberSessionIds, + context: this.constructor.toString(), + varName: 'memberSessionIds', + }); + } + + public dataProto(): SignalService.DataMessage { + // If we have the secretKey, we can delete it for anyone `"DELETE_CONTENT" || timestamp || sessionId[0] || ... 
|| messageHashes[0] || ...` + + let adminSignature = new Uint8Array(); + if (this.secretKey && !_.isEmpty(this.secretKey) && this.sodium) { + adminSignature = this.sodium.crypto_sign_detached( + stringToUint8Array( + `DELETE_CONTENT${this.createAtNetworkTimestamp}${this.memberSessionIds.join('')}${this.messageHashes.join('')}` + ), + this.secretKey + ); + } + const deleteMemberContent = new SignalService.GroupUpdateDeleteMemberContentMessage({ + adminSignature, + memberSessionIds: this.memberSessionIds, + messageHashes: this.messageHashes, + }); + + return new SignalService.DataMessage({ groupUpdateMessage: { deleteMemberContent } }); + } + + public isForGroupSwarm(): boolean { + return true; + } + public isFor1o1Swarm(): boolean { + return false; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage.ts new file mode 100644 index 0000000000..e97d149363 --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage.ts @@ -0,0 +1,105 @@ +import { isEmpty, isFinite } from 'lodash'; +import { SignalService } from '../../../../../../protobuf'; +import { SnodeNamespaces } from '../../../../../apis/snode_api/namespaces'; +import { LibSodiumWrappers } from '../../../../../crypto'; +import { stringToUint8Array } from '../../../../../utils/String'; +import { + AdminSigDetails, + GroupUpdateMessage, + GroupUpdateMessageParams, +} from '../GroupUpdateMessage'; + +type NameChangeParams = GroupUpdateMessageParams & + AdminSigDetails & { + typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type.NAME; + updatedName: string; + }; + +type AvatarChangeParams = GroupUpdateMessageParams & + AdminSigDetails & { + typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type.AVATAR; + }; + +type DisappearingMessageChangeParams = GroupUpdateMessageParams & + AdminSigDetails & { + typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type.DISAPPEARING_MESSAGES; + updatedExpirationSeconds: number; + }; + +/** + * GroupUpdateInfoChangeMessage is sent as a message to group's swarm. + */ +export class GroupUpdateInfoChangeMessage extends GroupUpdateMessage { + public readonly typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type; + public readonly updatedName: string = ''; + public readonly updatedExpirationSeconds: number = 0; + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + private readonly secretKey: Uint8Array; // not sent, only used for signing content as part of the message + private readonly sodium: LibSodiumWrappers; + + constructor(params: NameChangeParams | AvatarChangeParams | DisappearingMessageChangeParams) { + super(params); + const types = SignalService.GroupUpdateInfoChangeMessage.Type; + + this.typeOfChange = params.typeOfChange; + this.secretKey = params.secretKey; + this.sodium = params.sodium; + + switch (params.typeOfChange) { + case types.NAME: { + if (isEmpty(params.updatedName)) { + throw new Error('A group needs a name'); + } + this.updatedName = params.updatedName; + break; + } + case types.AVATAR: + // nothing to do for avatar + break; + case types.DISAPPEARING_MESSAGES: { + if (!isFinite(params.updatedExpirationSeconds) || params.updatedExpirationSeconds < 0) { + throw new Error('Invalid disappearing message timer. 
Must be finite and >=0'); + } + this.updatedExpirationSeconds = params.updatedExpirationSeconds; + break; + } + default: + break; + } + } + + public dataProto(): SignalService.DataMessage { + const infoChangeMessage = new SignalService.GroupUpdateInfoChangeMessage({ + type: this.typeOfChange, + adminSignature: this.sodium.crypto_sign_detached( + stringToUint8Array(`INFO_CHANGE${this.typeOfChange}${this.createAtNetworkTimestamp}`), + this.secretKey + ), + }); + switch (this.typeOfChange) { + case SignalService.GroupUpdateInfoChangeMessage.Type.NAME: + infoChangeMessage.updatedName = this.updatedName; + + break; + case SignalService.GroupUpdateInfoChangeMessage.Type.DISAPPEARING_MESSAGES: + infoChangeMessage.updatedExpiration = this.updatedExpirationSeconds; + + break; + + case SignalService.GroupUpdateInfoChangeMessage.Type.AVATAR: + // nothing to do for the avatar case + break; + default: + break; + } + + return new SignalService.DataMessage({ groupUpdateMessage: { infoChangeMessage } }); + } + + public isForGroupSwarm(): boolean { + return true; + } + public isFor1o1Swarm(): boolean { + return false; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInviteResponseMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInviteResponseMessage.ts new file mode 100644 index 0000000000..eb038f1500 --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInviteResponseMessage.ts @@ -0,0 +1,49 @@ +import { SignalService } from '../../../../../../protobuf'; +import { SnodeNamespaces } from '../../../../../apis/snode_api/namespaces'; +import { getOurProfile } from '../../../../../utils/User'; +import { GroupUpdateMessage, GroupUpdateMessageParams } from '../GroupUpdateMessage'; + +type Params = GroupUpdateMessageParams & { + isApproved: boolean; +}; + +/** + * GroupUpdateInviteResponseMessage is sent to the group's swarm. + * Our pubkey, as the leaving member is part of the encryption of libsession for the new groups + * + */ +export class GroupUpdateInviteResponseMessage extends GroupUpdateMessage { + public readonly isApproved: Params['isApproved']; + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + + constructor(params: Params) { + super(params); + this.isApproved = params.isApproved; + } + + public dataProto(): SignalService.DataMessage { + const ourProfile = getOurProfile(); + + const inviteResponse = new SignalService.GroupUpdateInviteResponseMessage({ + isApproved: true, + }); + + return new SignalService.DataMessage({ + profileKey: ourProfile?.profileKey, + profile: ourProfile + ? 
{ + displayName: ourProfile.displayName, + profilePicture: ourProfile.avatarPointer, + } + : undefined, + groupUpdateMessage: { inviteResponse }, + }); + } + + public isForGroupSwarm(): boolean { + return true; + } + public isFor1o1Swarm(): boolean { + return false; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage.ts new file mode 100644 index 0000000000..0292898f1c --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage.ts @@ -0,0 +1,127 @@ +import { PubkeyType } from 'libsession_util_nodejs'; +import { isEmpty } from 'lodash'; +import { SignalService } from '../../../../../../protobuf'; +import { assertUnreachable } from '../../../../../../types/sqlSharedTypes'; +import { SnodeNamespaces } from '../../../../../apis/snode_api/namespaces'; +import { LibSodiumWrappers } from '../../../../../crypto'; +import { stringToUint8Array } from '../../../../../utils/String'; +import { + AdminSigDetails, + GroupUpdateMessage, + GroupUpdateMessageParams, +} from '../GroupUpdateMessage'; + +type MembersAddedMessageParams = GroupUpdateMessageParams & { + typeOfChange: 'added'; + added: Array; +}; + +type MembersAddedWithHistoryMessageParams = GroupUpdateMessageParams & { + typeOfChange: 'addedWithHistory'; + added: Array; +}; + +type MembersRemovedMessageParams = GroupUpdateMessageParams & { + typeOfChange: 'removed'; + removed: Array; +}; + +type MembersPromotedMessageParams = GroupUpdateMessageParams & { + typeOfChange: 'promoted'; + promoted: Array; +}; + +/** + * GroupUpdateInfoChangeMessage is sent to the group's swarm. + */ +export class GroupUpdateMemberChangeMessage extends GroupUpdateMessage { + public readonly typeOfChange: 'added' | 'addedWithHistory' | 'removed' | 'promoted'; + + public readonly memberSessionIds: Array = []; // added, removed, promoted based on the type. 
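As a hedged aside, the adminSignature built in dataProto() below can only be checked against the group's identity key. A verification sketch, assuming the 03-prefixed groupPk is '03' + hex(ed25519 public key) and that the verifier rebuilds the same payload that is signed below:

import { GroupPubkeyType } from 'libsession_util_nodejs';
import { SignalService } from '../../../../../../protobuf'; // indicative path
import { LibSodiumWrappers } from '../../../../../crypto'; // indicative path

// sketch only: true when the admin signature over `MEMBER_CHANGE${type}${timestamp}` checks out
function verifyMemberChangeSig(
  sodium: LibSodiumWrappers,
  groupPk: GroupPubkeyType,
  type: SignalService.GroupUpdateMemberChangeMessage.Type,
  sentAtNetworkTimestamp: number,
  adminSignature: Uint8Array
): boolean {
  return sodium.crypto_sign_verify_detached(
    adminSignature,
    `MEMBER_CHANGE${type}${sentAtNetworkTimestamp}`,
    sodium.from_hex(groupPk.slice(2)) // strip the '03' prefix to get the raw ed25519 key (assumption)
  );
}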
+ public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + private readonly secretKey: Uint8Array; // not sent, only used for signing content as part of the message + private readonly sodium: LibSodiumWrappers; + + constructor( + params: ( + | MembersAddedMessageParams + | MembersRemovedMessageParams + | MembersPromotedMessageParams + | MembersAddedWithHistoryMessageParams + ) & + AdminSigDetails + ) { + super(params); + const { typeOfChange } = params; + + this.typeOfChange = typeOfChange; + this.secretKey = params.secretKey; + this.sodium = params.sodium; + + switch (typeOfChange) { + case 'added': { + if (isEmpty(params.added)) { + throw new Error('added members list cannot be empty'); + } + this.memberSessionIds = params.added; + break; + } + case 'addedWithHistory': { + if (isEmpty(params.added)) { + throw new Error('addedWithHistory members list cannot be empty'); + } + this.memberSessionIds = params.added; + break; + } + case 'removed': { + if (isEmpty(params.removed)) { + throw new Error('removed members list cannot be empty'); + } + this.memberSessionIds = params.removed; + break; + } + case 'promoted': { + if (isEmpty(params.promoted)) { + throw new Error('promoted members list cannot be empty'); + } + this.memberSessionIds = params.promoted; + break; + } + default: + assertUnreachable(typeOfChange, 'unhandled switch case'); + } + } + + public dataProto(): SignalService.DataMessage { + const { Type } = SignalService.GroupUpdateMemberChangeMessage; + + const type: SignalService.GroupUpdateMemberChangeMessage.Type = + this.typeOfChange === 'added' || this.typeOfChange === 'addedWithHistory' + ? Type.ADDED + : this.typeOfChange === 'removed' + ? Type.REMOVED + : Type.PROMOTED; + + const memberChangeMessage = new SignalService.GroupUpdateMemberChangeMessage({ + type, + memberSessionIds: this.memberSessionIds, + adminSignature: this.sodium.crypto_sign_detached( + stringToUint8Array(`MEMBER_CHANGE${type}${this.createAtNetworkTimestamp}`), + this.secretKey + ), + }); + + if (type === Type.ADDED && this.typeOfChange === 'addedWithHistory') { + memberChangeMessage.historyShared = true; + } + + return new SignalService.DataMessage({ groupUpdateMessage: { memberChangeMessage } }); + } + + public isForGroupSwarm(): boolean { + return true; + } + public isFor1o1Swarm(): boolean { + return false; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftMessage.ts new file mode 100644 index 0000000000..816c0f0f3e --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftMessage.ts @@ -0,0 +1,25 @@ +import { SignalService } from '../../../../../../protobuf'; +import { SnodeNamespaces } from '../../../../../apis/snode_api/namespaces'; +import { GroupUpdateMessage } from '../GroupUpdateMessage'; + +/** + * GroupUpdateMemberLeftMessage is sent to the group's swarm. 
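For the class below, a hedged construction sketch (groupPk and convo are assumed in scope; expiry details are derived the same way the factory functions earlier in this changeset do):

const createAtNetworkTimestamp = NetworkTime.now();
const memberLeft = new GroupUpdateMemberLeftMessage({
  groupPk, // the 03-prefixed group we are leaving
  createAtNetworkTimestamp,
  ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp),
});
// memberLeft.isForGroupSwarm() === true, so it is pushed to the ClosedGroupMessages namespace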
+ * Our pubkey, as the leaving member is part of the encryption of libsession for the new groups + * + */ +export class GroupUpdateMemberLeftMessage extends GroupUpdateMessage { + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + + public dataProto(): SignalService.DataMessage { + const memberLeftMessage = new SignalService.GroupUpdateMemberLeftMessage({}); + + return new SignalService.DataMessage({ groupUpdateMessage: { memberLeftMessage } }); + } + + public isForGroupSwarm(): boolean { + return true; + } + public isFor1o1Swarm(): boolean { + return false; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftNotificationMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftNotificationMessage.ts new file mode 100644 index 0000000000..a218029b95 --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftNotificationMessage.ts @@ -0,0 +1,26 @@ +import { SignalService } from '../../../../../../protobuf'; +import { SnodeNamespaces } from '../../../../../apis/snode_api/namespaces'; +import { GroupUpdateMessage } from '../GroupUpdateMessage'; + +/** + * GroupUpdateMemberLeftNotificationMessage is sent to the group's swarm. + * Our pubkey, as the leaving member is part of the encryption of libsession for the new groups + * + */ +export class GroupUpdateMemberLeftNotificationMessage extends GroupUpdateMessage { + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + + public dataProto(): SignalService.DataMessage { + const memberLeftNotificationMessage = + new SignalService.GroupUpdateMemberLeftNotificationMessage({}); + + return new SignalService.DataMessage({ groupUpdateMessage: { memberLeftNotificationMessage } }); + } + + public isForGroupSwarm(): boolean { + return true; + } + public isFor1o1Swarm(): boolean { + return false; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_user/GroupUpdateInviteMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_user/GroupUpdateInviteMessage.ts new file mode 100644 index 0000000000..0ff17c7456 --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_user/GroupUpdateInviteMessage.ts @@ -0,0 +1,68 @@ +import { SignalService } from '../../../../../../protobuf'; +import { UserUtils } from '../../../../../utils'; +import { Preconditions } from '../../../preconditions'; +import { GroupUpdateMessage, GroupUpdateMessageParams } from '../GroupUpdateMessage'; + +interface Params extends GroupUpdateMessageParams { + groupName: string; + adminSignature: Uint8Array; // this is a signature of `"INVITE" || inviteeSessionId || timestamp` + memberAuthData: Uint8Array; +} + +/** + * GroupUpdateInviteMessage is sent as a 1o1 message to the recipient, not through the group's swarm. 
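Since invite (and promote) messages go to a 1o1 swarm while the other group updates go to the group's swarm, here is a small sketch of how a sender might branch on the two flags introduced by GroupUpdateMessage (the real queueing code may differ):

import { GroupUpdateMessage } from '../GroupUpdateMessage'; // indicative path

function destinationKindOf(msg: GroupUpdateMessage): '1o1' | 'group-swarm' {
  if (msg.isForGroupSwarm()) {
    return 'group-swarm'; // e.g. member change / member left / info change messages
  }
  if (msg.isFor1o1Swarm()) {
    return '1o1'; // e.g. GroupUpdateInviteMessage, GroupUpdatePromoteMessage
  }
  throw new Error('a GroupUpdateMessage must target either the group swarm or a 1o1 swarm');
}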
+ */ +export class GroupUpdateInviteMessage extends GroupUpdateMessage { + public readonly groupName: Params['groupName']; + public readonly adminSignature: Params['adminSignature']; + public readonly memberAuthData: Params['memberAuthData']; + + constructor({ adminSignature, groupName, memberAuthData, ...others }: Params) { + super({ + ...others, + }); + + this.groupName = groupName; // not sure if getting an invite with an empty group name should make us drop an incoming group invite (and the keys associated to it too) + this.adminSignature = adminSignature; + this.memberAuthData = memberAuthData; + + Preconditions.checkUin8tArrayOrThrow({ + data: adminSignature, + expectedLength: 64, + varName: 'adminSignature', + context: this.constructor.toString(), + }); + Preconditions.checkUin8tArrayOrThrow({ + data: memberAuthData, + expectedLength: 100, + varName: 'memberAuthData', + context: this.constructor.toString(), + }); + } + + public dataProto(): SignalService.DataMessage { + const ourProfile = UserUtils.getOurProfile(); + const inviteMessage = new SignalService.GroupUpdateInviteMessage({ + groupSessionId: this.destination, + name: this.groupName, + adminSignature: this.adminSignature, + memberAuthData: this.memberAuthData, + }); + + return new SignalService.DataMessage({ + profile: ourProfile + ? { displayName: ourProfile.displayName, profilePicture: ourProfile.avatarPointer } + : undefined, + profileKey: ourProfile?.profileKey, + groupUpdateMessage: { inviteMessage }, + }); + } + + public isForGroupSwarm(): boolean { + return false; + } + + public isFor1o1Swarm(): boolean { + return true; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/group_v2/to_user/GroupUpdatePromoteMessage.ts b/ts/session/messages/outgoing/controlMessage/group_v2/to_user/GroupUpdatePromoteMessage.ts new file mode 100644 index 0000000000..312af61a39 --- /dev/null +++ b/ts/session/messages/outgoing/controlMessage/group_v2/to_user/GroupUpdatePromoteMessage.ts @@ -0,0 +1,48 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { SignalService } from '../../../../../../protobuf'; +import { GroupUpdateMessage, GroupUpdateMessageParams } from '../GroupUpdateMessage'; + +interface Params extends GroupUpdateMessageParams { + groupPk: GroupPubkeyType; + groupIdentitySeed: Uint8Array; + groupName: string; +} + +/** + * GroupUpdatePromoteMessage is sent as a 1o1 message to the recipient, not through the group's swarm. 
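A hedged note on why a 32-byte groupIdentitySeed is enough to promote someone: the recipient can presumably rebuild the group's ed25519 identity keypair from it with libsodium. In the sketch below, promoteProto stands for the decoded SignalService.GroupUpdatePromoteMessage and sodium is the wrapper returned by getSodiumRenderer() elsewhere in this changeset.

// sketch of the receiving side, not shown in this diff
const { publicKey, privateKey } = sodium.crypto_sign_seed_keypair(promoteProto.groupIdentitySeed);
// publicKey should correspond to the group's 03-prefixed id (minus the prefix), and holding
// privateKey is what lets the newly promoted member sign admin actions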
+ */ +export class GroupUpdatePromoteMessage extends GroupUpdateMessage { + public readonly groupIdentitySeed: Params['groupIdentitySeed']; + public readonly groupName: Params['groupName']; + + constructor(params: Params) { + super(params); + + this.groupIdentitySeed = params.groupIdentitySeed; + this.groupName = params.groupName; + if (!this.groupIdentitySeed || this.groupIdentitySeed.length !== 32) { + throw new Error('groupIdentitySeed must be set'); + } + if (!this.groupName) { + throw new Error('name must be set and not empty'); + } + } + + public dataProto(): SignalService.DataMessage { + const promoteMessage = new SignalService.GroupUpdatePromoteMessage({ + groupIdentitySeed: this.groupIdentitySeed, + name: this.groupName, + }); + + return new SignalService.DataMessage({ + groupUpdateMessage: { promoteMessage }, + }); + } + + public isForGroupSwarm(): boolean { + return false; + } + public isFor1o1Swarm(): boolean { + return true; + } +} diff --git a/ts/session/messages/outgoing/controlMessage/receipt/ReadReceiptMessage.ts b/ts/session/messages/outgoing/controlMessage/receipt/ReadReceiptMessage.ts index 511a66117b..22770934a8 100644 --- a/ts/session/messages/outgoing/controlMessage/receipt/ReadReceiptMessage.ts +++ b/ts/session/messages/outgoing/controlMessage/receipt/ReadReceiptMessage.ts @@ -1,8 +1,28 @@ +import { ContentMessage } from '../..'; import { SignalService } from '../../../../../protobuf'; -import { ReceiptMessage } from './ReceiptMessage'; +import { MessageParams } from '../../Message'; -export class ReadReceiptMessage extends ReceiptMessage { - public getReceiptType(): SignalService.ReceiptMessage.Type { - return SignalService.ReceiptMessage.Type.READ; +interface ReadReceiptMessageParams extends MessageParams { + timestamps: Array; +} +export class ReadReceiptMessage extends ContentMessage { + public readonly timestamps: Array; + + constructor({ createAtNetworkTimestamp, identifier, timestamps }: ReadReceiptMessageParams) { + super({ createAtNetworkTimestamp, identifier }); + this.timestamps = timestamps; + } + + public contentProto(): SignalService.Content { + return new SignalService.Content({ + receiptMessage: this.receiptProto(), + }); + } + + protected receiptProto(): SignalService.ReceiptMessage { + return new SignalService.ReceiptMessage({ + type: SignalService.ReceiptMessage.Type.READ, + timestamp: this.timestamps, + }); } } diff --git a/ts/session/messages/outgoing/controlMessage/receipt/ReceiptMessage.ts b/ts/session/messages/outgoing/controlMessage/receipt/ReceiptMessage.ts deleted file mode 100644 index 5beebca220..0000000000 --- a/ts/session/messages/outgoing/controlMessage/receipt/ReceiptMessage.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { SignalService } from '../../../../../protobuf'; -import { MessageParams } from '../../Message'; -import { ContentMessage } from '../..'; - -interface ReceiptMessageParams extends MessageParams { - timestamps: Array; -} -export abstract class ReceiptMessage extends ContentMessage { - public readonly timestamps: Array; - - constructor({ timestamp, identifier, timestamps }: ReceiptMessageParams) { - super({ timestamp, identifier }); - this.timestamps = timestamps; - } - - public abstract getReceiptType(): SignalService.ReceiptMessage.Type; - - public contentProto(): SignalService.Content { - return new SignalService.Content({ - receiptMessage: this.receiptProto(), - }); - } - - protected receiptProto(): SignalService.ReceiptMessage { - return new SignalService.ReceiptMessage({ - type: this.getReceiptType(), - timestamp: 
this.timestamps, - }); - } -} diff --git a/ts/session/messages/outgoing/preconditions.ts b/ts/session/messages/outgoing/preconditions.ts new file mode 100644 index 0000000000..8a29e421ff --- /dev/null +++ b/ts/session/messages/outgoing/preconditions.ts @@ -0,0 +1,37 @@ +import { isEmpty } from 'lodash'; +import { PubKey } from '../../types'; +import { PreConditionFailed } from '../../utils/errors'; + +function checkUin8tArrayOrThrow({ + context, + data, + expectedLength, + varName, +}: { + data: Uint8Array; + expectedLength: number; + varName: string; + context: string; +}) { + if (isEmpty(data) || data.length !== expectedLength) { + throw new PreConditionFailed( + `${varName} length should be ${expectedLength} for ctx:"${context}"` + ); + } +} + +function checkArrayHaveOnly05Pubkeys({ + context, + arr, + varName, +}: { + arr: Array; + varName: string; + context: string; +}) { + if (arr.some(v => !PubKey.is05Pubkey(v))) { + throw new PreConditionFailed(`${varName} did not contain only 05 pubkeys for ctx:"${context}"`); + } +} + +export const Preconditions = { checkUin8tArrayOrThrow, checkArrayHaveOnly05Pubkeys }; diff --git a/ts/session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage.ts b/ts/session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage.ts index 589ea63610..7e60bc8f57 100644 --- a/ts/session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage.ts +++ b/ts/session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage.ts @@ -1,6 +1,9 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; import { SignalService } from '../../../../protobuf'; +import { SnodeNamespaces } from '../../../apis/snode_api/namespaces'; import { PubKey } from '../../../types'; import { StringUtils } from '../../../utils'; +import { DataMessage } from '../DataMessage'; import { ClosedGroupMessage, ClosedGroupMessageParams, @@ -8,8 +11,11 @@ import { import { VisibleMessage } from './VisibleMessage'; interface ClosedGroupVisibleMessageParams - extends Omit { - groupId: PubKey; + extends Omit< + ClosedGroupMessageParams, + 'expireTimer' | 'expirationType' | 'identifier' | 'createAtNetworkTimestamp' + > { + groupId: string; chatMessage: VisibleMessage; } @@ -18,20 +24,27 @@ export class ClosedGroupVisibleMessage extends ClosedGroupMessage { constructor(params: ClosedGroupVisibleMessageParams) { super({ - timestamp: params.chatMessage.timestamp, - identifier: params.identifier ?? params.chatMessage.identifier, + createAtNetworkTimestamp: params.chatMessage.createAtNetworkTimestamp, + identifier: params.chatMessage.identifier ?? 
params.chatMessage.identifier, groupId: params.groupId, expirationType: params.chatMessage.expirationType, expireTimer: params.chatMessage.expireTimer, }); this.chatMessage = params.chatMessage; + if ( + this.chatMessage.expirationType !== 'deleteAfterSend' && + this.chatMessage.expirationType !== 'unknown' && + this.chatMessage.expirationType !== null + ) { + throw new Error('group visible msg only support DaS and off Disappearing options'); + } if (!params.groupId) { throw new Error('ClosedGroupVisibleMessage: groupId must be set'); } - if (PubKey.isClosedGroupV3(PubKey.cast(params.groupId).key)) { + if (PubKey.is03Pubkey(PubKey.cast(params.groupId).key)) { throw new Error('GroupContext should not be used anymore with closed group v3'); } } @@ -53,3 +66,35 @@ export class ClosedGroupVisibleMessage extends ClosedGroupMessage { return dataProto; } } + +type WithDestinationGroupPk = { destination: GroupPubkeyType }; + +export class ClosedGroupV2VisibleMessage extends DataMessage { + private readonly chatMessage: VisibleMessage; + public readonly destination: GroupPubkeyType; + public readonly namespace = SnodeNamespaces.ClosedGroupMessages; + + constructor( + params: Pick & WithDestinationGroupPk + ) { + super(params.chatMessage); + this.chatMessage = params.chatMessage; + if ( + this.chatMessage.expirationType !== 'deleteAfterSend' && + this.chatMessage.expirationType !== 'unknown' + ) { + throw new Error('groupv2 message only support DaS and off Disappearing options'); + } + + if (!PubKey.is03Pubkey(params.destination)) { + throw new Error('ClosedGroupV2VisibleMessage only work with 03-groups destination'); + } + this.destination = params.destination; + } + + public dataProto(): SignalService.DataMessage { + // expireTimer is set in the dataProto in this call directly + const dataProto = this.chatMessage.dataProto(); + return dataProto; + } +} diff --git a/ts/session/messages/outgoing/visibleMessage/GroupInvitationMessage.ts b/ts/session/messages/outgoing/visibleMessage/GroupInvitationMessage.ts index de47d3cbf3..e647a29834 100644 --- a/ts/session/messages/outgoing/visibleMessage/GroupInvitationMessage.ts +++ b/ts/session/messages/outgoing/visibleMessage/GroupInvitationMessage.ts @@ -1,18 +1,19 @@ import { SignalService } from '../../../../protobuf'; -import { VisibleMessage, VisibleMessageParams } from './VisibleMessage'; +import { DataMessage } from '../DataMessage'; +import { ExpirableMessageParams } from '../ExpirableMessage'; -interface GroupInvitationMessageParams extends VisibleMessageParams { +interface GroupInvitationMessageParams extends ExpirableMessageParams { url: string; name: string; } -export class GroupInvitationMessage extends VisibleMessage { +export class GroupInvitationMessage extends DataMessage { private readonly url: string; private readonly name: string; constructor(params: GroupInvitationMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, expirationType: params.expirationType, expireTimer: params.expireTimer, @@ -28,7 +29,6 @@ export class GroupInvitationMessage extends VisibleMessage { }); return new SignalService.DataMessage({ - ...super.dataProto(), openGroupInvitation, }); } diff --git a/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts b/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts index e8d8e44b82..a1b186e81a 100644 --- a/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts +++ 
b/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts @@ -2,8 +2,9 @@ import ByteBuffer from 'bytebuffer'; import { isEmpty } from 'lodash'; import { SignalService } from '../../../../protobuf'; import { Reaction } from '../../../../types/Reaction'; +import { DataMessage } from '../DataMessage'; import { LokiProfile } from '../../../../types/message'; -import { ExpirableMessage, ExpirableMessageParams } from '../ExpirableMessage'; +import { ExpirableMessageParams } from '../ExpirableMessage'; interface AttachmentPointerCommon { contentType?: string; @@ -71,7 +72,7 @@ export interface VisibleMessageParams extends ExpirableMessageParams { syncTarget?: string; // undefined means it is not a synced message } -export class VisibleMessage extends ExpirableMessage { +export class VisibleMessage extends DataMessage { public readonly reaction?: Reaction; private readonly attachments?: Array; @@ -87,7 +88,7 @@ export class VisibleMessage extends ExpirableMessage { constructor(params: VisibleMessageParams) { super({ - timestamp: params.timestamp, + createAtNetworkTimestamp: params.createAtNetworkTimestamp, identifier: params.identifier, expirationType: params.expirationType, expireTimer: params.expireTimer, @@ -113,7 +114,7 @@ export class VisibleMessage extends ExpirableMessage { } public dataProto(): SignalService.DataMessage { - const dataMessage = super.dataProto(); + const dataMessage = new SignalService.DataMessage({}); if (this.body) { dataMessage.body = this.body; @@ -178,13 +179,14 @@ export class VisibleMessage extends ExpirableMessage { }); } - dataMessage.timestamp = this.timestamp; - return dataMessage; } public isEqual(comparator: VisibleMessage): boolean { - return this.identifier === comparator.identifier && this.timestamp === comparator.timestamp; + return ( + this.identifier === comparator.identifier && + this.createAtNetworkTimestamp === comparator.createAtNetworkTimestamp + ); } } diff --git a/ts/session/onions/onionPath.ts b/ts/session/onions/onionPath.ts index ee0d9f2800..a7b792153c 100644 --- a/ts/session/onions/onionPath.ts +++ b/ts/session/onions/onionPath.ts @@ -10,17 +10,16 @@ import { Data } from '../../data/data'; import { Snode } from '../../data/types'; import { updateOnionPaths } from '../../state/ducks/onion'; import { APPLICATION_JSON } from '../../types/MIME'; -import { Onions, snodeHttpsAgent } from '../apis/snode_api/onions'; import { ERROR_CODE_NO_CONNECT } from '../apis/snode_api/SNodeAPI'; -import * as SnodePool from '../apis/snode_api/snodePool'; +import { Onions, snodeHttpsAgent } from '../apis/snode_api/onions'; + import { DURATION } from '../constants'; import { UserUtils } from '../utils'; import { allowOnlyOneAtATime } from '../utils/Promise'; import { ed25519Str } from '../utils/String'; - -export const desiredGuardCount = 2; -export const minimumGuardCount = 1; -export const ONION_REQUEST_HOPS = 3; +import { SnodePool } from '../apis/snode_api/snodePool'; +import { SnodePoolConstants } from '../apis/snode_api/snodePoolConstants'; +import { desiredGuardCount, minimumGuardCount, ONION_REQUEST_HOPS } from './onionPathConstants'; export function getOnionPathMinTimeout() { return DURATION.SECONDS; @@ -109,23 +108,23 @@ export async function dropSnodeFromPath(snodeEd25519: string) { // make a copy now so we don't alter the real one while doing stuff here const oldPaths = _.cloneDeep(onionPaths); - let pathtoPatchUp = oldPaths[pathWithSnodeIndex]; + let pathToPatchUp = oldPaths[pathWithSnodeIndex]; // remove the snode causing issue from this path - 
const nodeToRemoveIndex = pathtoPatchUp.findIndex(snode => snode.pubkey_ed25519 === snodeEd25519); + const nodeToRemoveIndex = pathToPatchUp.findIndex(snode => snode.pubkey_ed25519 === snodeEd25519); // this should not happen, but well... if (nodeToRemoveIndex === -1) { return; } - pathtoPatchUp = pathtoPatchUp.filter(snode => snode.pubkey_ed25519 !== snodeEd25519); + pathToPatchUp = pathToPatchUp.filter(snode => snode.pubkey_ed25519 !== snodeEd25519); const ed25519KeysToExclude = _.flattenDeep(oldPaths).map(m => m.pubkey_ed25519); // this call throws if it cannot return a valid snode. const snodeToAppendToPath = await SnodePool.getRandomSnode(ed25519KeysToExclude); // Don't test the new snode as this would reveal the user's IP - pathtoPatchUp.push(snodeToAppendToPath); - onionPaths[pathWithSnodeIndex] = pathtoPatchUp; + pathToPatchUp.push(snodeToAppendToPath); + onionPaths[pathWithSnodeIndex] = pathToPatchUp; } export async function getOnionPath({ toExclude }: { toExclude?: Snode }): Promise> { @@ -316,7 +315,7 @@ export async function testGuardNode(snode: Snode) { response = await insecureNodeFetch(url, fetchOptions); } catch (e) { if (e.type === 'request-timeout') { - window?.log?.warn('test :,', ed25519Str(snode.pubkey_ed25519)); + window?.log?.warn('testGuardNode request timed out for:', ed25519Str(snode.pubkey_ed25519)); } if (e.code === 'ENETUNREACH') { window?.log?.warn('no network on node,', snode); @@ -343,7 +342,7 @@ export async function selectGuardNodes(): Promise> { const nodePool = await SnodePool.getSnodePoolFromDBOrFetchFromSeed(); window.log.info(`selectGuardNodes snodePool length: ${nodePool.length}`); - if (nodePool.length < SnodePool.minSnodePoolCount) { + if (nodePool.length < SnodePoolConstants.minSnodePoolCount) { window?.log?.error( `Could not select guard nodes. Not enough nodes in the pool: ${nodePool.length}` ); @@ -392,8 +391,8 @@ export async function selectGuardNodes(): Promise> { guardNodes = selectedGuardNodes.slice(0, desiredGuardCount); if (guardNodes.length < desiredGuardCount) { - window?.log?.error(`Cound't get enough guard nodes, only have: ${guardNodes.length}`); - throw new Error(`Cound't get enough guard nodes, only have: ${guardNodes.length}`); + window?.log?.error(`Couldn't get enough guard nodes, only have: ${guardNodes.length}`); + throw new Error(`Couldn't get enough guard nodes, only have: ${guardNodes.length}`); } await internalUpdateGuardNodes(guardNodes); @@ -449,7 +448,7 @@ async function buildNewOnionPathsWorker() { // get an up to date list of snodes from cache, from db, or from the a seed node. let allNodes = await SnodePool.getSnodePoolFromDBOrFetchFromSeed(); - if (allNodes.length <= SnodePool.minSnodePoolCount) { + if (allNodes.length <= SnodePoolConstants.minSnodePoolCount) { throw new Error(`Cannot rebuild path as we do not have enough snodes: ${allNodes.length}`); } @@ -463,7 +462,7 @@ async function buildNewOnionPathsWorker() { `SessionSnodeAPI::buildNewOnionPaths, snodePool length: ${allNodes.length}` ); // get all snodes minus the selected guardNodes - if (allNodes.length <= SnodePool.minSnodePoolCount) { + if (allNodes.length <= SnodePoolConstants.minSnodePoolCount) { throw new Error('Too few nodes to build an onion path. 
Even after fetching from seed.'); } @@ -477,7 +476,7 @@ async function buildNewOnionPathsWorker() { return _.fill(Array(group.length), _.sample(group) as Snode); }) ); - if (oneNodeForEachSubnet24KeepingRatio.length <= SnodePool.minSnodePoolCount) { + if (oneNodeForEachSubnet24KeepingRatio.length <= SnodePoolConstants.minSnodePoolCount) { throw new Error( 'Too few nodes "unique by ip" to build an onion path. Even after fetching from seed.' ); diff --git a/ts/session/onions/onionPathConstants.ts b/ts/session/onions/onionPathConstants.ts new file mode 100644 index 0000000000..d40d8485b0 --- /dev/null +++ b/ts/session/onions/onionPathConstants.ts @@ -0,0 +1,3 @@ +export const desiredGuardCount = 2; +export const minimumGuardCount = 1; +export const ONION_REQUEST_HOPS = 3; diff --git a/ts/session/onions/onionSend.ts b/ts/session/onions/onionSend.ts index eb14f21c8c..ec2bb53737 100644 --- a/ts/session/onions/onionSend.ts +++ b/ts/session/onions/onionSend.ts @@ -17,7 +17,6 @@ import { FinalRelayOptions, Onions, STATUS_NO_STATUS, - SnodeResponse, buildErrorMessageWithFailedCode, } from '../apis/snode_api/onions'; import { PROTOCOLS } from '../constants'; @@ -75,12 +74,6 @@ const getOnionPathForSending = async () => { return pathNodes; }; -export type OnionSnodeResponse = { - result: SnodeResponse; - txtResponse: string; - response: string; -}; - export type OnionV4SnodeResponse = { body: string | object | null; // if the content can be decoded as string bodyBinary: Uint8Array | null; // otherwise we return the raw content (could be an image data or file from sogs/fileserver) @@ -159,7 +152,7 @@ const sendViaOnionV4ToNonSnodeWithRetries = async ( * call above will call us again with the same params but a different path. * If the error is not recoverable, it throws a pRetry.AbortError. */ - const onionV4Response = await Onions.sendOnionRequestHandlingSnodeEject({ + const onionV4Response = await Onions.sendOnionRequestHandlingSnodeEjectNoRetries({ nodePath: pathNodes, destSnodeX25519: destinationX25519Key, finalDestOptions: payloadObj, @@ -167,11 +160,12 @@ const sendViaOnionV4ToNonSnodeWithRetries = async ( abortSignal, useV4: true, throwErrors, + allow401s: false, }); if (window.sessionFeatureFlags?.debug.debugNonSnodeRequests) { window.log.info( - 'sendViaOnionV4ToNonSnodeWithRetries: sendOnionRequestHandlingSnodeEject returned: ', + 'sendViaOnionV4ToNonSnodeWithRetries: sendOnionRequestHandlingSnodeEjectNoRetries returned: ', JSON.stringify(onionV4Response) ); } @@ -236,7 +230,7 @@ const sendViaOnionV4ToNonSnodeWithRetries = async ( }, { retries: 2, // retry 3 (2+1) times at most - minTimeout: 100, + minTimeout: OnionSending.getMinTimeoutForSogs(), onFailedAttempt: e => { window?.log?.warn( `sendViaOnionV4ToNonSnodeWithRetries attempt #${e.attemptNumber} failed. 
${e.retriesLeft} retries left...: ${e.message}` @@ -526,6 +520,13 @@ async function sendJsonViaOnionV4ToFileServer(sendOptions: { return res as OnionV4JSONSnodeResponse; } +/** + * This is used during stubbing so we can override the time between retries (so the unit tests are faster) + */ +function getMinTimeoutForSogs() { + return 100; +} + // we export these methods for stubbing during testing export const OnionSending = { endpointRequiresDecoding, @@ -537,4 +538,5 @@ export const OnionSending = { sendBinaryViaOnionV4ToSogs, getBinaryViaOnionV4FromFileServer, sendJsonViaOnionV4ToFileServer, + getMinTimeoutForSogs, }; diff --git a/ts/session/profile_manager/ProfileManager.ts b/ts/session/profile_manager/ProfileManager.ts index 3b321cac7f..25d7b305da 100644 --- a/ts/session/profile_manager/ProfileManager.ts +++ b/ts/session/profile_manager/ProfileManager.ts @@ -1,7 +1,7 @@ import { isEmpty, isNil } from 'lodash'; +import { ConvoHub } from '../conversations'; import { setLastProfileUpdateTimestamp } from '../../util/storage'; import { UserConfigWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; -import { getConversationController } from '../conversations'; import { SyncUtils, UserUtils } from '../utils'; import { fromHexToArray, sanitizeSessionUsername, toHex } from '../utils/String'; import { AvatarDownload } from '../utils/job_runners/jobs/AvatarDownloadJob'; @@ -19,16 +19,15 @@ export type Profile = { */ async function updateOurProfileSync({ displayName, profileUrl, profileKey, priority }: Profile) { const us = UserUtils.getOurPubKeyStrFromCache(); - const ourConvo = getConversationController().get(us); + const ourConvo = ConvoHub.use().get(us); if (!ourConvo?.id) { window?.log?.warn('[profileupdate] Cannot update our profile without convo associated'); return; } await updateProfileOfContact(us, displayName, profileUrl, profileKey); - if (priority !== null && ourConvo.get('priority') !== priority) { - ourConvo.set('priority', priority); - await ourConvo.commit(); + if (priority !== null) { + await ourConvo.setPriorityFromWrapper(priority, true); } } @@ -41,14 +40,14 @@ async function updateProfileOfContact( profileUrl: string | null | undefined, profileKey: Uint8Array | null | undefined ) { - const conversation = getConversationController().get(pubkey); - // TODO we should make sure that this function does not get call directly when `updateOurProfileSync` should be called instead. I.e. for avatars received in messages from ourself + const conversation = ConvoHub.use().get(pubkey); + if (!conversation || !conversation.isPrivate()) { window.log.warn('updateProfileOfContact can only be used for existing and private convos'); return; } let changes = false; - const existingDisplayName = conversation.get('displayNameInProfile'); + const existingDisplayName = conversation.getRealSessionUsername(); // avoid setting the display name to an invalid value if (existingDisplayName !== displayName && !isEmpty(displayName)) { @@ -60,8 +59,8 @@ async function updateProfileOfContact( let avatarChanged = false; // trust whatever we get as an update. It either comes from a shared config wrapper or one of that user's message. But in any case we should trust it, even if it gets resetted. 
- const prevPointer = conversation.get('avatarPointer'); - const prevProfileKey = conversation.get('profileKey'); + const prevPointer = conversation.getAvatarPointer(); + const prevProfileKey = conversation.getProfileKey(); // we have to set it right away and not in the async download job, as the next .commit will save it to the // database and wrapper (and we do not want to override anything in the wrapper's content @@ -128,7 +127,7 @@ async function updateOurProfileDisplayNameOnboarding(newName: string) { async function updateOurProfileDisplayName(newName: string) { const ourNumber = UserUtils.getOurPubKeyStrFromCache(); - const conversation = await getConversationController().getOrCreateAndWait( + const conversation = await ConvoHub.use().getOrCreateAndWait( ourNumber, ConversationTypeEnum.PRIVATE ); diff --git a/ts/session/sending/MessageQueue.ts b/ts/session/sending/MessageQueue.ts index c7a3207cbd..7a588f6543 100644 --- a/ts/session/sending/MessageQueue.ts +++ b/ts/session/sending/MessageQueue.ts @@ -1,10 +1,10 @@ import { AbortController } from 'abort-controller'; +import { PubkeyType } from 'libsession_util_nodejs'; import { MessageSender } from '.'; -import { ConfigurationMessage } from '../messages/outgoing/controlMessage/ConfigurationMessage'; import { ClosedGroupMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupMessage'; import { ClosedGroupNameChangeMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNameChangeMessage'; -import { PubKey, RawMessage } from '../types'; +import { OutgoingRawMessage, PubKey } from '../types'; import { JobQueue, MessageUtils, UserUtils } from '../utils'; import { PendingMessageCache } from './PendingMessageCache'; @@ -13,9 +13,11 @@ import { ExpirationTimerUpdateMessage } from '../messages/outgoing/controlMessag import { ClosedGroupAddedMembersMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupAddedMembersMessage'; import { ClosedGroupEncryptionPairMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairMessage'; import { ClosedGroupMemberLeftMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupMemberLeftMessage'; -import { ClosedGroupNewMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNewMessage'; import { ClosedGroupRemovedMembersMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupRemovedMembersMessage'; -import { ClosedGroupVisibleMessage } from '../messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; +import { + ClosedGroupV2VisibleMessage, + ClosedGroupVisibleMessage, +} from '../messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; import { SyncMessageType } from '../utils/sync/syncUtils'; import { MessageSentHandler } from './MessageSentHandler'; @@ -23,28 +25,26 @@ import { OpenGroupMessageV2 } from '../apis/open_group_api/opengroupV2/OpenGroup import { sendSogsReactionOnionV4 } from '../apis/open_group_api/sogsv3/sogsV3SendReaction'; import { SnodeNamespaces, - SnodeNamespacesGroup, + SnodeNamespacesLegacyGroup, SnodeNamespacesUser, } from '../apis/snode_api/namespaces'; import { CallMessage } from '../messages/outgoing/controlMessage/CallMessage'; -import { SharedConfigMessage } from '../messages/outgoing/controlMessage/SharedConfigMessage'; +import { DataExtractionNotificationMessage } from '../messages/outgoing/controlMessage/DataExtractionNotificationMessage'; +import { TypingMessage } from '../messages/outgoing/controlMessage/TypingMessage'; import { UnsendMessage } from 
'../messages/outgoing/controlMessage/UnsendMessage'; +import { ClosedGroupNewMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNewMessage'; +import { GroupUpdateDeleteMemberContentMessage } from '../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage'; +import { GroupUpdateInfoChangeMessage } from '../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage'; +import { GroupUpdateMemberChangeMessage } from '../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage'; +import { GroupUpdateMemberLeftMessage } from '../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberLeftMessage'; +import { GroupUpdateInviteMessage } from '../messages/outgoing/controlMessage/group_v2/to_user/GroupUpdateInviteMessage'; +import { GroupUpdatePromoteMessage } from '../messages/outgoing/controlMessage/group_v2/to_user/GroupUpdatePromoteMessage'; import { OpenGroupVisibleMessage } from '../messages/outgoing/visibleMessage/OpenGroupVisibleMessage'; import { OpenGroupRequestCommonType } from '../../data/types'; -type ClosedGroupMessageType = - | ClosedGroupVisibleMessage - | ClosedGroupAddedMembersMessage - | ClosedGroupRemovedMembersMessage - | ClosedGroupNameChangeMessage - | ClosedGroupMemberLeftMessage - | ExpirationTimerUpdateMessage - | ClosedGroupEncryptionPairMessage - | UnsendMessage; - // ClosedGroupEncryptionPairReplyMessage must be sent to a user pubkey. Not a group. -export class MessageQueue { +export class MessageQueueCl { private readonly jobQueues: Map = new Map(); private readonly pendingMessageCache: PendingMessageCache; @@ -57,10 +57,10 @@ export class MessageQueue { destinationPubKey: PubKey, message: ContentMessage, namespace: SnodeNamespaces, - sentCb?: (message: RawMessage) => Promise, + sentCb?: (message: OutgoingRawMessage) => Promise, isGroup = false ): Promise { - if (message instanceof ConfigurationMessage || !!(message as any).syncTarget) { + if ((message as any).syncTarget) { throw new Error('SyncMessage needs to be sent with sendSyncMessage'); } await this.process(destinationPubKey, message, namespace, sentCb, isGroup); @@ -85,7 +85,7 @@ export class MessageQueue { blinded: boolean; filesToLink: Array; }) { - // Skipping the queue for Open Groups v2; the message is sent directly + // Skipping the MessageQueue for Open Groups v2; the message is sent directly try { // NOTE Reactions are handled separately @@ -121,7 +121,7 @@ export class MessageQueue { `Failed to send message to open group: ${roomInfos.serverUrl}:${roomInfos.roomId}:`, e ); - await MessageSentHandler.handleMessageSentFailure( + await MessageSentHandler.handlePublicMessageSentFailure( message, e || new Error('Failed to send message to open group.') ); @@ -161,7 +161,7 @@ export class MessageQueue { `Failed to send message to open group: ${roomInfos.serverUrl}:${roomInfos.roomId}:`, e.message ); - await MessageSentHandler.handleMessageSentFailure( + await MessageSentHandler.handlePublicMessageSentFailure( message, e || new Error('Failed to send message to open group.') ); @@ -178,9 +178,17 @@ export class MessageQueue { groupPubKey, sentCb, }: { - message: ClosedGroupMessageType; - namespace: SnodeNamespacesGroup; - sentCb?: (message: RawMessage) => Promise; + message: + | ClosedGroupVisibleMessage + | ClosedGroupAddedMembersMessage + | ClosedGroupRemovedMembersMessage + | ClosedGroupNameChangeMessage + | ClosedGroupMemberLeftMessage + | ExpirationTimerUpdateMessage + | 
ClosedGroupEncryptionPairMessage + | UnsendMessage; + namespace: SnodeNamespacesLegacyGroup; + sentCb?: (message: OutgoingRawMessage) => Promise; groupPubKey?: PubKey; }): Promise { let destinationPubKey: PubKey | undefined = groupPubKey; @@ -196,6 +204,74 @@ export class MessageQueue { return this.sendToPubKey(PubKey.cast(destinationPubKey), message, namespace, sentCb, true); } + public async sendToGroupV2({ + message, + sentCb, + }: { + message: + | ClosedGroupV2VisibleMessage + | GroupUpdateMemberChangeMessage + | GroupUpdateInfoChangeMessage + | GroupUpdateDeleteMemberContentMessage + | GroupUpdateMemberLeftMessage; + sentCb?: (message: OutgoingRawMessage) => Promise; + }): Promise { + if (!message.destination) { + throw new Error('Invalid group message passed in sendToGroupV2.'); + } + + return this.sendToPubKey( + PubKey.cast(message.destination), + message, + message.namespace, + sentCb, + true + ); + } + + public async sendToGroupV2NonDurably({ + message, + }: { + message: + | ClosedGroupV2VisibleMessage + | GroupUpdateMemberChangeMessage + | GroupUpdateInfoChangeMessage + | GroupUpdateDeleteMemberContentMessage + | GroupUpdateMemberLeftMessage; + }) { + if (!message.destination || !PubKey.is03Pubkey(message.destination)) { + throw new Error('Invalid group message passed in sendToGroupV2NonDurably.'); + } + + return this.sendToPubKeyNonDurably({ + message, + namespace: message.namespace, + pubkey: PubKey.cast(message.destination), + isSyncMessage: false, + }); + } + + public async sendToLegacyGroupNonDurably({ + message, + namespace, + destination, + }: { + message: ClosedGroupMemberLeftMessage; + namespace: SnodeNamespaces.LegacyClosedGroup; + destination: PubkeyType; + }) { + if (!destination || !PubKey.is05Pubkey(destination)) { + throw new Error('Invalid legacy group message passed in sendToLegacyGroupNonDurably.'); + } + + return this.sendToPubKeyNonDurably({ + message, + namespace, + pubkey: PubKey.cast(destination), + isSyncMessage: false, + }); + } + public async sendSyncMessage({ namespace, message, @@ -203,17 +279,12 @@ export class MessageQueue { }: { namespace: SnodeNamespacesUser; message?: SyncMessageType; - sentCb?: (message: RawMessage) => Promise; + sentCb?: (message: OutgoingRawMessage) => Promise; }): Promise { if (!message) { return; } - if ( - !(message instanceof ConfigurationMessage) && - !(message instanceof UnsendMessage) && - !(message instanceof SharedConfigMessage) && - !(message as any)?.syncTarget - ) { + if (!(message instanceof UnsendMessage) && !(message as any)?.syncTarget) { throw new Error('Invalid message given to sendSyncMessage'); } @@ -222,41 +293,83 @@ export class MessageQueue { } /** - * Sends a message that awaits until the message is completed sending + * Send a message to a 1o1 swarm * @param user user pub key to send to * @param message Message to be sent */ - public async sendToPubKeyNonDurably({ + public async sendTo1o1NonDurably({ namespace, message, pubkey, }: { pubkey: PubKey; message: - | ClosedGroupNewMessage + | TypingMessage // no point of caching the typing message, they are very short lived + | DataExtractionNotificationMessage | CallMessage - | SharedConfigMessage - | ClosedGroupMemberLeftMessage; + | ClosedGroupNewMessage + | GroupUpdateInviteMessage + | GroupUpdatePromoteMessage; + namespace: SnodeNamespaces.Default; + }): Promise { + return this.sendToPubKeyNonDurably({ message, namespace, pubkey, isSyncMessage: false }); + } + + /** + * Sends a message that awaits until the message is completed sending + * @param user 
user pub key to send to + * @param message Message to be sent + */ + private async sendToPubKeyNonDurably({ + namespace, + message, + pubkey, + isSyncMessage, + }: { + pubkey: PubKey; + message: ContentMessage; namespace: SnodeNamespaces; - }): Promise { - let rawMessage; + isSyncMessage: boolean; + }): Promise { + const rawMessage = await MessageUtils.toRawMessage(pubkey, message, namespace); + return this.sendSingleMessageAndHandleResult({ rawMessage, isSyncMessage }); + } + + private async sendSingleMessageAndHandleResult({ + rawMessage, + isSyncMessage, + }: { + rawMessage: OutgoingRawMessage; + isSyncMessage: boolean; + }) { try { - rawMessage = await MessageUtils.toRawMessage(pubkey, message, namespace); - const { wrappedEnvelope, effectiveTimestamp } = await MessageSender.send({ + const { effectiveTimestamp } = await MessageSender.sendSingleMessage({ message: rawMessage, - isSyncMessage: false, + isSyncMessage, }); - await MessageSentHandler.handleMessageSentSuccess( - rawMessage, - effectiveTimestamp, - wrappedEnvelope - ); + + const cb = this.pendingMessageCache.callbacks.get(rawMessage.identifier); + + if (cb) { + await cb(rawMessage); + } + this.pendingMessageCache.callbacks.delete(rawMessage.identifier); + return effectiveTimestamp; } catch (error) { - if (rawMessage) { - await MessageSentHandler.handleMessageSentFailure(rawMessage, error); - } - return false; + window.log.error( + 'sendSingleMessageAndHandleResult: failed to send message with: ', + error.message + ); + await MessageSentHandler.handleSwarmMessageSentFailure( + { device: rawMessage.device, identifier: rawMessage.identifier }, + error + ); + + return null; + } finally { + // Remove from the cache because retrying is done in the sender + void this.pendingMessageCache.remove(rawMessage); } } @@ -275,30 +388,7 @@ export class MessageQueue { if (!jobQueue.has(messageId)) { // We put the event handling inside this job to avoid sending duplicate events const job = async () => { - try { - const { wrappedEnvelope, effectiveTimestamp } = await MessageSender.send({ - message, - isSyncMessage, - }); - - await MessageSentHandler.handleMessageSentSuccess( - message, - effectiveTimestamp, - wrappedEnvelope - ); - - const cb = this.pendingMessageCache.callbacks.get(message.identifier); - - if (cb) { - await cb(message); - } - this.pendingMessageCache.callbacks.delete(message.identifier); - } catch (error) { - void MessageSentHandler.handleMessageSentFailure(message, error); - } finally { - // Remove from the cache because retrying is done in the sender - void this.pendingMessageCache.remove(message); - } + await this.sendSingleMessageAndHandleResult({ rawMessage: message, isSyncMessage }); }; await jobQueue.addWithId(messageId, job); } @@ -306,7 +396,7 @@ export class MessageQueue { } /** - * This method should be called when the app is started and the user loggedin to fetch + * This method should be called when the app is started and the user logged in to fetch * existing message waiting to be sent in the cache of message */ public async processAllPending() { @@ -323,14 +413,13 @@ export class MessageQueue { destinationPk: PubKey, message: ContentMessage, namespace: SnodeNamespaces, - sentCb?: (message: RawMessage) => Promise, + sentCb?: (message: OutgoingRawMessage) => Promise, isGroup = false ): Promise { // Don't send to ourselves - const us = UserUtils.getOurPubKeyFromCache(); let isSyncMessage = false; - if (us && destinationPk.isEqual(us)) { - // We allow a message for ourselves only if it's a ConfigurationMessage, a 
ClosedGroupNewMessage, + if (UserUtils.isUsFromCache(destinationPk)) { + // We allow a message for ourselves only if it's a ClosedGroupNewMessage, // or a message with a syncTarget set. if (MessageSender.isContentSyncMessage(message)) { @@ -357,11 +446,15 @@ export class MessageQueue { } } -let messageQueue: MessageQueue; +let messageQueueSingleton: MessageQueueCl; -export function getMessageQueue(): MessageQueue { - if (!messageQueue) { - messageQueue = new MessageQueue(); +function use(): MessageQueueCl { + if (!messageQueueSingleton) { + messageQueueSingleton = new MessageQueueCl(); } - return messageQueue; + return messageQueueSingleton; } + +export const MessageQueue = { + use, +}; diff --git a/ts/session/sending/MessageSender.ts b/ts/session/sending/MessageSender.ts index 8d27dde96d..2f446cc556 100644 --- a/ts/session/sending/MessageSender.ts +++ b/ts/session/sending/MessageSender.ts @@ -1,86 +1,72 @@ // REMOVE COMMENT AFTER: This can just export pure functions as it doesn't need state import { AbortController } from 'abort-controller'; -import ByteBuffer from 'bytebuffer'; -import _, { isEmpty, isNil, isNumber, isString, sample, toNumber } from 'lodash'; +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { isArray, isEmpty, isNumber, isString } from 'lodash'; import pRetry from 'p-retry'; -import { Data } from '../../data/data'; -import { SignalService } from '../../protobuf'; +import { Data, SeenMessageHashes } from '../../data/data'; +import { UserGroupsWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; import { OpenGroupMessageV2 } from '../apis/open_group_api/opengroupV2/OpenGroupMessageV2'; import { sendMessageOnionV4BlindedRequest, sendSogsMessageOnionV4, } from '../apis/open_group_api/sogsv3/sogsV3SendMessage'; import { + BuiltSnodeSubRequests, + DeleteAllFromGroupMsgNodeSubRequest, + DeleteHashesFromGroupNodeSubRequest, + DeleteHashesFromUserNodeSubRequest, + MethodBatchType, NotEmptyArrayOfBatchResults, - StoreOnNodeMessage, - StoreOnNodeParams, - StoreOnNodeParamsNoSig, + RawSnodeSubRequests, + StoreGroupInfoSubRequest, + StoreGroupKeysSubRequest, + StoreGroupMembersSubRequest, + StoreGroupMessageSubRequest, + StoreGroupRevokedRetrievableSubRequest, + StoreLegacyGroupMessageSubRequest, + StoreUserConfigSubRequest, + StoreUserMessageSubRequest, + SubaccountRevokeSubRequest, + SubaccountUnrevokeSubRequest, } from '../apis/snode_api/SnodeRequestTypes'; +import { BatchRequests } from '../apis/snode_api/batchRequest'; import { GetNetworkTime } from '../apis/snode_api/getNetworkTime'; import { SnodeNamespace, SnodeNamespaces } from '../apis/snode_api/namespaces'; -import { getSwarmFor } from '../apis/snode_api/snodePool'; -import { SnodeSignature, SnodeSignatureResult } from '../apis/snode_api/snodeSignatures'; -import { SnodeAPIStore } from '../apis/snode_api/storeMessage'; -import { getConversationController } from '../conversations'; -import { MessageEncrypter } from '../crypto'; +import { + SigResultAdmin, + SigResultSubAccount, + SnodeGroupSignature, +} from '../apis/snode_api/signature/groupSignature'; +import { SnodeSignature, SnodeSignatureResult } from '../apis/snode_api/signature/snodeSignatures'; +import { SnodePool } from '../apis/snode_api/snodePool'; +import { TTL_DEFAULT } from '../constants'; +import { ConvoHub } from '../conversations'; import { addMessagePadding } from '../crypto/BufferPadding'; import { ContentMessage } from '../messages/outgoing'; -import { ConfigurationMessage } from 
'../messages/outgoing/controlMessage/ConfigurationMessage'; -import { SharedConfigMessage } from '../messages/outgoing/controlMessage/SharedConfigMessage'; import { UnsendMessage } from '../messages/outgoing/controlMessage/UnsendMessage'; import { ClosedGroupNewMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNewMessage'; import { OpenGroupVisibleMessage } from '../messages/outgoing/visibleMessage/OpenGroupVisibleMessage'; import { PubKey } from '../types'; -import { RawMessage } from '../types/RawMessage'; +import { OutgoingRawMessage } from '../types/RawMessage'; import { UserUtils } from '../utils'; import { ed25519Str, fromUInt8ArrayToBase64 } from '../utils/String'; -import { EmptySwarmError } from '../utils/errors'; +import { MessageSentHandler } from './MessageSentHandler'; +import { EncryptAndWrapMessageResults, MessageWrapper } from './MessageWrapper'; +import { stringify } from '../../types/sqlSharedTypes'; import { OpenGroupRequestCommonType } from '../../data/types'; +import { NetworkTime } from '../../util/NetworkTime'; // ================ SNODE STORE ================ -function overwriteOutgoingTimestampWithNetworkTimestamp(message: { plainTextBuffer: Uint8Array }) { - const networkTimestamp = GetNetworkTime.getNowWithNetworkOffset(); - - const { plainTextBuffer } = message; - const contentDecoded = SignalService.Content.decode(plainTextBuffer); - - const { dataMessage, dataExtractionNotification, typingMessage } = contentDecoded; - if (dataMessage && dataMessage.timestamp && toNumber(dataMessage.timestamp) > 0) { - // this is a sync message, do not overwrite the message timestamp - if (dataMessage.syncTarget) { - return { - overRiddenTimestampBuffer: plainTextBuffer, - networkTimestamp: _.toNumber(dataMessage.timestamp), - }; - } - dataMessage.timestamp = networkTimestamp; - } - if ( - dataExtractionNotification && - dataExtractionNotification.timestamp && - toNumber(dataExtractionNotification.timestamp) > 0 - ) { - dataExtractionNotification.timestamp = networkTimestamp; - } - if (typingMessage && typingMessage.timestamp && toNumber(typingMessage.timestamp) > 0) { - typingMessage.timestamp = networkTimestamp; - } - const overRiddenTimestampBuffer = SignalService.Content.encode(contentDecoded).finish(); - return { overRiddenTimestampBuffer, networkTimestamp }; -} - function getMinRetryTimeout() { return 1000; } function isContentSyncMessage(message: ContentMessage) { if ( - message instanceof ConfigurationMessage || message instanceof ClosedGroupNewMessage || message instanceof UnsendMessage || - message instanceof SharedConfigMessage || (message as any).syncTarget?.length > 0 ) { return true; @@ -88,35 +74,185 @@ function isContentSyncMessage(message: ContentMessage) { return false; } +type StoreRequest05 = + | StoreUserConfigSubRequest + | StoreUserMessageSubRequest + | StoreLegacyGroupMessageSubRequest; +type StoreRequest03 = + | StoreGroupInfoSubRequest + | StoreGroupMembersSubRequest + | StoreGroupKeysSubRequest + | StoreGroupRevokedRetrievableSubRequest + | StoreGroupMessageSubRequest; + +type StoreRequestPerPubkey = T extends PubkeyType + ? 
StoreRequest05 + : StoreRequest03; + +type EncryptedMessageDetails = Pick< + EncryptAndWrapMessageResults, + | 'namespace' + | 'encryptedAndWrappedData' + | 'identifier' + | 'ttl' + | 'networkTimestamp' + | 'plainTextBuffer' +>; + +async function messageToRequest05({ + destination, + encryptedAndWrapped: { + namespace, + encryptedAndWrappedData, + identifier, + ttl, + networkTimestamp, + plainTextBuffer, + }, +}: { + destination: PubkeyType; + encryptedAndWrapped: EncryptedMessageDetails; +}): Promise { + const shared05Arguments = { + encryptedData: encryptedAndWrappedData, + dbMessageIdentifier: identifier || null, + ttlMs: ttl, + destination, + namespace, + createdAtNetworkTimestamp: networkTimestamp, + plainTextBuffer, + }; + if (namespace === SnodeNamespaces.Default) { + return new StoreUserMessageSubRequest(shared05Arguments); + } + if (namespace === SnodeNamespaces.LegacyClosedGroup) { + return new StoreLegacyGroupMessageSubRequest(shared05Arguments); + } + if (SnodeNamespace.isUserConfigNamespace(namespace)) { + return new StoreUserConfigSubRequest(shared05Arguments); + } + + window.log.error( + `unhandled messageToRequest05 case with details: ${ed25519Str(destination)},namespace: ${namespace}` + ); + throw new Error( + `unhandled messageToRequest05 case for 05 ${ed25519Str(destination)} and namespace ${namespace}` + ); +} + +async function messageToRequest03({ + destination, + encryptedAndWrapped: { namespace, encryptedAndWrappedData, identifier, ttl, networkTimestamp }, +}: { + destination: GroupPubkeyType; + encryptedAndWrapped: Pick< + EncryptAndWrapMessageResults, + 'namespace' | 'encryptedAndWrappedData' | 'identifier' | 'ttl' | 'networkTimestamp' + >; +}): Promise { + const group = await UserGroupsWrapperActions.getGroup(destination); + if (!group) { + window.log.warn( + `messageToRequest03: no such group found in wrapper: ${ed25519Str(destination)}` + ); + throw new Error('messageToRequest03: no such group found in wrapper'); + } + const shared03Arguments = { + encryptedData: encryptedAndWrappedData, + namespace, + ttlMs: ttl, + groupPk: destination, + dbMessageIdentifier: identifier || null, + createdAtNetworkTimestamp: networkTimestamp, + ...group, + }; + if ( + SnodeNamespace.isGroupConfigNamespace(namespace) || + namespace === SnodeNamespaces.ClosedGroupMessages + ) { + return new StoreGroupMessageSubRequest(shared03Arguments); + } + window.log.error( + `unhandled messageToRequest03 case with details: ${ed25519Str(destination)},namespace: ${namespace}` + ); + throw new Error( + `unhandled messageToRequest03 case for 03 ${ed25519Str(destination)} and namespace ${namespace}` + ); +} + +async function messageToRequest({ + destination, + encryptedAndWrapped, +}: { + destination: T; + encryptedAndWrapped: EncryptedMessageDetails; +}): Promise> { + if (PubKey.is03Pubkey(destination)) { + const req = await messageToRequest03({ destination, encryptedAndWrapped }); + return req as StoreRequestPerPubkey; // this is mandatory, sadly + } + if (PubKey.is05Pubkey(destination)) { + const req = await messageToRequest05({ + destination, + encryptedAndWrapped, + }); + return req as StoreRequestPerPubkey; // this is mandatory, sadly + } + + throw new Error('messageToRequest: unhandled case'); +} + +async function messagesToRequests({ + destination, + encryptedAndWrappedArr, +}: { + destination: T; + encryptedAndWrappedArr: Array; +}): Promise>> { + const subRequests: Array> = []; + for (let index = 0; index < encryptedAndWrappedArr.length; index++) { + const encryptedAndWrapped = 
encryptedAndWrappedArr[index]; + // eslint-disable-next-line no-await-in-loop + const req = await messageToRequest({ destination, encryptedAndWrapped }); + subRequests.push(req); + } + return subRequests; +} + /** * Send a single message via service nodes. * * @param message The message to send. * @param attempts The amount of times to attempt sending. Minimum value is 1. */ -async function send({ + +async function sendSingleMessage({ message, retryMinTimeout = 100, attempts = 3, isSyncMessage, }: { - message: RawMessage; + message: OutgoingRawMessage; attempts?: number; retryMinTimeout?: number; // in ms isSyncMessage: boolean; }): Promise<{ wrappedEnvelope: Uint8Array; effectiveTimestamp: number }> { + const destination = message.device; + if (!PubKey.is03Pubkey(destination) && !PubKey.is05Pubkey(destination)) { + throw new Error('MessageSender rawMessage was given invalid pubkey'); + } return pRetry( async () => { const recipient = PubKey.cast(message.device); - // we can only have a single message in this send function for now - const [encryptedAndWrapped] = await encryptMessagesAndWrap([ + const [encryptedAndWrapped] = await MessageWrapper.encryptMessagesAndWrap([ { destination: message.device, plainTextBuffer: message.plainTextBuffer, namespace: message.namespace, ttl: message.ttl, identifier: message.identifier, + networkTimestamp: message.networkTimestampCreated, isSyncMessage: Boolean(isSyncMessage), }, ]); @@ -125,15 +261,12 @@ // before we return from the await below. // and the isDuplicate messages relies on sent_at timestamp to be valid. const found = await Data.getMessageById(encryptedAndWrapped.identifier); + // make sure to not update the sent timestamp if this is a currently syncing message if (found && !found.get('sentSync')) { found.set({ sent_at: encryptedAndWrapped.networkTimestamp }); await found.commit(); } - let foundMessage = encryptedAndWrapped.identifier - ? 
await Data.getMessageById(encryptedAndWrapped.identifier) - : null; - const isSyncedDeleteAfterReadMessage = found && UserUtils.isUsFromCache(recipient.key) && @@ -141,63 +274,29 @@ async function send({ found.getExpireTimerSeconds() > 0 && encryptedAndWrapped.isSyncMessage; - let overridenTtl = encryptedAndWrapped.ttl; + let overriddenTtl = encryptedAndWrapped.ttl; if (isSyncedDeleteAfterReadMessage && found.getExpireTimerSeconds() > 0) { const asMs = found.getExpireTimerSeconds() * 1000; window.log.debug(`overriding ttl for synced DaR message to ${asMs}`); - overridenTtl = asMs; + overriddenTtl = asMs; } - const batchResult = await MessageSender.sendMessagesDataToSnode( - [ - { - pubkey: recipient.key, - data64: encryptedAndWrapped.data64, - ttl: overridenTtl, - timestamp: encryptedAndWrapped.networkTimestamp, - namespace: encryptedAndWrapped.namespace, - }, - ], - recipient.key, - null + const subRequests = await messagesToRequests({ + encryptedAndWrappedArr: [{ ...encryptedAndWrapped, ttl: overriddenTtl }], + destination, + }); + + const targetNode = await SnodePool.getNodeFromSwarmOrThrow(destination); + const batchResult = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + subRequests, + targetNode, + 6000, + destination, + false ); - - const isDestinationClosedGroup = getConversationController() - .get(recipient.key) - ?.isClosedGroup(); - const storedAt = batchResult?.[0]?.body?.t; - const storedHash = batchResult?.[0]?.body?.hash; - - if ( - batchResult && - !isEmpty(batchResult) && - batchResult[0].code === 200 && - !isEmpty(storedHash) && - isString(storedHash) && - isNumber(storedAt) - ) { - // TODO: the expiration is due to be returned by the storage server on "store" soon, we will then be able to use it instead of doing the storedAt + ttl logic below - // if we have a hash and a storedAt, mark it as seen so we don't reprocess it on the next retrieve - await Data.saveSeenMessageHashes([ - { expiresAt: storedAt + encryptedAndWrapped.ttl, hash: storedHash }, - ]); - // If message also has a sync message, save that hash. Otherwise save the hash from the regular message send i.e. only closed groups in this case. 
- - if ( - encryptedAndWrapped.identifier && - (encryptedAndWrapped.isSyncMessage || isDestinationClosedGroup) - ) { - // get a fresh copy of the message from the DB - foundMessage = await Data.getMessageById(encryptedAndWrapped.identifier); - if (foundMessage) { - await foundMessage.updateMessageHash(storedHash); - await foundMessage.commit(); - } - } - } - + await handleBatchResultWithSubRequests({ batchResult, subRequests, destination }); return { - wrappedEnvelope: encryptedAndWrapped.data, + wrappedEnvelope: encryptedAndWrapped.encryptedAndWrappedData, effectiveTimestamp: encryptedAndWrapped.networkTimestamp, }; }, @@ -209,204 +308,204 @@ async function send({ ); } -async function sendMessagesDataToSnode( - params: Array, - destination: string, - messagesHashesToDelete: Set | null -): Promise { - const rightDestination = params.filter(m => m.pubkey === destination); - const swarm = await getSwarmFor(destination); - - const withSigWhenRequired: Array = await Promise.all( - rightDestination.map(async item => { - // some namespaces require a signature to be added - let signOpts: SnodeSignatureResult | undefined; - if (SnodeNamespace.isUserConfigNamespace(item.namespace)) { - signOpts = await SnodeSignature.getSnodeSignatureParams({ - method: 'store' as const, - namespace: item.namespace, - pubkey: destination, - }); - } - const store: StoreOnNodeParams = { - data: item.data64, - namespace: item.namespace, - pubkey: item.pubkey, - timestamp: item.timestamp, - // sig_timestamp: item.timestamp, - // sig_timestamp is currently not forwarded from the receiving snode to the other swarm members, and so their sig verify fail. - // This timestamp is not really needed so we just don't send it in the meantime (the timestamp value is used if the sig_timestamp is not present) - ttl: item.ttl, - ...signOpts, - }; - return store; +async function getSignatureParamsFromNamespace( + { namespace }: { namespace: SnodeNamespaces }, + destination: string +): Promise { + const store = 'store' as const; + if (SnodeNamespace.isUserConfigNamespace(namespace)) { + const ourPrivKey = (await UserUtils.getUserED25519KeyPairBytes())?.privKeyBytes; + if (!ourPrivKey) { + throw new Error( + 'getSignatureParamsFromNamespace UserUtils.getUserED25519KeyPairBytes is empty' + ); + } + return SnodeSignature.getSnodeSignatureParamsUs({ + method: store, + namespace, + }); + } + + if ( + SnodeNamespace.isGroupConfigNamespace(namespace) || + namespace === SnodeNamespaces.ClosedGroupMessages || + namespace === SnodeNamespaces.ClosedGroupRevokedRetrievableMessages + ) { + if (!PubKey.is03Pubkey(destination)) { + throw new Error( + 'getSignatureParamsFromNamespace: group config namespace required a 03 pubkey' + ); + } + const found = await UserGroupsWrapperActions.getGroup(destination); + return SnodeGroupSignature.getSnodeGroupSignature({ + method: store, + namespace, + group: found, + }); + } + // no signature required for this namespace/pubkey combo + return {}; +} + +function logBuildSubRequests(subRequests: Array) { + if (!window.sessionFeatureFlags.debug.debugBuiltSnodeRequests) { + return; + } + window.log.debug( + `\n========================================\nsubRequests: [\n\t${subRequests + .map(m => { + return stringify(m); + }) + .join(',\n\t')}]\n========================================` + ); +} + +async function signSubRequests( + params: Array +): Promise> { + const signedRequests: Array = await Promise.all( + params.map(p => { + return p.build(); }) ); - const signedDeleteOldHashesRequest = - messagesHashesToDelete && 
messagesHashesToDelete.size - ? await SnodeSignature.getSnodeSignatureByHashesParams({ - method: 'delete' as const, - messages: [...messagesHashesToDelete], - pubkey: destination, - }) - : null; - - const snode = sample(swarm); - if (!snode) { - throw new EmptySwarmError(destination, 'Ran out of swarm nodes to query'); - } + logBuildSubRequests(signedRequests); - try { - // No pRetry here as if this is a bad path it will be handled and retried in lokiOnionFetch. - const storeResults = await SnodeAPIStore.storeOnNode( - snode, - withSigWhenRequired, - signedDeleteOldHashesRequest - ); + return signedRequests; +} - if (!isEmpty(storeResults)) { - window?.log?.info( - `sendMessagesToSnode - Successfully stored messages to ${ed25519Str(destination)} via ${ - snode.ip - }:${snode.port} on namespaces: ${rightDestination.map(m => m.namespace).join(',')}` - ); - } +type DeleteHashesRequestPerPubkey = T extends PubkeyType + ? DeleteHashesFromUserNodeSubRequest + : DeleteHashesFromGroupNodeSubRequest; - return storeResults; - } catch (e) { - const snodeStr = snode ? `${snode.ip}:${snode.port}` : 'null'; - window?.log?.warn( - `sendMessagesToSnode - "${e.code}:${e.message}" to ${destination} via snode:${snodeStr}` +/** + * Make sure that all the sub requests have been given in their sendingOrder, or throw an error. + */ +function assertRequestsAreSorted({ subRequests }: { subRequests: Array }) { + const allSorted = subRequests.every((current, index) => { + const currentOrder = current.requestOrder(); + const previousOrder = + index > 0 ? subRequests[index - 1].requestOrder() : Number.MIN_SAFE_INTEGER; + return currentOrder >= previousOrder; + }); + if (!allSorted) { + throw new Error( + 'assertRequestsAreSorted: Some sub requests are not correctly sorted by requestOrder().' ); - throw e; } } -function encryptionBasedOnConversation(destination: PubKey) { - if (getConversationController().get(destination.key)?.isClosedGroup()) { - return SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE; +type SortedSubRequestsType = Array< + | StoreRequestPerPubkey + | DeleteHashesRequestPerPubkey + | DeleteAllFromGroupMsgNodeSubRequest + | SubaccountRevokeSubRequest + | SubaccountUnrevokeSubRequest +>; + +async function sendMessagesDataToSnode({ + associatedWith, + sortedSubRequests, + method, +}: { + sortedSubRequests: SortedSubRequestsType; + associatedWith: T; + method: MethodBatchType; +}): Promise { + if (!associatedWith) { + throw new Error('sendMessagesDataToSnode first sub request pubkey needs to be set'); } - return SignalService.Envelope.Type.SESSION_MESSAGE; -} -type SharedEncryptAndWrap = { - ttl: number; - identifier: string; - isSyncMessage: boolean; -}; + if (sortedSubRequests.some(m => m.destination !== associatedWith)) { + throw new Error( + 'sendMessagesDataToSnode tried to send batch request containing sub request not for the right destination' + ); + } -type EncryptAndWrapMessage = { - plainTextBuffer: Uint8Array; - destination: string; - namespace: number | null; -} & SharedEncryptAndWrap; - -type EncryptAndWrapMessageResults = { - data64: string; - networkTimestamp: number; - data: Uint8Array; - namespace: number; -} & SharedEncryptAndWrap; - -async function encryptMessageAndWrap( - params: EncryptAndWrapMessage -): Promise { - const { - destination, - identifier, - isSyncMessage: syncMessage, - namespace, - plainTextBuffer, - ttl, - } = params; + // Note: we want to make sure the caller sorted those sub requests, as it might try to handle the batch result based on the index. 
+ // If we sorted the requests here, we'd need to make sure the caller knows that the results are not in order he sent them. + assertRequestsAreSorted({ subRequests: sortedSubRequests }); - const { overRiddenTimestampBuffer, networkTimestamp } = - overwriteOutgoingTimestampWithNetworkTimestamp({ plainTextBuffer }); - const recipient = PubKey.cast(destination); + const targetNode = await SnodePool.getNodeFromSwarmOrThrow(associatedWith); - const { envelopeType, cipherText } = await MessageEncrypter.encrypt( - recipient, - overRiddenTimestampBuffer, - encryptionBasedOnConversation(recipient) - ); + try { + const responses = await BatchRequests.doUnsignedSnodeBatchRequestNoRetries( + sortedSubRequests, + targetNode, + 6000, + associatedWith, + false, + method + ); - const envelope = await buildEnvelope(envelopeType, recipient.key, networkTimestamp, cipherText); + if (!responses || !responses.length) { + window?.log?.warn( + `SessionSnodeAPI::doUnsignedSnodeBatchRequestNoRetries on ${targetNode.ip}:${targetNode.port} returned falsy value`, + responses + ); + throw new Error('doUnsignedSnodeBatchRequestNoRetries: Invalid result'); + } + await handleBatchResultWithSubRequests({ + batchResult: responses, + subRequests: sortedSubRequests, + destination: associatedWith, + }); - const data = wrapEnvelope(envelope); - const data64 = ByteBuffer.wrap(data).toString('base64'); + const firstResult = responses[0]; - // override the namespaces if those are unset in the incoming messages - // right when we upgrade from not having namespaces stored in the outgoing cached messages our messages won't have a namespace associated. - // So we need to keep doing the lookup of where they should go if the namespace is not set. + if (firstResult.code !== 200) { + window?.log?.warn( + 'first result status is not 200 for sendMessagesDataToSnode but: ', + firstResult.code + ); + throw new Error('sendMessagesDataToSnode: Invalid status code'); + } - const overridenNamespace = !isNil(namespace) - ? namespace - : getConversationController().get(recipient.key)?.isClosedGroup() - ? SnodeNamespaces.ClosedGroupMessage - : SnodeNamespaces.UserMessages; + GetNetworkTime.handleTimestampOffsetFromNetwork('store', firstResult.body.t); - return { - data64, - networkTimestamp, - data, - namespace: overridenNamespace, - ttl, - identifier, - isSyncMessage: syncMessage, - }; -} + if (!isEmpty(responses)) { + window?.log?.info( + `sendMessagesDataToSnode - Successfully sent requests to ${ed25519Str( + associatedWith + )} via ${ed25519Str(targetNode.pubkey_ed25519)} (requests: ${sortedSubRequests.map(m => m.loggingId()).join(', ')})` + ); + } -async function encryptMessagesAndWrap( - messages: Array -): Promise> { - return Promise.all(messages.map(encryptMessageAndWrap)); + return responses; + } catch (e) { + const snodeStr = targetNode ? `${ed25519Str(targetNode.pubkey_ed25519)}` : 'null'; + window?.log?.warn( + `sendMessagesDataToSnode - "${e.code}:${e.message}" to ${associatedWith} via snode:${snodeStr}` + ); + throw e; + } } /** - * Send a list of messages to a single service node. - * Used currently only for sending SharedConfigMessage for multiple messages at a time. + * Send an array of pre-encrypted data to the corresponding swarm. 
+ * Note: also handles the result of each sub request with `handleBatchResultWithSubRequests` + * - * @param params the messages to deposit - * @param destination the pubkey we should deposit those message for - * @returns the hashes of successful deposit + * @param params the data to deposit + * @param destination the pubkey we should deposit those messages to + * @returns the batch/sequence results if further processing is needed */ -async function sendMessagesToSnode( - params: Array, - destination: string, - messagesHashesToDelete: Set | null -): Promise { +async function sendEncryptedDataToSnode({ + destination, + sortedSubRequests, + method, +}: { + sortedSubRequests: SortedSubRequestsType; // keeping those as an array because the order needs to be enforced for some (group keys for instance) + destination: T; + method: MethodBatchType; +}): Promise { try { - const recipient = PubKey.cast(destination); - - const encryptedAndWrapped: Array> = - []; - - params.forEach(m => { - const wrapped = { - identifier: m.message.identifier, - isSyncMessage: MessageSender.isContentSyncMessage(m.message), - namespace: m.namespace, - ttl: m.message.ttl(), - networkTimestamp: GetNetworkTime.getNowWithNetworkOffset(), - data64: ByteBuffer.wrap(m.message.readyToSendData).toString('base64'), - }; - encryptedAndWrapped.push(wrapped); - }); - const batchResults = await pRetry( async () => { - return MessageSender.sendMessagesDataToSnode( - encryptedAndWrapped.map(wrapped => ({ - pubkey: recipient.key, - data64: wrapped.data64, - ttl: wrapped.ttl, - timestamp: wrapped.networkTimestamp, - namespace: wrapped.namespace, - })), - recipient.key, - messagesHashesToDelete - ); + return MessageSender.sendMessagesDataToSnode({ + sortedSubRequests, + associatedWith: destination, + method, + }); }, { retries: 2, @@ -417,55 +516,16 @@ ); if (!batchResults || isEmpty(batchResults)) { - throw new Error('result is empty for sendMessagesToSnode'); + throw new Error('result is empty for sendEncryptedDataToSnode'); } return batchResults; } catch (e) { - window.log.warn(`sendMessagesToSnode failed with ${e.message}`); + window.log.warn(`sendEncryptedDataToSnode failed with ${e.message}`); return null; } } -async function buildEnvelope( - type: SignalService.Envelope.Type, - sskSource: string | undefined, - timestamp: number, - content: Uint8Array -): Promise { - let source: string | undefined; - - if (type === SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE) { - source = sskSource; - } - - return SignalService.Envelope.create({ - type, - source, - timestamp, - content, - }); -} - -/** - * This is an outdated practice and we should probably just send the envelope data directly. - * Something to think about in the future. - */ -function wrapEnvelope(envelope: SignalService.Envelope): Uint8Array { - const request = SignalService.WebSocketRequestMessage.create({ - id: 0, - body: SignalService.Envelope.encode(envelope).finish(), - verb: 'PUT', - path: '/api/v1/message', - }); - - const websocket = SignalService.WebSocketMessage.create({ - type: SignalService.WebSocketMessage.Type.REQUEST, - request, - }); - return SignalService.WebSocketMessage.encode(websocket).finish(); -} - // ================ Open Group ================ /** * Send a message to an open group v2. 
@@ -477,10 +537,10 @@ async function sendToOpenGroupV2( blinded: boolean, filesToLink: Array ): Promise { - // we agreed to pad message for opengroupv2 + // we agreed to pad message for opengroup v2 const paddedBody = addMessagePadding(rawMessage.plainTextBuffer()); const v2Message = new OpenGroupMessageV2({ - sentTimestamp: GetNetworkTime.getNowWithNetworkOffset(), + sentTimestamp: NetworkTime.now(), base64EncodedData: fromUInt8ArrayToBase64(paddedBody), filesToLink, }); @@ -505,7 +565,7 @@ async function sendToOpenGroupV2BlindedRequest( recipientBlindedId: string ): Promise<{ serverId: number; serverTimestamp: number }> { const v2Message = new OpenGroupMessageV2({ - sentTimestamp: GetNetworkTime.getNowWithNetworkOffset(), + sentTimestamp: NetworkTime.now(), base64EncodedData: fromUInt8ArrayToBase64(encryptedContent), }); @@ -523,9 +583,87 @@ async function sendToOpenGroupV2BlindedRequest( export const MessageSender = { sendToOpenGroupV2BlindedRequest, sendMessagesDataToSnode, - sendMessagesToSnode, + sendEncryptedDataToSnode, getMinRetryTimeout, sendToOpenGroupV2, - send, + sendSingleMessage, isContentSyncMessage, + getSignatureParamsFromNamespace, + signSubRequests, + messagesToRequests, + destinationIsClosedGroup, }; + +function destinationIsClosedGroup(destination: string) { + return ConvoHub.use().get(destination)?.isClosedGroup(); +} + +/** + * Note: this function does not handle the syncing logic of messages yet. + * Use it to push message to group, to note to self, or with user messages which do not require a syncing logic + */ +async function handleBatchResultWithSubRequests({ + batchResult, + destination, + subRequests, +}: { + batchResult: NotEmptyArrayOfBatchResults; + subRequests: Array; + destination: string; +}) { + if (!batchResult || !isArray(batchResult) || isEmpty(batchResult)) { + window.log.error('handleBatchResultWithSubRequests: invalid batch result '); + return; + } + + const seenHashes: Array = []; + + for (let index = 0; index < subRequests.length; index++) { + const subRequest = subRequests[index]; + + // there are some things we need to do when storing messages + // for groups/legacy groups or user (but not for config messages) + if ( + subRequest instanceof StoreGroupMessageSubRequest || + subRequest instanceof StoreLegacyGroupMessageSubRequest || + subRequest instanceof StoreUserMessageSubRequest + ) { + const storedAt = batchResult?.[index]?.body?.t; + const storedHash = batchResult?.[index]?.body?.hash; + const subRequestStatusCode = batchResult?.[index]?.code; + // TODO: the expiration is due to be returned by the storage server on "store" soon, we will then be able to use it instead of doing the storedAt + ttl logic below + // if we have a hash and a storedAt, mark it as seen so we don't reprocess it on the next retrieve + if ( + subRequestStatusCode === 200 && + !isEmpty(storedHash) && + isString(storedHash) && + isNumber(storedAt) + ) { + seenHashes.push({ + expiresAt: NetworkTime.now() + TTL_DEFAULT.CONTENT_MESSAGE, // non config msg expire at CONTENT_MESSAGE at most + hash: storedHash, + }); + + // We need to store the hash of our synced message when for a 1o1. 
(as this is the one stored on our swarm) + // For groups, we can just store that hash directly as the group's swarm is hosting all of the group messages + if (subRequest.dbMessageIdentifier) { + // eslint-disable-next-line no-await-in-loop + await MessageSentHandler.handleSwarmMessageSentSuccess( + { + device: subRequest.destination, + isDestinationClosedGroup: MessageSender.destinationIsClosedGroup(destination), + identifier: subRequest.dbMessageIdentifier, + plainTextBuffer: + subRequest instanceof StoreUserMessageSubRequest + ? subRequest.plainTextBuffer + : null, + }, + subRequest.createdAtNetworkTimestamp, + storedHash + ); + } + } + } + } + await Data.saveSeenMessageHashes(seenHashes); +} diff --git a/ts/session/sending/MessageSentHandler.ts b/ts/session/sending/MessageSentHandler.ts index e25cd1af36..c503f2f7ac 100644 --- a/ts/session/sending/MessageSentHandler.ts +++ b/ts/session/sending/MessageSentHandler.ts @@ -1,10 +1,9 @@ -import _ from 'lodash'; +import { union } from 'lodash'; import { Data } from '../../data/data'; import { SignalService } from '../../protobuf'; -import { PnServer } from '../apis/push_notification_api'; import { DisappearingMessages } from '../disappearing_messages'; import { OpenGroupVisibleMessage } from '../messages/outgoing/visibleMessage/OpenGroupVisibleMessage'; -import { RawMessage } from '../types'; +import { OutgoingRawMessage, PubKey } from '../types'; import { UserUtils } from '../utils'; async function handlePublicMessageSentSuccess( @@ -41,29 +40,60 @@ async function handlePublicMessageSentSuccess( } } -async function handleMessageSentSuccess( - sentMessage: RawMessage, +async function handlePublicMessageSentFailure(sentMessage: OpenGroupVisibleMessage, error: any) { + const fetchedMessage = await fetchHandleMessageSentData(sentMessage.identifier); + if (!fetchedMessage) { + return; + } + + if (error instanceof Error) { + await fetchedMessage.saveErrors(error); + } + + // always mark the message as sent. + // the fact that we have errors on the sent is based on the saveErrors() + fetchedMessage.set({ + sent: true, + }); + + await fetchedMessage.commit(); + await fetchedMessage.getConversation()?.updateLastMessage(); +} + +async function handleSwarmMessageSentSuccess( + { + device: destination, + identifier, + isDestinationClosedGroup, + plainTextBuffer, + }: Pick & { + /** + * plainTextBuffer is only required when sending a message to a 1o1, + * as we need it to encrypt it again for our linked devices (synced messages) + */ + plainTextBuffer: Uint8Array | null; + /** + * We must not sync a message when it was sent to a closed group + */ + isDestinationClosedGroup: boolean; + }, effectiveTimestamp: number, - wrappedEnvelope?: Uint8Array + storedHash: string | null ) { // The wrappedEnvelope will be set only if the message is not one of OpenGroupV2Message type. 
- let fetchedMessage = await fetchHandleMessageSentData(sentMessage.identifier); + let fetchedMessage = await fetchHandleMessageSentData(identifier); if (!fetchedMessage) { return; } let sentTo = fetchedMessage.get('sent_to') || []; - const isOurDevice = UserUtils.isUsFromCache(sentMessage.device); + const isOurDevice = UserUtils.isUsFromCache(destination); - // FIXME this is not correct and will cause issues with syncing - // At this point the only way to check for medium - // group is by comparing the encryption type - const isClosedGroupMessage = - sentMessage.encryption === SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE; + const isClosedGroupMessage = isDestinationClosedGroup || PubKey.is03Pubkey(destination); // We trigger a sync message only when the message is not to one of our devices, AND - // the message is not for an open group (there is no sync for opengroups, each device pulls all messages), AND + // the message is not for a group (there is no sync for groups, each device pulls all messages), AND // if we did not sync or trigger a sync message for this specific message already const shouldTriggerSyncMessage = !isOurDevice && @@ -73,53 +103,41 @@ async function handleMessageSentSuccess( // A message is synced if we triggered a sync message (sentSync) // and the current message was sent to our device (so a sync message) - const shouldMarkMessageAsSynced = isOurDevice && fetchedMessage.get('sentSync'); - - const contentDecoded = SignalService.Content.decode(sentMessage.plainTextBuffer); - const { dataMessage } = contentDecoded; - - /** - * We should hit the notify endpoint for push notification only if: - * • It's a one-to-one chat or a closed group - * • The message has either text or attachments - */ - const hasBodyOrAttachments = Boolean( - dataMessage && (dataMessage.body || (dataMessage.attachments && dataMessage.attachments.length)) - ); - const shouldNotifyPushServer = hasBodyOrAttachments && !isOurDevice; - - if (shouldNotifyPushServer) { - // notify the push notification server if needed - if (!wrappedEnvelope) { - window?.log?.warn('Should send PN notify but no wrapped envelope set.'); - } else { - // we do not really care about the result, neither of waiting for it - void PnServer.notifyPnServer(wrappedEnvelope, sentMessage.device); - } - } + const shouldMarkMessageAsSynced = + (isOurDevice && fetchedMessage.get('sentSync')) || isClosedGroupMessage; // Handle the sync logic here - if (shouldTriggerSyncMessage) { - if (dataMessage) { - try { - await fetchedMessage.sendSyncMessage(contentDecoded, effectiveTimestamp); - const tempFetchMessage = await fetchHandleMessageSentData(sentMessage.identifier); - if (!tempFetchMessage) { - window?.log?.warn( - 'Got an error while trying to sendSyncMessage(): fetchedMessage is null' - ); - return; + if (shouldTriggerSyncMessage && plainTextBuffer) { + try { + const contentDecoded = SignalService.Content.decode(plainTextBuffer); + if (contentDecoded && contentDecoded.dataMessage) { + try { + await fetchedMessage.sendSyncMessage(contentDecoded, effectiveTimestamp); + const tempFetchMessage = await fetchHandleMessageSentData(identifier); + if (!tempFetchMessage) { + window?.log?.warn( + 'Got an error while trying to sendSyncMessage(): fetchedMessage is null' + ); + return; + } + fetchedMessage = tempFetchMessage; + } catch (e) { + window?.log?.warn('Got an error while trying to sendSyncMessage():', e); } - fetchedMessage = tempFetchMessage; - } catch (e) { - window?.log?.warn('Got an error while trying to sendSyncMessage():', e); } + 
} catch (e) { + window.log.info( + 'failed to decode content (expected unless the message was a 1o1 message, as only then do we need the decoded content to send the sync message)' + ); } } else if (shouldMarkMessageAsSynced) { fetchedMessage.set({ synced: true }); } - sentTo = _.union(sentTo, [sentMessage.device]); + sentTo = union(sentTo, [destination]); + if (storedHash) { + fetchedMessage.updateMessageHash(storedHash); + } fetchedMessage.set({ sent_to: sentTo, @@ -133,8 +151,8 @@ async function handleMessageSentSuccess( fetchedMessage.getConversation()?.updateLastMessage(); } -async function handleMessageSentFailure( - sentMessage: RawMessage | OpenGroupVisibleMessage, +async function handleSwarmMessageSentFailure( + sentMessage: Pick, error: any ) { const fetchedMessage = await fetchHandleMessageSentData(sentMessage.identifier); @@ -146,14 +164,12 @@ async function handleMessageSentFailure( await fetchedMessage.saveErrors(error); } - if (!(sentMessage instanceof OpenGroupVisibleMessage)) { - const isOurDevice = UserUtils.isUsFromCache(sentMessage.device); - // if this message was for ourself, and it was not already synced, - // it means that we failed to sync it. - // so just remove the flag saying that we are currently sending the sync message - if (isOurDevice && !fetchedMessage.get('sync')) { - fetchedMessage.set({ sentSync: false }); - } + const isOurDevice = UserUtils.isUsFromCache(sentMessage.device); + // if this message was for ourself, and it was not already synced, + // it means that we failed to sync it. + // so just remove the flag saying that we are currently sending the sync message + if (isOurDevice && !fetchedMessage.get('sync')) { + fetchedMessage.set({ sentSync: false }); } // always mark the message as sent. @@ -168,7 +184,7 @@ async function handleMessageSentFailure( expirationStartTimestamp: undefined, }); window.log.warn( - `[handleMessageSentFailure] Stopping a message from disppearing until we retry the send operation. messageId: ${fetchedMessage.get( + `[handleSwarmMessageSentFailure] Stopping a message from disappearing until we retry the send operation. 
messageId: ${fetchedMessage.get( 'id' )}` ); @@ -198,6 +214,7 @@ async function fetchHandleMessageSentData(messageIdentifier: string) { export const MessageSentHandler = { handlePublicMessageSentSuccess, - handleMessageSentSuccess, - handleMessageSentFailure, + handlePublicMessageSentFailure, + handleSwarmMessageSentFailure, + handleSwarmMessageSentSuccess, }; diff --git a/ts/session/sending/MessageWrapper.ts b/ts/session/sending/MessageWrapper.ts new file mode 100644 index 0000000000..f519b478f1 --- /dev/null +++ b/ts/session/sending/MessageWrapper.ts @@ -0,0 +1,167 @@ +import { SignalService } from '../../protobuf'; +import { ConvoHub } from '../conversations'; +import { MessageEncrypter } from '../crypto/MessageEncrypter'; +import { PubKey } from '../types'; + +function encryptionBasedOnConversation(destination: PubKey) { + if (ConvoHub.use().get(destination.key)?.isClosedGroup()) { + return SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE; + } + return SignalService.Envelope.Type.SESSION_MESSAGE; +} + +type SharedEncryptAndWrap = { + ttl: number; + identifier: string; + isSyncMessage: boolean; + plainTextBuffer: Uint8Array; +}; + +type EncryptAndWrapMessage = { + destination: string; + namespace: number; + networkTimestamp: number; +} & SharedEncryptAndWrap; + +export type EncryptAndWrapMessageResults = { + networkTimestamp: number; + encryptedAndWrappedData: Uint8Array; + namespace: number; +} & SharedEncryptAndWrap; + +async function encryptForGroupV2( + params: EncryptAndWrapMessage +): Promise { + // Group v2 encryption works a bit differently: we encrypt the envelope itself through libsession. + // We essentially need to do the opposite of the usual encryption which is send envelope unencrypted with content encrypted. + const { + destination, + identifier, + isSyncMessage: syncMessage, + namespace, + plainTextBuffer, + ttl, + networkTimestamp, + } = params; + + const envelope = MessageWrapper.wrapContentIntoEnvelope( + SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE, + destination, + networkTimestamp, + plainTextBuffer + ); + + const recipient = PubKey.cast(destination); + + const { cipherText } = await MessageEncrypter.encrypt( + recipient, + SignalService.Envelope.encode(envelope).finish(), + encryptionBasedOnConversation(recipient) + ); + + return { + networkTimestamp, + encryptedAndWrappedData: cipherText, + namespace, + ttl, + identifier, + isSyncMessage: syncMessage, + plainTextBuffer, + }; +} + +function wrapContentIntoEnvelope( + type: SignalService.Envelope.Type, + sskSource: string | undefined, + timestamp: number, + content: Uint8Array +): SignalService.Envelope { + let source: string | undefined; + + if (type === SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE) { + source = sskSource; + } + + return SignalService.Envelope.create({ + type, + source, + timestamp, + content, + }); +} +/** + * This is an outdated practice and we should probably just send the envelope data directly. + * Something to think about in the future. 
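+ * Concretely (see the body below): the encoded Envelope becomes the body of a
+ * WebSocketRequestMessage with verb 'PUT' and path '/api/v1/message', and that request is
+ * wrapped in a WebSocketMessage of type REQUEST which is encoded before being sent.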
+ */ +function wrapEnvelopeInWebSocketMessage(envelope: SignalService.Envelope): Uint8Array { + const request = SignalService.WebSocketRequestMessage.create({ + id: 0, + body: SignalService.Envelope.encode(envelope).finish(), + verb: 'PUT', + path: '/api/v1/message', + }); + + const websocket = SignalService.WebSocketMessage.create({ + type: SignalService.WebSocketMessage.Type.REQUEST, + request, + }); + return SignalService.WebSocketMessage.encode(websocket).finish(); +} + +async function encryptMessageAndWrap( + params: EncryptAndWrapMessage +): Promise { + const { + destination, + identifier, + isSyncMessage: syncMessage, + namespace, + plainTextBuffer, + ttl, + networkTimestamp, + } = params; + + if (PubKey.is03Pubkey(destination)) { + return encryptForGroupV2(params); + } + + // can only be legacy group or 1o1 chats here + + const recipient = PubKey.cast(destination); + + const { envelopeType, cipherText } = await MessageEncrypter.encrypt( + recipient, + plainTextBuffer, + encryptionBasedOnConversation(recipient) + ); + + const envelope = MessageWrapper.wrapContentIntoEnvelope( + envelopeType, + recipient.key, + networkTimestamp, + cipherText + ); + const data = MessageWrapper.wrapEnvelopeInWebSocketMessage(envelope); + + return { + encryptedAndWrappedData: data, + networkTimestamp, + namespace, + ttl, + identifier, + isSyncMessage: syncMessage, + plainTextBuffer, + }; +} + +async function encryptMessagesAndWrap( + messages: Array +): Promise> { + return Promise.all(messages.map(encryptMessageAndWrap)); +} + +export const MessageWrapper = { + wrapEnvelopeInWebSocketMessage, + wrapContentIntoEnvelope, + encryptMessagesAndWrap, +}; diff --git a/ts/session/sending/PendingMessageCache.ts b/ts/session/sending/PendingMessageCache.ts index f199dd8fd5..917a02dfbd 100644 --- a/ts/session/sending/PendingMessageCache.ts +++ b/ts/session/sending/PendingMessageCache.ts @@ -1,10 +1,11 @@ -import _ from 'lodash'; +import { from_hex, to_hex } from 'libsodium-wrappers-sumo'; +import _, { compact, isNumber } from 'lodash'; import { Data } from '../../data/data'; import { Storage } from '../../util/storage'; import { SnodeNamespaces } from '../apis/snode_api/namespaces'; import { ContentMessage } from '../messages/outgoing'; import { PubKey } from '../types'; -import { PartialRawMessage, RawMessage } from '../types/RawMessage'; +import { OutgoingRawMessage, StoredRawMessage } from '../types/RawMessage'; import { MessageUtils } from '../utils'; // This is an abstraction for storing pending messages. @@ -15,18 +16,18 @@ import { MessageUtils } from '../utils'; // memory and sync its state with the database on modification (add or remove). 
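// In short: plainTextBuffer is now persisted as a hex string. A minimal sketch of the
// round-trip, assuming the to_hex/from_hex imports added above (illustrative only, not part of this class):
//   const { plainTextBuffer, ...rest } = rawMessage;
//   const stored = { ...rest, plainTextBufferHex: to_hex(plainTextBuffer) }; // written to the DB
//   const restored = { ...rest, plainTextBuffer: from_hex(stored.plainTextBufferHex) }; // read back on load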
export class PendingMessageCache { - public callbacks: Map Promise> = new Map(); + public callbacks: Map Promise> = new Map(); protected loadPromise: Promise | undefined; - protected cache: Array = []; + protected cache: Array = []; - public async getAllPending(): Promise> { + public async getAllPending(): Promise> { await this.loadFromDBIfNeeded(); // Get all pending from cache return [...this.cache]; } - public async getForDevice(device: PubKey): Promise> { + public async getForDevice(device: PubKey): Promise> { const pending = await this.getAllPending(); return pending.filter(m => m.device === device.key); } @@ -46,7 +47,7 @@ export class PendingMessageCache { namespace: SnodeNamespaces, sentCb?: (message: any) => Promise, isGroup = false - ): Promise { + ): Promise { await this.loadFromDBIfNeeded(); const rawMessage = await MessageUtils.toRawMessage( destinationPubKey, @@ -69,7 +70,7 @@ export class PendingMessageCache { return rawMessage; } - public async remove(message: RawMessage): Promise | undefined> { + public async remove(message: OutgoingRawMessage): Promise | undefined> { await this.loadFromDBIfNeeded(); // Should only be called after message is processed @@ -89,7 +90,7 @@ export class PendingMessageCache { return updatedCache; } - public find(message: RawMessage): RawMessage | undefined { + public find(message: OutgoingRawMessage): OutgoingRawMessage | undefined { // Find a message in the cache return this.cache.find(m => m.device === message.device && m.identifier === message.identifier); } @@ -114,33 +115,62 @@ export class PendingMessageCache { this.cache = messages; } - protected async getFromStorage(): Promise> { + protected async getFromStorage(): Promise> { const data = await Data.getItemById('pendingMessages'); if (!data || !data.value) { return []; } - const barePending = JSON.parse(String(data.value)) as Array; - - // Rebuild plainTextBuffer - return barePending.map((message: PartialRawMessage) => { - return { - ...message, - plainTextBuffer: new Uint8Array(message.plainTextBuffer), - } as RawMessage; - }); + try { + // let's do some cleanup, read what we have in DB, remove what is invalid, write to DB, and return filtered data. + // this is because we've added some mandatory fields recently, and the current stored messages won't have them. 
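+      // concretely: entries without a positive numeric networkTimestampCreated, or whose
+      // plainTextBufferHex cannot be decoded from hex, are dropped and the cleaned list is written back below.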
+ const barePending = JSON.parse(String(data.value)) as Array; + + const filtered = compact( + barePending.map((message: StoredRawMessage) => { + try { + // let's skip outgoing messages which have no networkTimestamp associated with them, as we need one to send a message (mapped to the envelope one) + + if ( + !message.networkTimestampCreated || + !isNumber(message.networkTimestampCreated) || + message.networkTimestampCreated <= 0 + ) { + throw new Error('networkTimestampCreated is empty <=0'); + } + + const plainTextBuffer = from_hex(message.plainTextBufferHex); // if a plaintextBufferHex is unset or not hex, this throws and we remove that message entirely + return { + ...message, + plainTextBuffer, + } as OutgoingRawMessage; + } catch (e) { + window.log.warn('failed to decode from message cache:', e.message); + return null; + } + + // let's also remove that logic with the plaintextbuffer stored as array of numbers, and use base64 strings instead + }) + ); + await this.saveToDBWithData(filtered); + return filtered; + } catch (e) { + window.log.warn('getFromStorage failed with', e.message); + return []; + } } - protected async saveToDB() { - // For each plainTextBuffer in cache, save in as a simple Array to avoid - // Node issues with JSON stringifying Buffer without strict typing - const encodedCache = [...this.cache].map(item => { - const plainTextBuffer = Array.from(item.plainTextBuffer); - - return { ...item, plainTextBuffer }; + private async saveToDBWithData(msg: Array) { + // For each plainTextBuffer in cache, save it as hex (because Uint8Array are not serializable as is) + const encodedCache = msg.map(item => { + return { ...item, plainTextBufferHex: to_hex(item.plainTextBuffer) }; }); const encodedPendingMessages = JSON.stringify(encodedCache) || '[]'; await Storage.put('pendingMessages', encodedPendingMessages); } + + protected async saveToDB() { + await this.saveToDBWithData(this.cache); + } } diff --git a/ts/session/sending/group/GroupInviteResponse.ts b/ts/session/sending/group/GroupInviteResponse.ts new file mode 100644 index 0000000000..16570ee316 --- /dev/null +++ b/ts/session/sending/group/GroupInviteResponse.ts @@ -0,0 +1,24 @@ +import { GroupPubkeyType } from 'libsession_util_nodejs'; +import { GroupUpdateInviteResponseMessage } from '../../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInviteResponseMessage'; +import { ed25519Str } from '../../utils/String'; +import { NetworkTime } from '../../../util/NetworkTime'; +import { MessageQueue } from '../MessageQueue'; + +/** + * Send the invite response to the group's swarm. An admin will handle it and update our invite pending state to not pending. + * NOTE: + * This message can only be sent once we got the keys for the group, through a poll of the swarm. 
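+ * Usage sketch, assuming the poller has already stored the group keys:
+ *   await sendInviteResponseToGroup({ groupPk });
+ * The message is queued via MessageQueue.use().sendToGroupV2() and is built so that it never expires.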
+ */ +export async function sendInviteResponseToGroup({ groupPk }: { groupPk: GroupPubkeyType }) { + window.log.info(`sendInviteResponseToGroup for group ${ed25519Str(groupPk)}`); + + await MessageQueue.use().sendToGroupV2({ + message: new GroupUpdateInviteResponseMessage({ + groupPk, + isApproved: true, + createAtNetworkTimestamp: NetworkTime.now(), + expirationType: 'unknown', // an invite response should not expire + expireTimer: 0, + }), + }); +} diff --git a/ts/session/types/PubKey.ts b/ts/session/types/PubKey.ts index f2be5ddb0c..563f1a6bc6 100644 --- a/ts/session/types/PubKey.ts +++ b/ts/session/types/PubKey.ts @@ -1,3 +1,4 @@ +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; import { fromHexToArray } from '../utils/String'; export enum KeyPrefixType { @@ -22,12 +23,16 @@ export enum KeyPrefixType { /** * used for participants in open groups */ - groupV3 = '03', + groupV2 = '03', } +// TODO make that Pubkey class more useful, add fields for what types of pubkey it is (group, legacy group, private) + export class PubKey { public static readonly PUBKEY_LEN = 66; public static readonly PUBKEY_LEN_NO_PREFIX = PubKey.PUBKEY_LEN - 2; + public static readonly PUBKEY_BYTE_COUNT = PubKey.PUBKEY_LEN / 2; + public static readonly PUBKEY_BYTE_COUNT_NO_PREFIX = PubKey.PUBKEY_BYTE_COUNT - 1; public static readonly HEX = '[0-9a-fA-F]'; // This is a temporary fix to allow groupPubkeys created from mobile to be handled correctly @@ -38,19 +43,15 @@ export class PubKey { public static readonly PREFIX_GROUP_TEXTSECURE = '__textsecure_group__!'; // prettier-ignore private static readonly regex: RegExp = new RegExp( - `^(${PubKey.PREFIX_GROUP_TEXTSECURE})?(${KeyPrefixType.standard}|${KeyPrefixType.blinded15}|${KeyPrefixType.blinded25}|${KeyPrefixType.unblinded}|${KeyPrefixType.groupV3})?(${PubKey.HEX}{64}|${PubKey.HEX}{32})$` + `^(${PubKey.PREFIX_GROUP_TEXTSECURE})?(${KeyPrefixType.standard}|${KeyPrefixType.blinded15}|${KeyPrefixType.blinded25}|${KeyPrefixType.unblinded}|${KeyPrefixType.groupV2})?(${PubKey.HEX}{64}|${PubKey.HEX}{${PubKey.PUBKEY_BYTE_COUNT_NO_PREFIX}})$` ); /** * If you want to update this regex. Be sure that those are matches ; * __textsecure_group__!05010203040506070809a0b0c0d0e0f0ff010203040506070809a0b0c0d0e0f0ff - * __textsecure_group__!010203040506070809a0b0c0d0e0f0ff010203040506070809a0b0c0d0e0f0ff * __textsecure_group__!05010203040506070809a0b0c0d0e0f0ff - * __textsecure_group__!010203040506070809a0b0c0d0e0f0ff * 05010203040506070809a0b0c0d0e0f0ff010203040506070809a0b0c0d0e0f0ff * 03010203040506070809a0b0c0d0e0f0ff010203040506070809a0b0c0d0e0f0ff - * 010203040506070809a0b0c0d0e0f0ff010203040506070809a0B0c0d0e0f0FF * 05010203040506070809a0b0c0d0e0f0ff - * 010203040506070809a0b0c0d0e0f0ff * 030203040506070809a0b0c0d0e0f0ff */ @@ -64,7 +65,7 @@ export class PubKey { */ constructor(pubkeyString: string) { if (!PubKey.validate(pubkeyString)) { - throw new Error(`Invalid pubkey string passed: ${pubkeyString}`); + throw new Error('Invalid pubkey string passed'); } this.key = pubkeyString.toLowerCase(); } @@ -87,7 +88,7 @@ export class PubKey { const pk = value instanceof PubKey ? 
valAny.key : value; if (!pk || pk.length < 8) { - throw new Error('PubkKey.shorten was given an invalid PubKey to shorten.'); + throw new Error('PubKey.shorten was given an invalid PubKey to shorten.'); } return `(${pk.substring(0, 4)}...${pk.substring(pk.length - 4)})`; @@ -141,7 +142,7 @@ export class PubKey { const len = pubkey.length; // we do not support blinded prefix, see Note above - const isProdOrDevValid = len === 33 * 2 && /^05/.test(pubkey); // prod pubkey can have only 66 chars and the 05 only. + const isProdOrDevValid = len === PubKey.PUBKEY_LEN && /^05/.test(pubkey); // prod pubkey can have only 66 chars and the 05 only. // dev pubkey on testnet are now 66 chars too with the prefix, so every sessionID needs 66 chars and the prefix to be valid if (!isProdOrDevValid) { @@ -232,31 +233,18 @@ export class PubKey { return fromHexToArray(this.key); } - public withoutPrefixToArray(): Uint8Array { - return fromHexToArray(PubKey.removePrefixIfNeeded(this.key)); - } - public static isBlinded(key: string) { return key.startsWith(KeyPrefixType.blinded15) || key.startsWith(KeyPrefixType.blinded25); } - public static isClosedGroupV3(key: string) { - const regex = new RegExp(`^${KeyPrefixType.groupV3}${PubKey.HEX}{64}$`); + // TODO we should probably move those to a libsession exported ts file + public static is03Pubkey(key: string): key is GroupPubkeyType { + const regex = new RegExp(`^${KeyPrefixType.groupV2}${PubKey.HEX}{64}$`); return regex.test(key); } - public static isHexOnly(str: string) { - return new RegExp(`^${PubKey.HEX}*$`).test(str); - } - - /** - * - * @returns true if that string is a valid group (as in closed group) pubkey. - * i.e. returns true if length is 66, prefix is 05 only, and it's hex characters only - */ - public static isValidGroupPubkey(pubkey: string): boolean { - return ( - pubkey.length === 66 && pubkey.startsWith(KeyPrefixType.standard) && this.isHexOnly(pubkey) - ); + public static is05Pubkey(key: string): key is PubkeyType { + const regex = new RegExp(`^${KeyPrefixType.standard}${PubKey.HEX}{64}$`); + return regex.test(key); } } diff --git a/ts/session/types/RawMessage.ts b/ts/session/types/RawMessage.ts index a09e5e58b5..c7bff3be2d 100644 --- a/ts/session/types/RawMessage.ts +++ b/ts/session/types/RawMessage.ts @@ -1,20 +1,21 @@ import { SignalService } from '../../protobuf'; import { SnodeNamespaces } from '../apis/snode_api/namespaces'; -export type RawMessage = { +export type OutgoingRawMessage = { identifier: string; plainTextBuffer: Uint8Array; device: string; ttl: number; // ttl is in millis + networkTimestampCreated: number; encryption: SignalService.Envelope.Type; - namespace: SnodeNamespaces | null; // allowing null as when we upgrade, we might have messages awaiting sending which won't have a namespace + namespace: SnodeNamespaces; }; -// For building RawMessages from JSON -export interface PartialRawMessage { - identifier: string; - plainTextBuffer: any; - device: string; - ttl: number; - encryption: number; -} +export type StoredRawMessage = Pick< + OutgoingRawMessage, + 'identifier' | 'device' | 'ttl' | 'networkTimestampCreated' +> & { + plainTextBufferHex: string; + encryption: number; // read it as number, we need to check that it is indeed a valid encryption once loaded + namespace: number; // read it as number, we need to check that it is indeed a valid namespace once loaded +}; diff --git a/ts/session/types/with.ts b/ts/session/types/with.ts new file mode 100644 index 0000000000..bd5cebcbb0 --- /dev/null +++ b/ts/session/types/with.ts 
@@ -0,0 +1,13 @@ +import { PubkeyType } from 'libsession_util_nodejs'; + +export type WithMessageHash = { messageHash: string }; +export type WithTimestamp = { timestamp: number }; +export type WithSignature = { signature: string }; +export type WithSecretKey = { secretKey: Uint8Array }; + +export type WithFromMemberLeftMessage = { fromMemberLeftMessage: boolean }; // there are some changes we want to skip when doing changes triggered from a memberLeft message. + +export type WithAddWithoutHistoryMembers = { withoutHistory: Array }; +export type WithAddWithHistoryMembers = { withHistory: Array }; +export type WithRemoveMembers = { removed: Array }; +export type WithPromotedMembers = { promoted: Array }; diff --git a/ts/session/utils/AttachmentsDownload.ts b/ts/session/utils/AttachmentsDownload.ts index d9d54a2acd..3dc61d7c52 100644 --- a/ts/session/utils/AttachmentsDownload.ts +++ b/ts/session/utils/AttachmentsDownload.ts @@ -2,14 +2,14 @@ import { filter, isNumber, omit } from 'lodash'; import { v4 as uuidv4 } from 'uuid'; -import * as Constants from '../constants'; import { Data } from '../../data/data'; import { MessageModel } from '../../models/message'; import { downloadAttachment, downloadAttachmentSogsV3 } from '../../receiver/attachments'; import { initializeAttachmentLogic, processNewAttachment } from '../../types/MessageAttachment'; import { getAttachmentMetadata } from '../../types/message/initializeAttachmentMetadata'; -import { was404Error } from '../apis/snode_api/onions'; import { AttachmentDownloadMessageDetails } from '../../types/sqlSharedTypes'; +import { was404Error } from '../apis/snode_api/onions'; +import * as Constants from '../constants'; // this may cause issues if we increment that value to > 1, but only having one job will block the whole queue while one attachment is downloading const MAX_ATTACHMENT_JOB_PARALLELISM = 3; @@ -152,6 +152,7 @@ async function _runJob(job: any) { await _finishJob(null, id); return; } + const isTrusted = found.isTrustedForAttachmentDownload(); if (!isTrusted) { diff --git a/ts/session/utils/AttachmentsV2.ts b/ts/session/utils/AttachmentsV2.ts index 5f9dd07baf..b0b039c1f0 100644 --- a/ts/session/utils/AttachmentsV2.ts +++ b/ts/session/utils/AttachmentsV2.ts @@ -47,7 +47,7 @@ async function uploadV3(params: UploadParamsV2): Promise { - const groupConversation = getConversationController().get(groupId.key); - const groupMembers = groupConversation ? 
groupConversation.get('members') : undefined; - - if (!groupMembers) { - return []; - } - - return groupMembers.map(PubKey.cast); -} - export function encodeGroupPubKeyFromHex(hexGroupPublicKey: string | PubKey) { const pubkey = PubKey.cast(hexGroupPublicKey); return fromHexToArray(pubkey.key); diff --git a/ts/session/utils/Messages.ts b/ts/session/utils/Messages.ts index 36384d2600..6355f65b0b 100644 --- a/ts/session/utils/Messages.ts +++ b/ts/session/utils/Messages.ts @@ -1,13 +1,13 @@ -import { RawMessage } from '../types/RawMessage'; +import { OutgoingRawMessage } from '../types/RawMessage'; -import { PubKey } from '../types'; -import { ClosedGroupMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupMessage'; -import { ClosedGroupNewMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNewMessage'; -import { ClosedGroupEncryptionPairReplyMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairReplyMessage'; -import { ContentMessage } from '../messages/outgoing'; -import { ExpirationTimerUpdateMessage } from '../messages/outgoing/controlMessage/ExpirationTimerUpdateMessage'; import { SignalService } from '../../protobuf'; import { SnodeNamespaces } from '../apis/snode_api/namespaces'; +import { ContentMessage } from '../messages/outgoing'; +import { ExpirationTimerUpdateMessage } from '../messages/outgoing/controlMessage/ExpirationTimerUpdateMessage'; +import { ClosedGroupEncryptionPairReplyMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairReplyMessage'; +import { ClosedGroupMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupMessage'; +import { ClosedGroupNewMessage } from '../messages/outgoing/controlMessage/group/ClosedGroupNewMessage'; +import { PubKey } from '../types'; function getEncryptionTypeFromMessageType( message: ContentMessage, @@ -38,19 +38,20 @@ export async function toRawMessage( message: ContentMessage, namespace: SnodeNamespaces, isGroup = false -): Promise { +): Promise { const ttl = message.ttl(); const plainTextBuffer = message.plainTextBuffer(); const encryption = getEncryptionTypeFromMessageType(message, isGroup); - const rawMessage: RawMessage = { + const rawMessage: OutgoingRawMessage = { identifier: message.identifier, plainTextBuffer, device: destinationPubKey.key, ttl, encryption, namespace, + networkTimestampCreated: message.createAtNetworkTimestamp, }; return rawMessage; diff --git a/ts/session/utils/Promise.ts b/ts/session/utils/Promise.ts index bbde895ab4..5cc649ad31 100644 --- a/ts/session/utils/Promise.ts +++ b/ts/session/utils/Promise.ts @@ -204,14 +204,6 @@ export async function timeout(promise: Promise, timeoutMs: number): Promis return Promise.race([timeoutPromise, promise]); } -export async function delay(timeoutMs: number = 2000): Promise { - return new Promise(resolve => { - setTimeout(() => { - resolve(true); - }, timeoutMs); - }); -} - export const sleepFor = async (ms: number, showLog = false) => { if (showLog) { // eslint-disable-next-line no-console diff --git a/ts/session/utils/String.ts b/ts/session/utils/String.ts index 9d80a34e4a..dd2cc39280 100644 --- a/ts/session/utils/String.ts +++ b/ts/session/utils/String.ts @@ -63,4 +63,5 @@ export const sanitizeSessionUsername = (inputName: string) => { return validChars; }; -export const ed25519Str = (ed25519Key: string) => `(...${ed25519Key.substr(58)})`; +export const ed25519Str = (ed25519Key: string) => + `(...${ed25519Key.length > 58 ? 
ed25519Key.substr(58) : ed25519Key})`; diff --git a/ts/session/utils/TaskWithTimeout.ts b/ts/session/utils/TaskWithTimeout.ts index c9f0ef5917..9bf7770878 100644 --- a/ts/session/utils/TaskWithTimeout.ts +++ b/ts/session/utils/TaskWithTimeout.ts @@ -1,4 +1,3 @@ -/* eslint-disable no-useless-return */ /* eslint-disable consistent-return */ /* eslint-disable no-promise-executor-return */ @@ -17,10 +16,7 @@ export const createTaskWithTimeout = (task: any, id: string, givenTimeout?: numb window?.log?.error(message); reject(new Error(message)); - return; } - - return; }, timeout); const clearTimer = () => { try { @@ -42,13 +38,11 @@ export const createTaskWithTimeout = (task: any, id: string, givenTimeout?: numb clearTimer(); complete = true; resolve(result); - return; }; const failure = (error: any) => { clearTimer(); complete = true; reject(error); - return; }; let promise; diff --git a/ts/session/utils/Toast.tsx b/ts/session/utils/Toast.tsx index 6c08001fca..43d6ee767b 100644 --- a/ts/session/utils/Toast.tsx +++ b/ts/session/utils/Toast.tsx @@ -3,30 +3,35 @@ import { SessionToast, SessionToastType } from '../../components/basic/SessionTo import { SectionType, showLeftPaneSection, showSettingsSection } from '../../state/ducks/section'; // if you push a toast manually with toast...() be sure to set the type attribute of the SessionToast component -export function pushToastError(id: string, title: string, description?: string) { - toast.error( - , - { toastId: id, updateId: id } - ); +export function pushToastError(id: string, description: string) { + toast.error(, { + toastId: id, + updateId: id, + }); } -export function pushToastWarning(id: string, title: string, description?: string) { +export function pushToastWarning(id: string, description: string, onToastClick?: () => void) { toast.warning( - , - { toastId: id, updateId: id } + , + { + toastId: id, + updateId: id, + } ); } export function pushToastInfo( id: string, - title: string, - description?: string, + description: string, onToastClick?: () => void, delay?: number ) { toast.info( , - { toastId: id, updateId: id } - ); +export function pushToastSuccess(id: string, description: string) { + toast.success(, { + toastId: id, + updateId: id, + }); } export function pushLoadAttachmentFailure(message?: string) { @@ -123,7 +128,6 @@ export function pushedMissedCallCauseOfPermission(conversationName: string) { const id = 'missedCallPermission'; toast.info( { return undefined; } -export const getUserED25519KeyPairBytes = async (): Promise => { +export const getUserED25519KeyPairBytes = async (): Promise => { // 'identityKey' keeps the ed25519KeyPair under a ed25519KeyPair field. 
// it is only set if the user migrated to the ed25519 way of generating a key const item = await UserUtils.getIdentityKeyPair(); @@ -94,19 +96,17 @@ export const getUserED25519KeyPairBytes = async (): Promise { return; } const callIceCandicates = new CallMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), type: SignalService.CallMessage.Type.ICE_CANDIDATES, sdpMLineIndexes: validCandidates.map(c => c.sdpMLineIndex), sdpMids: validCandidates.map(c => c.sdpMid), @@ -626,10 +625,10 @@ const iceSenderDebouncer = _.debounce(async (recipient: string) => { `sending ICE CANDIDATES MESSAGE to ${ed25519Str(recipient)} about call ${currentCallUUID}` ); - await getMessageQueue().sendToPubKeyNonDurably({ + await MessageQueue.use().sendTo1o1NonDurably({ pubkey: PubKey.cast(recipient), message: callIceCandicates, - namespace: SnodeNamespaces.UserMessages, + namespace: SnodeNamespaces.Default, }); }, 2000); @@ -912,8 +911,8 @@ export async function USER_acceptIncomingCallRequest(fromSender: string) { await peerConnection.addIceCandidate(candicate); } } - const networkTimestamp = GetNetworkTime.getNowWithNetworkOffset(); - const callerConvo = getConversationController().get(fromSender); + const networkTimestamp = NetworkTime.now(); + const callerConvo = ConvoHub.use().get(fromSender); callerConvo.set('active_at', networkTimestamp); await callerConvo.unhideIfNeeded(false); @@ -935,12 +934,13 @@ export async function USER_acceptIncomingCallRequest(fromSender: string) { await buildAnswerAndSendIt(fromSender, msgIdentifier); // consider the conversation completely approved - await callerConvo.setDidApproveMe(true); - await approveConvoAndSendResponse(fromSender); + await handleAcceptConversationRequest({ + convoId: fromSender, + }); } export async function rejectCallAlreadyAnotherCall(fromSender: string, forcedUUID: string) { - const convo = getConversationController().get(fromSender); + const convo = ConvoHub.use().get(fromSender); if (!convo) { throw new Error('rejectCallAlreadyAnotherCall non existing convo'); } @@ -953,7 +953,7 @@ export async function rejectCallAlreadyAnotherCall(fromSender: string, forcedUUI const rejectCallMessage = new CallMessage({ type: SignalService.CallMessage.Type.END_CALL, - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), uuid: forcedUUID, expirationType, expireTimer, @@ -976,7 +976,7 @@ export async function USER_rejectIncomingCallRequest(fromSender: string) { window.log.info(`USER_rejectIncomingCallRequest ${ed25519Str(fromSender)}: ${aboutCallUUID}`); if (aboutCallUUID) { rejectedCallUUIDS.add(aboutCallUUID); - const convo = getConversationController().get(fromSender); + const convo = ConvoHub.use().get(fromSender); if (!convo) { throw new Error('USER_rejectIncomingCallRequest not existing convo'); } @@ -986,7 +986,7 @@ export async function USER_rejectIncomingCallRequest(fromSender: string) { const endCallMessage = new CallMessage({ type: SignalService.CallMessage.Type.END_CALL, - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), uuid: aboutCallUUID, expirationType, expireTimer, @@ -1007,15 +1007,15 @@ export async function USER_rejectIncomingCallRequest(fromSender: string) { async function sendCallMessageAndSync(callmessage: CallMessage, user: string) { await Promise.all([ - getMessageQueue().sendToPubKeyNonDurably({ + MessageQueue.use().sendTo1o1NonDurably({ pubkey: PubKey.cast(user), message: callmessage, - namespace: SnodeNamespaces.UserMessages, + namespace: SnodeNamespaces.Default, }), - 
getMessageQueue().sendToPubKeyNonDurably({ + MessageQueue.use().sendTo1o1NonDurably({ pubkey: UserUtils.getOurPubKeyFromCache(), message: callmessage, - namespace: SnodeNamespaces.UserMessages, + namespace: SnodeNamespaces.Default, }), ]); } @@ -1027,7 +1027,7 @@ export async function USER_hangup(fromSender: string) { window.log.warn('should not be able to hangup without a currentCallUUID'); return; } - const convo = getConversationController().get(fromSender); + const convo = ConvoHub.use().get(fromSender); if (!convo) { throw new Error('USER_hangup not existing convo'); } @@ -1037,15 +1037,15 @@ export async function USER_hangup(fromSender: string) { rejectedCallUUIDS.add(currentCallUUID); const endCallMessage = new CallMessage({ type: SignalService.CallMessage.Type.END_CALL, - timestamp: Date.now(), + createAtNetworkTimestamp: NetworkTime.now(), uuid: currentCallUUID, expirationType, expireTimer, }); - void getMessageQueue().sendToPubKeyNonDurably({ + void MessageQueue.use().sendTo1o1NonDurably({ pubkey: PubKey.cast(fromSender), message: endCallMessage, - namespace: SnodeNamespaces.UserMessages, + namespace: SnodeNamespaces.Default, }); window.inboxStore?.dispatch(endCall()); @@ -1111,7 +1111,7 @@ async function buildAnswerAndSendIt(sender: string, msgIdentifier: string | null window.log.warn('failed to create answer'); return; } - const convo = getConversationController().get(sender); + const convo = ConvoHub.use().get(sender); if (!convo) { throw new Error('buildAnswerAndSendIt not existing convo'); } @@ -1120,8 +1120,8 @@ async function buildAnswerAndSendIt(sender: string, msgIdentifier: string | null DisappearingMessages.forcedDeleteAfterReadMsgSetting(convo); const answerSdp = answer.sdp; const callAnswerMessage = new CallMessage({ + createAtNetworkTimestamp: NetworkTime.now(), identifier: msgIdentifier || undefined, - timestamp: Date.now(), type: SignalService.CallMessage.Type.ANSWER, sdps: [answerSdp], uuid: currentCallUUID, @@ -1155,7 +1155,7 @@ function getCachedMessageFromCallMessage( } async function isUserApprovedOrWeSentAMessage(user: string) { - const isApproved = getConversationController().get(user)?.isApproved(); + const isApproved = ConvoHub.use().get(user)?.isApproved(); if (isApproved) { return true; @@ -1255,8 +1255,8 @@ export async function handleCallTypeOffer( window.inboxStore?.dispatch(incomingCall({ pubkey: sender })); // show a notification - const callerConvo = getConversationController().get(sender); - const convNotif = callerConvo?.get('triggerNotificationsFor') || 'disabled'; + const callerConvo = ConvoHub.use().get(sender); + const convNotif = callerConvo?.getNotificationsFor() || 'disabled'; if (convNotif === 'disabled') { window?.log?.info('notifications disabled for convo', ed25519Str(sender)); } else if (callerConvo) { @@ -1281,7 +1281,7 @@ export async function handleMissedCall( reason: 'not-approved' | 'permissions' | 'another-call-ongoing' | 'too-old-timestamp', details: WithMessageHash & WithOptExpireUpdate ) { - const incomingCallConversation = getConversationController().get(sender); + const incomingCallConversation = ConvoHub.use().get(sender); const displayname = incomingCallConversation?.getNickname() || @@ -1314,10 +1314,10 @@ async function addMissedCallMessage( sentAt: number, details: (WithMessageHash & WithOptExpireUpdate) | null ) { - const incomingCallConversation = getConversationController().get(callerPubkey); + const incomingCallConversation = ConvoHub.use().get(callerPubkey); if (incomingCallConversation.isActive() || 
incomingCallConversation.isHidden()) { - incomingCallConversation.set('active_at', GetNetworkTime.getNowWithNetworkOffset()); + incomingCallConversation.set('active_at', NetworkTime.now()); await incomingCallConversation.unhideIfNeeded(false); } @@ -1328,7 +1328,8 @@ async function addMissedCallMessage( callNotificationType: 'missed-call', source: callerPubkey, sent_at: sentAt, - received_at: GetNetworkTime.getNowWithNetworkOffset(), + received_at: NetworkTime.now(), + expireTimer: 0, unread: READ_MESSAGE_STATE.unread, messageHash: details?.messageHash, }); diff --git a/ts/session/utils/errors.ts b/ts/session/utils/errors.ts index 9ede6802a1..4115157e27 100644 --- a/ts/session/utils/errors.ts +++ b/ts/session/utils/errors.ts @@ -67,10 +67,23 @@ export class HTTPError extends Error { } } -export class SnodeResponseError extends Error { - constructor(message = 'sessionRpc could not talk to node') { +class BaseError extends Error { + constructor(message: string) { super(message); + this.name = this.constructor.name; // restore prototype chain Object.setPrototypeOf(this, SnodeResponseError.prototype); } } + +export class SigningFailed extends BaseError {} +export class InvalidSigningType extends BaseError {} +export class GroupV2SigningFailed extends SigningFailed {} +export class PreConditionFailed extends BaseError {} +export class DecryptionFailed extends BaseError {} +export class InvalidMessage extends BaseError {} +export class SnodeResponseError extends BaseError { + constructor(message = 'sessionRpc could not talk to node') { + super(message); + } +} diff --git a/ts/session/utils/job_runners/JobDeserialization.ts b/ts/session/utils/job_runners/JobDeserialization.ts index 0957a6aa45..dd99820d44 100644 --- a/ts/session/utils/job_runners/JobDeserialization.ts +++ b/ts/session/utils/job_runners/JobDeserialization.ts @@ -4,7 +4,7 @@ import { FakeSleepForMultiJob, } from '../../../test/session/unit/utils/job_runner/FakeSleepForJob'; import { AvatarDownload } from './jobs/AvatarDownloadJob'; -import { ConfigurationSync } from './jobs/ConfigurationSyncJob'; +import { UserSync } from './jobs/UserSyncJob'; import { PersistedJob, TypeOfPersistedData } from './PersistedJob'; export function persistedJobFromData( @@ -15,8 +15,8 @@ export function persistedJobFromData( } switch (data.jobType) { - case 'ConfigurationSyncJobType': - return new ConfigurationSync.ConfigurationSyncJob(data) as unknown as PersistedJob; + case 'UserSyncJobType': + return new UserSync.UserSyncJob(data) as unknown as PersistedJob; case 'AvatarDownloadJobType': return new AvatarDownload.AvatarDownloadJob(data) as unknown as PersistedJob; case 'FakeSleepForJobType': diff --git a/ts/session/utils/job_runners/JobRunner.ts b/ts/session/utils/job_runners/JobRunner.ts index f3ed3d8863..cc9879a9f5 100644 --- a/ts/session/utils/job_runners/JobRunner.ts +++ b/ts/session/utils/job_runners/JobRunner.ts @@ -5,12 +5,16 @@ import { timeout } from '../Promise'; import { persistedJobFromData } from './JobDeserialization'; import { AvatarDownloadPersistedData, - ConfigurationSyncPersistedData, FetchMsgExpirySwarmPersistedData, + GroupInvitePersistedData, + GroupPendingRemovalsPersistedData, + GroupPromotePersistedData, + GroupSyncPersistedData, PersistedJob, RunJobResult, TypeOfPersistedData, UpdateMsgExpirySwarmPersistedData, + UserSyncPersistedData, } from './PersistedJob'; import { JobRunnerType } from './jobs/JobRunnerType'; @@ -352,16 +356,29 @@ export class PersistedJobRunner { } } -const configurationSyncRunner = new 
PersistedJobRunner( - 'ConfigurationSyncJob', - null -); +const userSyncRunner = new PersistedJobRunner('UserSyncJob', null); +const groupSyncRunner = new PersistedJobRunner('GroupSyncJob', null); const avatarDownloadRunner = new PersistedJobRunner( 'AvatarDownloadJob', null ); +const groupInviteJobRunner = new PersistedJobRunner( + 'GroupInviteJob', + null +); + +const groupPromoteJobRunner = new PersistedJobRunner( + 'GroupPromoteJob', + null +); + +const groupPendingRemovalJobRunner = new PersistedJobRunner( + 'GroupPendingRemovalJob', + null +); + const updateMsgExpiryRunner = new PersistedJobRunner( 'UpdateMsgExpirySwarmJob', null @@ -373,8 +390,12 @@ const fetchSwarmMsgExpiryRunner = new PersistedJobRunner; +export interface GroupInvitePersistedData extends PersistedJobData { + jobType: 'GroupInviteJobType'; + groupPk: GroupPubkeyType; + member: PubkeyType; + inviteAsAdmin: boolean; + forceUnrevoke: boolean; +} + +export interface GroupPromotePersistedData extends PersistedJobData { + jobType: 'GroupPromoteJobType'; + groupPk: GroupPubkeyType; + member: PubkeyType; } -export interface ConfigurationSyncPersistedData extends PersistedJobData { - jobType: 'ConfigurationSyncJobType'; +export interface GroupPendingRemovalsPersistedData extends PersistedJobData { + jobType: 'GroupPendingRemovalJobType'; + groupPk: GroupPubkeyType; +} + +export interface UserSyncPersistedData extends PersistedJobData { + jobType: 'UserSyncJobType'; +} +export interface GroupSyncPersistedData extends PersistedJobData { + jobType: 'GroupSyncJobType'; +} +interface PersitedDataWithMsgIds extends PersistedJobData { + msgIds: Array; } export interface FetchMsgExpirySwarmPersistedData extends PersitedDataWithMsgIds { @@ -50,12 +76,16 @@ export interface UpdateMsgExpirySwarmPersistedData extends PersitedDataWithMsgId } export type TypeOfPersistedData = - | ConfigurationSyncPersistedData + | UserSyncPersistedData | AvatarDownloadPersistedData | FetchMsgExpirySwarmPersistedData | UpdateMsgExpirySwarmPersistedData | FakeSleepJobData - | FakeSleepForMultiJobData; + | FakeSleepForMultiJobData + | GroupSyncPersistedData + | GroupInvitePersistedData + | GroupPromotePersistedData + | GroupPendingRemovalsPersistedData; export type AddJobCheckReturn = 'skipAddSameJobPresent' | null; @@ -153,6 +183,15 @@ export abstract class PersistedJob { : null; } + public addJobCheckSameTypeAndIdentifierPresent(jobs: Array): 'skipAddSameJobPresent' | null { + return jobs.some( + j => + j.jobType === this.persistedData.jobType && j.identifier === this.persistedData.identifier + ) + ? 
'skipAddSameJobPresent' + : null; + } + public addJobCheckEveryMsgIdsAlreadyPresent(jobs: Array): 'skipAddSameJobPresent' | null { if (!jobs.length) { return null; diff --git a/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts b/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts index 273ab43462..3a0fd7dc4d 100644 --- a/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts +++ b/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts @@ -6,7 +6,7 @@ import { MIME } from '../../../../types'; import { processNewAttachment } from '../../../../types/MessageAttachment'; import { autoScaleForIncomingAvatar } from '../../../../util/attachmentsUtil'; import { decryptProfile } from '../../../../util/crypto/profileEncrypter'; -import { getConversationController } from '../../../conversations'; +import { ConvoHub } from '../../../conversations'; import { fromHexToArray } from '../../String'; import { runners } from '../JobRunner'; import { @@ -24,7 +24,7 @@ const defaultMaxAttemps = 3; * Before calling this function, you have to update the related conversation profileKey and avatarPointer fields with the urls which should be downloaded, or reset them if you wanted them reset. */ export function shouldAddAvatarDownloadJob({ conversationId }: { conversationId: string }) { - const conversation = getConversationController().get(conversationId); + const conversation = ConvoHub.use().get(conversationId); if (!conversation) { // return true so we do not retry this task. window.log.warn('shouldAddAvatarDownloadJob did not corresponding conversation'); @@ -35,8 +35,8 @@ export function shouldAddAvatarDownloadJob({ conversationId }: { conversationId: window.log.warn('shouldAddAvatarDownloadJob can only be used for private convos currently'); return false; } - const prevPointer = conversation.get('avatarPointer'); - const profileKey = conversation.get('profileKey'); + const prevPointer = conversation.getAvatarPointer(); + const profileKey = conversation.getProfileKey(); const hasNoAvatar = isEmpty(prevPointer) || isEmpty(profileKey); if (hasNoAvatar) { @@ -104,7 +104,7 @@ class AvatarDownloadJob extends PersistedJob { return RunJobResult.PermanentFailure; } - let conversation = getConversationController().get(convoId); + let conversation = ConvoHub.use().get(convoId); if (!conversation) { // return true so we do not retry this task. window.log.warn('AvatarDownloadJob did not corresponding conversation'); @@ -116,8 +116,8 @@ class AvatarDownloadJob extends PersistedJob { return RunJobResult.PermanentFailure; } let changes = false; - const toDownloadPointer = conversation.get('avatarPointer'); - const toDownloadProfileKey = conversation.get('profileKey'); + const toDownloadPointer = conversation.getAvatarPointer(); + const toDownloadProfileKey = conversation.getProfileKey(); // if there is an avatar and profileKey for that user ('', null and undefined excluded), download, decrypt and save the avatar locally. if (toDownloadPointer && toDownloadProfileKey) { @@ -127,7 +127,7 @@ class AvatarDownloadJob extends PersistedJob { url: toDownloadPointer, isRaw: true, }); - conversation = getConversationController().getOrThrow(convoId); + conversation = ConvoHub.use().getOrThrow(convoId); if (!downloaded.data.byteLength) { window.log.debug(`[profileupdate] downloaded data is empty for ${conversation.id}`); @@ -161,7 +161,7 @@ class AvatarDownloadJob extends PersistedJob { data: await scaledData.blob.arrayBuffer(), contentType: MIME.IMAGE_UNKNOWN, // contentType is mostly used to generate previews and screenshot. 
We do not care for those in this case. }); - conversation = getConversationController().getOrThrow(convoId); + conversation = ConvoHub.use().getOrThrow(convoId); ({ path } = upgraded); } catch (e) { window?.log?.error(`[profileupdate] Could not decrypt profile image: ${e}`); diff --git a/ts/session/utils/job_runners/jobs/ConfigurationSyncJob.ts b/ts/session/utils/job_runners/jobs/ConfigurationSyncJob.ts deleted file mode 100644 index 3350d40701..0000000000 --- a/ts/session/utils/job_runners/jobs/ConfigurationSyncJob.ts +++ /dev/null @@ -1,344 +0,0 @@ -/* eslint-disable no-await-in-loop */ -import { to_hex } from 'libsodium-wrappers-sumo'; -import { compact, isArray, isEmpty, isNumber, isString } from 'lodash'; -import { v4 } from 'uuid'; -import { UserUtils } from '../..'; -import { ConfigDumpData } from '../../../../data/configDump/configDump'; -import { ConfigurationSyncJobDone } from '../../../../shims/events'; -import { ReleasedFeatures } from '../../../../util/releaseFeature'; -import { isSignInByLinking } from '../../../../util/storage'; -import { GenericWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; -import { NotEmptyArrayOfBatchResults } from '../../../apis/snode_api/SnodeRequestTypes'; -import { getConversationController } from '../../../conversations'; -import { SharedConfigMessage } from '../../../messages/outgoing/controlMessage/SharedConfigMessage'; -import { MessageSender } from '../../../sending/MessageSender'; -import { allowOnlyOneAtATime } from '../../Promise'; -import { LibSessionUtil, OutgoingConfResult } from '../../libsession/libsession_utils'; -import { runners } from '../JobRunner'; -import { - AddJobCheckReturn, - ConfigurationSyncPersistedData, - PersistedJob, - RunJobResult, -} from '../PersistedJob'; -import { DURATION } from '../../../constants'; - -const defaultMsBetweenRetries = 5 * DURATION.SECONDS; // a long time between retries, to avoid running multiple jobs at the same time, when one was postponed at the same time as one already planned (5s) -const defaultMaxAttempts = 4; - -/** - * We want to run each of those jobs at least 3seconds apart. - * So every time one of that job finishes, update this timestamp, so we know when adding a new job, what is the next minimun date to run it. - */ -let lastRunConfigSyncJobTimestamp: number | null = null; - -export type SingleDestinationChanges = { - messages: Array; - allOldHashes: Array; -}; - -type SuccessfulChange = { - message: SharedConfigMessage; - updatedHash: string; -}; - -/** - * Later in the syncing logic, we want to batch-send all the updates for a pubkey in a single batch call. - * To make this easier, this function prebuilds and merges together all the changes for each pubkey. - */ -async function retrieveSingleDestinationChanges( - destination: string -): Promise { - const outgoingConfResults = await LibSessionUtil.pendingChangesForPubkey(destination); - - const compactedHashes = compact(outgoingConfResults.map(m => m.oldMessageHashes)).flat(); - - return { messages: outgoingConfResults, allOldHashes: compactedHashes }; -} - -/** - * This function is run once we get the results from the multiple batch-send. - */ -function resultsToSuccessfulChange( - result: NotEmptyArrayOfBatchResults | null, - request: SingleDestinationChanges -): Array { - const successfulChanges: Array = []; - - /** - * For each batch request, we get as result - * - status code + hash of the new config message - * - status code of the delete of all messages as given by the request hashes. 
- * - * As it is a sequence, the delete might have failed but the new config message might still be posted. - * So we need to check which request failed, and if it is the delete by hashes, we need to add the hash of the posted message to the list of hashes - */ - - if (!result?.length) { - return successfulChanges; - } - - for (let j = 0; j < result.length; j++) { - const batchResult = result[j]; - const messagePostedHashes = batchResult?.body?.hash; - - if ( - batchResult.code === 200 && - isString(messagePostedHashes) && - request.messages?.[j].message - ) { - // the library keeps track of the hashes to push and pushed using the hashes now - successfulChanges.push({ - updatedHash: messagePostedHashes, - message: request.messages?.[j].message, - }); - } - } - - return successfulChanges; -} - -async function buildAndSaveDumpsToDB( - changes: Array, - destination: string -): Promise { - for (let i = 0; i < changes.length; i++) { - const change = changes[i]; - const variant = LibSessionUtil.kindToVariant(change.message.kind); - - const needsDump = await LibSessionUtil.markAsPushed( - variant, - destination, - change.message.seqno.toNumber(), - change.updatedHash - ); - - if (!needsDump) { - continue; - } - const dump = await GenericWrapperActions.dump(variant); - await ConfigDumpData.saveConfigDump({ - data: dump, - publicKey: destination, - variant, - }); - } -} - -async function saveDumpsNeededToDB(destination: string) { - for (let i = 0; i < LibSessionUtil.requiredUserVariants.length; i++) { - const variant = LibSessionUtil.requiredUserVariants[i]; - const needsDump = await GenericWrapperActions.needsDump(variant); - - if (!needsDump) { - continue; - } - const dump = await GenericWrapperActions.dump(variant); - await ConfigDumpData.saveConfigDump({ - data: dump, - publicKey: destination, - variant, - }); - } -} - -class ConfigurationSyncJob extends PersistedJob { - constructor({ - identifier, - nextAttemptTimestamp, - maxAttempts, - currentRetry, - }: Partial< - Pick< - ConfigurationSyncPersistedData, - 'identifier' | 'nextAttemptTimestamp' | 'currentRetry' | 'maxAttempts' - > - >) { - super({ - jobType: 'ConfigurationSyncJobType', - identifier: identifier || v4(), - delayBetweenRetries: defaultMsBetweenRetries, - maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, - currentRetry: isNumber(currentRetry) ? 
currentRetry : 0, - nextAttemptTimestamp: nextAttemptTimestamp || Date.now(), - }); - } - - public async run(): Promise { - const start = Date.now(); - - try { - window.log.debug(`ConfigurationSyncJob starting ${this.persistedData.identifier}`); - - const us = UserUtils.getOurPubKeyStrFromCache(); - const ed25519Key = await UserUtils.getUserED25519KeyPairBytes(); - const conversation = getConversationController().get(us); - if (!us || !conversation || !ed25519Key) { - // we check for ed25519Key because it is needed for authenticated requests - window.log.warn('did not find our own conversation'); - return RunJobResult.PermanentFailure; - } - - // TODOLATER add a way to have a few configuration sync jobs running at the same time, but only a single one per pubkey - const thisJobDestination = us; - - // save the dumps to DB even before trying to push them, so at least we have an up to date dumps in the DB in case of crash, no network etc - await saveDumpsNeededToDB(thisJobDestination); - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - - // if the feature flag is not enabled, we want to keep updating the dumps, but just not sync them. - if (!userConfigLibsession) { - this.triggerConfSyncJobDone(); - return RunJobResult.Success; - } - const singleDestChanges = await retrieveSingleDestinationChanges(thisJobDestination); - - // If there are no pending changes then the job can just complete (next time something - // is updated we want to try and run immediately so don't scuedule another run in this case) - if (isEmpty(singleDestChanges?.messages)) { - this.triggerConfSyncJobDone(); - return RunJobResult.Success; - } - const oldHashesToDelete = new Set(singleDestChanges.allOldHashes); - const msgs = singleDestChanges.messages.map(item => { - return { - namespace: item.namespace, - pubkey: thisJobDestination, - timestamp: item.message.timestamp, - ttl: item.message.ttl(), - message: item.message, - }; - }); - - // TODO use GenericWrapperActions.makeDump() once it has been merged - if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { - for (let index = 0; index < LibSessionUtil.requiredUserVariants.length; index++) { - const variant = LibSessionUtil.requiredUserVariants[index]; - - window.log.info( - `ConfigurationSyncJob: current dumps: ${variant}:`, - to_hex(await GenericWrapperActions.dump(variant)) - ); - } - window.log.info( - 'ConfigurationSyncJob: About to push changes: ', - msgs.map(m => { - return { - ...m, - message: { - ...m.message, - readyToSendData: to_hex(m.message.readyToSendData), - }, - }; - }) - ); - } - - const result = await MessageSender.sendMessagesToSnode( - msgs, - thisJobDestination, - oldHashesToDelete - ); - - const expectedReplyLength = - singleDestChanges.messages.length + (oldHashesToDelete.size ? 1 : 0); - // we do a sequence call here. 
If we do not have the right expected number of results, consider it a failure - if (!isArray(result) || result.length !== expectedReplyLength) { - window.log.info( - `ConfigurationSyncJob: unexpected result length: expected ${expectedReplyLength} but got ${result?.length}` - ); - // this might be a 421 error (already handled) so let's retry this request a little bit later - return RunJobResult.RetryJobIfPossible; - } - - const changes = resultsToSuccessfulChange(result, singleDestChanges); - if (isEmpty(changes)) { - return RunJobResult.RetryJobIfPossible; - } - // Now that we have the successful changes, we need to mark them as pushed and - // generate any config dumps which need to be stored - - await buildAndSaveDumpsToDB(changes, thisJobDestination); - this.triggerConfSyncJobDone(); - return RunJobResult.Success; - // eslint-disable-next-line no-useless-catch - } catch (e) { - throw e; - } finally { - window.log.debug(`ConfigurationSyncJob run() took ${Date.now() - start}ms`); - - // this is a simple way to make sure whatever happens here, we update the lastest timestamp. - // (a finally statement is always executed (no matter if exception or returns in other try/catch block) - this.updateLastTickTimestamp(); - } - } - - public serializeJob(): ConfigurationSyncPersistedData { - const fromParent = super.serializeBase(); - return fromParent; - } - - public addJobCheck(jobs: Array): AddJobCheckReturn { - return this.addJobCheckSameTypePresent(jobs); - } - - /** - * For the SharedConfig job, we do not care about the jobs already in the list. - * We never want to add a new sync configuration job if there is already one in the queue. - * This is done by the `addJobCheck` method above - */ - public nonRunningJobsToRemove(_jobs: Array) { - return []; - } - - public getJobTimeoutMs(): number { - return 20000; - } - - private updateLastTickTimestamp() { - lastRunConfigSyncJobTimestamp = Date.now(); - } - - private triggerConfSyncJobDone() { - window.Whisper.events.trigger(ConfigurationSyncJobDone); - } -} - -/** - * Queue a new Sync Configuration if needed job. - * A ConfigurationSyncJob can only be added if there is none of the same type queued already. 
- */ -async function queueNewJobIfNeeded() { - if (isSignInByLinking()) { - window.log.info('NOT Scheduling ConfSyncJob: as we are linking a device'); - - return; - } - if ( - !lastRunConfigSyncJobTimestamp || - lastRunConfigSyncJobTimestamp < Date.now() - defaultMsBetweenRetries - ) { - // Note: we postpone by 3s for two reasons: - // - to make sure whoever is adding this job is done with what is needs to do first - // - to allow a recently created device to process incoming config messages before pushing a new one - // this call will make sure that there is only one configuration sync job at all times - await runners.configurationSyncRunner.addJob( - new ConfigurationSyncJob({ nextAttemptTimestamp: Date.now() + 3 * DURATION.SECONDS }) - ); - } else { - // if we did run at t=100, and it is currently t=110, the difference is 10 - const diff = Math.max(Date.now() - lastRunConfigSyncJobTimestamp, 0); - // but we want to run every 30, so what we need is actually `30-10` from now = 20 - const leftBeforeNextTick = Math.max(defaultMsBetweenRetries - diff, DURATION.SECONDS); - - await runners.configurationSyncRunner.addJob( - new ConfigurationSyncJob({ nextAttemptTimestamp: Date.now() + leftBeforeNextTick }) - ); - } -} - -export const ConfigurationSync = { - ConfigurationSyncJob, - queueNewJobIfNeeded: () => - allowOnlyOneAtATime('ConfigurationSyncJob-oneAtAtTime', queueNewJobIfNeeded), -}; diff --git a/ts/session/utils/job_runners/jobs/FetchMsgExpirySwarmJob.ts b/ts/session/utils/job_runners/jobs/FetchMsgExpirySwarmJob.ts index 94c7124264..e6fa7aece2 100644 --- a/ts/session/utils/job_runners/jobs/FetchMsgExpirySwarmJob.ts +++ b/ts/session/utils/job_runners/jobs/FetchMsgExpirySwarmJob.ts @@ -48,14 +48,14 @@ class FetchMsgExpirySwarmJob extends PersistedJob m.getMessageHash())); + const messagesHashes = compact(msgModels.map(m => m.getMessageHash())); - if (isEmpty(msgModels) || isEmpty(messageHashes)) { + if (isEmpty(msgModels) || isEmpty(messagesHashes)) { return RunJobResult.Success; } const fetchedExpiries = await getExpiriesFromSnode({ - messageHashes, + messagesHashes, }); const updatedMsgModels: Array = []; diff --git a/ts/session/utils/job_runners/jobs/GroupInviteJob.ts b/ts/session/utils/job_runners/jobs/GroupInviteJob.ts new file mode 100644 index 0000000000..ed2b413e67 --- /dev/null +++ b/ts/session/utils/job_runners/jobs/GroupInviteJob.ts @@ -0,0 +1,334 @@ +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { debounce, difference, isNumber } from 'lodash'; +import { v4 } from 'uuid'; +import { ToastUtils, UserUtils } from '../..'; +import { groupInfoActions } from '../../../../state/ducks/metaGroups'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { SnodeNamespaces } from '../../../apis/snode_api/namespaces'; +import { SnodeGroupSignature } from '../../../apis/snode_api/signature/groupSignature'; +import { PubKey } from '../../../types'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + GroupInvitePersistedData, + PersistedJob, + RunJobResult, +} from '../PersistedJob'; +import { LibSessionUtil } from '../../libsession/libsession_utils'; +import { showUpdateGroupMembersByConvoId } from '../../../../interactions/conversationInteractions'; +import { ConvoHub } from '../../../conversations'; +import { MessageQueue } from '../../../sending'; +import { NetworkTime } from '../../../../util/NetworkTime'; +import { 
SubaccountUnrevokeSubRequest } from '../../../apis/snode_api/SnodeRequestTypes'; +import { GroupSync } from './GroupSyncJob'; + +const defaultMsBetweenRetries = 10000; +const defaultMaxAttempts = 1; + +type JobExtraArgs = { + groupPk: GroupPubkeyType; + member: PubkeyType; + inviteAsAdmin: boolean; + /** + * When inviting a member, we usually only want to send a message to their swarm. + * In the case of an invitation resend process though, we also want to make sure their token is unrevoked from the group's swarm. + * + */ + forceUnrevoke: boolean; +}; + +export function shouldAddJob(args: JobExtraArgs) { + if (UserUtils.isUsFromCache(args.member)) { + return false; + } + + return true; +} + +const invitesFailed = new Map< + GroupPubkeyType, + { + debouncedCall: (groupPk: GroupPubkeyType) => void; + failedMembers: Array; + } +>(); + +async function addJob({ groupPk, member, inviteAsAdmin, forceUnrevoke }: JobExtraArgs) { + if (shouldAddJob({ groupPk, member, inviteAsAdmin, forceUnrevoke })) { + const groupInviteJob = new GroupInviteJob({ + groupPk, + member, + inviteAsAdmin, + forceUnrevoke, + nextAttemptTimestamp: Date.now(), + }); + window.log.debug(`addGroupInviteJob: adding group invite for ${groupPk}:${member} `); + + window?.inboxStore?.dispatch( + groupInfoActions.refreshGroupDetailsFromWrapper({ groupPk }) as any + ); + await LibSessionUtil.saveDumpsToDb(groupPk); + + await runners.groupInviteJobRunner.addJob(groupInviteJob); + + if (inviteAsAdmin) { + window?.inboxStore?.dispatch( + groupInfoActions.setPromotionPending({ groupPk, pubkey: member, sending: true }) + ); + } else { + window?.inboxStore?.dispatch( + groupInfoActions.setInvitePending({ groupPk, pubkey: member, sending: true }) + ); + } + } +} + +function displayFailedInvitesForGroup(groupPk: GroupPubkeyType) { + const thisGroupFailures = invitesFailed.get(groupPk); + + if (!thisGroupFailures || thisGroupFailures.failedMembers.length === 0) { + return; + } + const onToastClick = () => { + void showUpdateGroupMembersByConvoId(groupPk); + }; + const count = thisGroupFailures.failedMembers.length; + const groupName = ConvoHub.use().get(groupPk)?.getRealSessionUsername() || window.i18n('unknown'); + const firstUserName = + ConvoHub.use().get(thisGroupFailures.failedMembers?.[0])?.getRealSessionUsername() || + window.i18n('unknown'); + const secondUserName = + ConvoHub.use().get(thisGroupFailures.failedMembers?.[1])?.getRealSessionUsername() || + window.i18n('unknown'); + switch (count) { + case 1: + ToastUtils.pushToastWarning( + `invite-failed${groupPk}`, + window.i18n('groupInviteFailedUser', { group_name: groupName, name: firstUserName }), + onToastClick + ); + break; + case 2: + ToastUtils.pushToastWarning( + `invite-failed${groupPk}`, + window.i18n('groupInviteFailedTwo', { + group_name: groupName, + name: firstUserName, + other_name: secondUserName, + }), + onToastClick + ); + break; + default: + ToastUtils.pushToastWarning( + `invite-failed${groupPk}`, + window.i18n('groupInviteFailedMultiple', { + group_name: groupName, + name: firstUserName, + count: thisGroupFailures.failedMembers.length - 1, + }), + onToastClick + ); + } + // toast was displayed, empty the list + thisGroupFailures.failedMembers = []; +} + +class GroupInviteJob extends PersistedJob { + constructor({ + groupPk, + member, + inviteAsAdmin, + nextAttemptTimestamp, + maxAttempts, + currentRetry, + forceUnrevoke, + identifier, + }: Pick & + Partial< + Pick< + GroupInvitePersistedData, + | 'nextAttemptTimestamp' + | 'identifier' + | 'maxAttempts' + | 
'delayBetweenRetries' + | 'currentRetry' + > + >) { + super({ + jobType: 'GroupInviteJobType', + identifier: identifier || v4(), + member, + groupPk, + inviteAsAdmin, + forceUnrevoke, + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now() + defaultMsBetweenRetries, + currentRetry: isNumber(currentRetry) ? currentRetry : 0, + }); + } + + public async run(): Promise { + const { groupPk, member, inviteAsAdmin, jobType, identifier } = this.persistedData; + + window.log.info( + `running job ${jobType} with groupPk:"${groupPk}" member:${member} inviteAsAdmin:${inviteAsAdmin} id:"${identifier}" ` + ); + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || !group.name) { + window.log.warn(`GroupInviteJob: Did not find group in wrapper or no valid info in wrapper`); + return RunJobResult.PermanentFailure; + } + + if (UserUtils.isUsFromCache(member)) { + return RunJobResult.Success; // nothing to do for us, we get the update from our user's libsession wrappers + } + let failed = true; + try { + if (this.persistedData.forceUnrevoke) { + const token = await MetaGroupWrapperActions.swarmSubAccountToken(groupPk, member); + const unrevokeSubRequest = new SubaccountUnrevokeSubRequest({ + groupPk, + revokeTokenHex: [token], + timestamp: NetworkTime.now(), + secretKey: group.secretKey, + }); + const sequenceResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + unrevokeSubRequest, + extraStoreRequests: [], + }); + if (sequenceResult !== RunJobResult.Success) { + throw new Error( + 'GroupInviteJob: SubaccountUnrevokeSubRequest push() did not return success' + ); + } + } + + const inviteDetails = inviteAsAdmin + ? await SnodeGroupSignature.getGroupPromoteMessage({ + groupName: group.name, + member, + secretKey: group.secretKey, + groupPk, + }) + : await SnodeGroupSignature.getGroupInviteMessage({ + groupName: group.name, + member, + secretKey: group.secretKey, + groupPk, + }); + + const storedAt = await MessageQueue.use().sendTo1o1NonDurably({ + message: inviteDetails, + namespace: SnodeNamespaces.Default, + pubkey: PubKey.cast(member), + }); + if (storedAt !== null) { + failed = false; + } + } catch (e) { + window.log.warn( + `${jobType} with groupPk:"${groupPk}" member: ${member} id:"${identifier}" failed with ${e.message}` + ); + failed = true; + } finally { + window.log.info( + `${jobType} with groupPk:"${groupPk}" member: ${member} id:"${identifier}" finished. failed:${failed}` + ); + try { + await MetaGroupWrapperActions.memberSetInvited(groupPk, member, failed); + // Depending on this field, we either send an invite or an invite-as-admin message. 
+ // When we do send an invite-as-admin we also need to update the promoted state, so that the invited members + // knows he needs to accept the promotion when accepting the invite + if (inviteAsAdmin) { + if (failed) { + await MetaGroupWrapperActions.memberSetPromotionFailed(groupPk, member); + } else { + await MetaGroupWrapperActions.memberSetPromotionSent(groupPk, member); + } + } + } catch (e) { + window.log.warn('GroupInviteJob memberSetInvited failed with', e.message); + } + + updateFailedStateForMember(groupPk, member, failed); + + if (inviteAsAdmin) { + window?.inboxStore?.dispatch( + groupInfoActions.setPromotionPending({ groupPk, pubkey: member, sending: false }) + ); + } else { + window?.inboxStore?.dispatch( + groupInfoActions.setInvitePending({ groupPk, pubkey: member, sending: false }) + ); + } + window?.inboxStore?.dispatch( + groupInfoActions.refreshGroupDetailsFromWrapper({ groupPk }) as any + ); + await LibSessionUtil.saveDumpsToDb(groupPk); + } + // return true so this job is marked as a success and we don't need to retry it + return RunJobResult.Success; + } + + public serializeJob(): GroupInvitePersistedData { + return super.serializeBase(); + } + + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public addJobCheck(jobs: Array): AddJobCheckReturn { + // avoid adding the same job if the exact same one is already planned + const hasSameJob = jobs.some(j => { + return j.groupPk === this.persistedData.groupPk && j.member === this.persistedData.member; + }); + + if (hasSameJob) { + return 'skipAddSameJobPresent'; + } + + return null; + } + + public getJobTimeoutMs(): number { + return 15000; + } +} + +export const GroupInvite = { + GroupInviteJob, + addJob, +}; +function updateFailedStateForMember(groupPk: GroupPubkeyType, member: PubkeyType, failed: boolean) { + let thisGroupFailure = invitesFailed.get(groupPk); + + if (!failed) { + // invite sent success, remove a pending failure state from the list of toasts to display + if (thisGroupFailure) { + thisGroupFailure.failedMembers = difference(thisGroupFailure.failedMembers, [member]); + } + + return; + } + // invite sent failed, append the member to that groupFailure member list, and trigger the debounce call + if (!thisGroupFailure) { + thisGroupFailure = { + failedMembers: [], + debouncedCall: debounce(displayFailedInvitesForGroup, 1000), // TODO change to 5000 + }; + } + + if (!thisGroupFailure.failedMembers.includes(member)) { + thisGroupFailure.failedMembers.push(member); + } + + invitesFailed.set(groupPk, thisGroupFailure); + thisGroupFailure.debouncedCall(groupPk); +} diff --git a/ts/session/utils/job_runners/jobs/GroupPendingRemovalsJob.ts b/ts/session/utils/job_runners/jobs/GroupPendingRemovalsJob.ts new file mode 100644 index 0000000000..7005d9c887 --- /dev/null +++ b/ts/session/utils/job_runners/jobs/GroupPendingRemovalsJob.ts @@ -0,0 +1,279 @@ +/* eslint-disable no-await-in-loop */ +import { WithGroupPubkey } from 'libsession_util_nodejs'; +import { compact, isEmpty, isNumber } from 'lodash'; +import { v4 } from 'uuid'; +import { StringUtils } from '../..'; +import { Data } from '../../../../data/data'; +import { deleteMessagesFromSwarmOnly } from '../../../../interactions/conversations/unsendingInteractions'; +import { + MetaGroupWrapperActions, + MultiEncryptWrapperActions, + UserGroupsWrapperActions, +} from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { + StoreGroupMessageSubRequest, + StoreGroupRevokedRetrievableSubRequest, +} from 
'../../../apis/snode_api/SnodeRequestTypes'; +import { StoreGroupRequestFactory } from '../../../apis/snode_api/factories/StoreGroupRequestFactory'; +import { RevokeChanges, SnodeAPIRevoke } from '../../../apis/snode_api/revokeSubaccount'; +import { concatUInt8Array, getSodiumRenderer } from '../../../crypto'; +import { GroupUpdateDeleteMemberContentMessage } from '../../../messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateDeleteMemberContentMessage'; +import { MessageSender } from '../../../sending'; +import { fromHexToArray } from '../../String'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + GroupPendingRemovalsPersistedData, + PersistedJob, + RunJobResult, +} from '../PersistedJob'; +import { GroupSync } from './GroupSyncJob'; +import { NetworkTime } from '../../../../util/NetworkTime'; +import { + WithAddWithHistoryMembers, + WithAddWithoutHistoryMembers, + WithRemoveMembers, + WithSecretKey, +} from '../../../types/with'; + +const defaultMsBetweenRetries = 10000; +const defaultMaxAttempts = 1; + +type JobExtraArgs = Pick; + +async function addJob({ groupPk }: JobExtraArgs) { + const pendingRemovalJob = new GroupPendingRemovalsJob({ + groupPk, + nextAttemptTimestamp: Date.now() + 1000, // postpone by 1s + }); + window.log.debug(`addGroupPendingRemovalJob: adding group pending removal for ${groupPk} `); + await runners.groupPendingRemovalJobRunner.addJob(pendingRemovalJob); +} + +async function getPendingRevokeParams({ + withoutHistory, + withHistory, + removed, + groupPk, + secretKey, +}: WithGroupPubkey & + WithSecretKey & + WithAddWithoutHistoryMembers & + WithAddWithHistoryMembers & + WithRemoveMembers) { + const revokeChanges: RevokeChanges = []; + const unrevokeChanges: RevokeChanges = []; + + for (let index = 0; index < withoutHistory.length; index++) { + const m = withoutHistory[index]; + const token = await MetaGroupWrapperActions.swarmSubAccountToken(groupPk, m); + unrevokeChanges.push({ action: 'unrevoke_subaccount', tokenToRevokeHex: token }); + } + for (let index = 0; index < withHistory.length; index++) { + const m = withHistory[index]; + const token = await MetaGroupWrapperActions.swarmSubAccountToken(groupPk, m); + unrevokeChanges.push({ action: 'unrevoke_subaccount', tokenToRevokeHex: token }); + } + for (let index = 0; index < removed.length; index++) { + const m = removed[index]; + const token = await MetaGroupWrapperActions.swarmSubAccountToken(groupPk, m); + revokeChanges.push({ action: 'revoke_subaccount', tokenToRevokeHex: token }); + } + + return SnodeAPIRevoke.getRevokeSubaccountParams(groupPk, secretKey, { + revokeChanges, + unrevokeChanges, + }); +} + +class GroupPendingRemovalsJob extends PersistedJob { + constructor({ + groupPk, + nextAttemptTimestamp, + maxAttempts, + currentRetry, + identifier, + }: Pick & + Partial< + Pick< + GroupPendingRemovalsPersistedData, + | 'nextAttemptTimestamp' + | 'identifier' + | 'maxAttempts' + | 'delayBetweenRetries' + | 'currentRetry' + > + >) { + super({ + jobType: 'GroupPendingRemovalJobType', + identifier: identifier || v4(), + groupPk, + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now() + defaultMsBetweenRetries, + currentRetry: isNumber(currentRetry) ? 
currentRetry : 0, + }); + } + + public async run() { + const { groupPk, jobType, identifier } = this.persistedData; + + window.log.info(`running job ${jobType} with groupPk:"${groupPk}" id:"${identifier}" `); + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || isEmpty(group.secretKey)) { + window.log.warn( + `GroupPendingRemovalsJob: Did not find group in wrapper or no valid info in wrapper` + ); + return RunJobResult.PermanentFailure; + } + + try { + const pendingRemovals = await MetaGroupWrapperActions.memberGetAllPendingRemovals(groupPk); + + if (!pendingRemovals.length) { + return RunJobResult.Success; + } + const deleteMessagesOfMembers = pendingRemovals + .filter(m => m.removedStatus === 'REMOVED_MEMBER_AND_MESSAGES') + .map(m => m.pubkeyHex); + + const sessionIdsHex = pendingRemovals.map(m => m.pubkeyHex); + const sessionIds = sessionIdsHex.map(m => fromHexToArray(m).slice(1)); + const currentGen = await MetaGroupWrapperActions.keyGetCurrentGen(groupPk); + const dataToEncrypt = sessionIds.map(s => { + return concatUInt8Array(s, StringUtils.stringToUint8Array(`${currentGen}`)); + }); + + const multiEncryptedMessage = await MultiEncryptWrapperActions.multiEncrypt({ + messages: dataToEncrypt, + recipients: sessionIds, + ed25519SecretKey: group.secretKey, + domain: 'SessionGroupKickedMessage', + }); + // first, get revoke requests that need to be pushed for leaving member + const revokeUnrevokeParams = await getPendingRevokeParams({ + groupPk, + withHistory: [], + withoutHistory: [], + removed: sessionIdsHex, + secretKey: group.secretKey, + }); + + const multiEncryptRequest = new StoreGroupRevokedRetrievableSubRequest({ + encryptedData: multiEncryptedMessage, + groupPk, + secretKey: group.secretKey, + }); + + const revokeRequests = compact([ + revokeUnrevokeParams.revokeSubRequest ? revokeUnrevokeParams.revokeSubRequest : null, + revokeUnrevokeParams.unrevokeSubRequest ? revokeUnrevokeParams.unrevokeSubRequest : null, + ]); + let storeRequests: Array = []; + if (deleteMessagesOfMembers.length) { + const deleteContentMsg = new GroupUpdateDeleteMemberContentMessage({ + createAtNetworkTimestamp: NetworkTime.now(), + expirationType: 'unknown', // GroupUpdateDeleteMemberContentMessage this is not displayed so not expiring. + expireTimer: 0, + groupPk, + memberSessionIds: deleteMessagesOfMembers, + messageHashes: [], + sodium: await getSodiumRenderer(), + secretKey: group.secretKey, + }); + storeRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [deleteContentMsg], + { authData: null, secretKey: group.secretKey } + ); + } + + const sortedSubRequests = compact([multiEncryptRequest, ...revokeRequests, ...storeRequests]); + const result = await MessageSender.sendEncryptedDataToSnode({ + sortedSubRequests, + destination: groupPk, + method: 'sequence', + }); + + if ( + !result || + result.length !== sortedSubRequests.length || + result.some(m => m.code !== 200) + ) { + window.log.warn( + 'GroupPendingRemovalsJob: sendEncryptedDataToSnode unexpected result length or content. 
Scheduling retry if possible' + ); + return RunJobResult.RetryJobIfPossible; + } + + // both requests success, remove the members from the group member entirely and sync + await MetaGroupWrapperActions.memberEraseAndRekey(groupPk, sessionIdsHex); + await GroupSync.queueNewJobIfNeeded(groupPk); + + try { + if (deleteMessagesOfMembers.length) { + const models = await Data.findAllMessageFromSendersInConversation({ + groupPk, + toRemove: deleteMessagesOfMembers, + signatureTimestamp: NetworkTime.now(), + }); + + const messageHashes = compact(models.map(m => m.getMessageHash())); + + if (messageHashes.length) { + await deleteMessagesFromSwarmOnly(messageHashes, groupPk); + } + for (let index = 0; index < models.length; index++) { + const messageModel = models[index]; + try { + // eslint-disable-next-line no-await-in-loop + await messageModel.markAsDeleted(); + } catch (e) { + window.log.warn( + `GroupPendingRemoval markAsDeleted of ${messageModel.getMessageHash()} failed with`, + e.message + ); + } + } + } + } catch (e) { + window.log.warn('GroupPendingRemovalsJob allowed to fail part failed with:', e.message); + } + + // return true so this job is marked as a success and we don't need to retry it + return RunJobResult.Success; + } catch (e) { + window.log.warn('GroupPendingRemovalsJob failed with', e.message); + return RunJobResult.RetryJobIfPossible; + } + } + + public serializeJob() { + return super.serializeBase(); + } + + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public addJobCheck(jobs: Array): AddJobCheckReturn { + // avoid adding the same job if the exact same one is already planned + const hasSameJob = jobs.some(j => { + return j.groupPk === this.persistedData.groupPk; + }); + + if (hasSameJob) { + return 'skipAddSameJobPresent'; + } + + return null; + } + + public getJobTimeoutMs(): number { + return 15000; + } +} + +export const GroupPendingRemovals = { + addJob, + getPendingRevokeParams, +}; diff --git a/ts/session/utils/job_runners/jobs/GroupPromoteJob.ts b/ts/session/utils/job_runners/jobs/GroupPromoteJob.ts new file mode 100644 index 0000000000..d36f6306ee --- /dev/null +++ b/ts/session/utils/job_runners/jobs/GroupPromoteJob.ts @@ -0,0 +1,163 @@ +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { isNumber } from 'lodash'; +import { v4 } from 'uuid'; +import { UserUtils } from '../..'; +import { groupInfoActions } from '../../../../state/ducks/metaGroups'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { SnodeNamespaces } from '../../../apis/snode_api/namespaces'; +import { SnodeGroupSignature } from '../../../apis/snode_api/signature/groupSignature'; +import { PubKey } from '../../../types'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + GroupPromotePersistedData, + PersistedJob, + RunJobResult, +} from '../PersistedJob'; +import { MessageQueue } from '../../../sending'; + +const defaultMsBetweenRetries = 10000; +const defaultMaxAttempts = 1; + +type JobExtraArgs = { + groupPk: GroupPubkeyType; + member: PubkeyType; +}; + +export function shouldAddJob(args: JobExtraArgs) { + if (UserUtils.isUsFromCache(args.member)) { + return false; + } + + return true; +} + +async function addJob({ groupPk, member }: JobExtraArgs) { + if (shouldAddJob({ groupPk, member })) { + const groupPromoteJob = new GroupPromoteJob({ + groupPk, + member, + nextAttemptTimestamp: Date.now(), + }); + 
window.log.debug(`addGroupPromoteJob: adding group promote for ${groupPk}:${member} `); + await runners.groupPromoteJobRunner.addJob(groupPromoteJob); + window?.inboxStore?.dispatch( + groupInfoActions.setPromotionPending({ groupPk, pubkey: member, sending: true }) + ); + } +} + +class GroupPromoteJob extends PersistedJob { + constructor({ + groupPk, + member, + nextAttemptTimestamp, + maxAttempts, + currentRetry, + identifier, + }: Pick & + Partial< + Pick< + GroupPromotePersistedData, + | 'nextAttemptTimestamp' + | 'identifier' + | 'maxAttempts' + | 'delayBetweenRetries' + | 'currentRetry' + > + >) { + super({ + jobType: 'GroupPromoteJobType', + identifier: identifier || v4(), + member, + groupPk, + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now() + defaultMsBetweenRetries, + currentRetry: isNumber(currentRetry) ? currentRetry : 0, + }); + } + + public async run(): Promise { + const { groupPk, member, jobType, identifier } = this.persistedData; + + window.log.info( + `running job ${jobType} with groupPk:"${groupPk}" member: ${member} id:"${identifier}" ` + ); + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || !group.name) { + window.log.warn(`GroupPromoteJob: Did not find group in wrapper or no valid info in wrapper`); + return RunJobResult.PermanentFailure; + } + + if (UserUtils.isUsFromCache(member)) { + return RunJobResult.Success; + } + let failed = true; + try { + const message = await SnodeGroupSignature.getGroupPromoteMessage({ + member, + secretKey: group.secretKey, + groupPk, + groupName: group.name, + }); + + const storedAt = await MessageQueue.use().sendTo1o1NonDurably({ + message, + namespace: SnodeNamespaces.Default, + pubkey: PubKey.cast(member), + }); + if (storedAt !== null) { + failed = false; + } + } finally { + window?.inboxStore?.dispatch( + groupInfoActions.setPromotionPending({ groupPk, pubkey: member, sending: false }) + ); + try { + if (failed) { + await MetaGroupWrapperActions.memberSetPromotionFailed(groupPk, member); + } else { + await MetaGroupWrapperActions.memberSetPromotionSent(groupPk, member); + } + } catch (e) { + window.log.warn('GroupPromoteJob memberSetPromoted failed with', e.message); + } + } + // return true so this job is marked as a success and we don't need to retry it + return RunJobResult.Success; + } + + public serializeJob(): GroupPromotePersistedData { + return super.serializeBase(); + } + + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public addJobCheck(jobs: Array): AddJobCheckReturn { + // avoid adding the same job if the exact same one is already planned + const hasSameJob = jobs.some(j => { + return j.groupPk === this.persistedData.groupPk && j.member === this.persistedData.member; + }); + + if (hasSameJob) { + return 'skipAddSameJobPresent'; + } + + return null; + } + + public getJobTimeoutMs(): number { + return 15000; + } +} + +export const GroupPromote = { + GroupPromoteJob, + addJob, +}; diff --git a/ts/session/utils/job_runners/jobs/GroupSyncJob.ts b/ts/session/utils/job_runners/jobs/GroupSyncJob.ts new file mode 100644 index 0000000000..bbdfa16996 --- /dev/null +++ b/ts/session/utils/job_runners/jobs/GroupSyncJob.ts @@ -0,0 +1,325 @@ +/* eslint-disable no-await-in-loop */ +import { GroupPubkeyType, WithGroupPubkey } from 'libsession_util_nodejs'; +import { to_hex } from 'libsodium-wrappers-sumo'; +import { compact, isArray, 
isEmpty, isNumber } from 'lodash'; +import { UserUtils } from '../..'; +import { assertUnreachable } from '../../../../types/sqlSharedTypes'; +import { isSignInByLinking } from '../../../../util/storage'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { + DeleteAllFromGroupMsgNodeSubRequest, + DeleteHashesFromGroupNodeSubRequest, + StoreGroupKeysSubRequest, + StoreGroupMessageSubRequest, + SubaccountRevokeSubRequest, + SubaccountUnrevokeSubRequest, +} from '../../../apis/snode_api/SnodeRequestTypes'; +import { DeleteGroupHashesFactory } from '../../../apis/snode_api/factories/DeleteGroupHashesRequestFactory'; +import { StoreGroupRequestFactory } from '../../../apis/snode_api/factories/StoreGroupRequestFactory'; +import { SnodeNamespaces } from '../../../apis/snode_api/namespaces'; +import { WithRevokeSubRequest } from '../../../apis/snode_api/types'; +import { ConvoHub } from '../../../conversations'; +import { MessageSender } from '../../../sending/MessageSender'; +import { PubKey } from '../../../types'; +import { allowOnlyOneAtATime } from '../../Promise'; +import { ed25519Str } from '../../String'; +import { GroupSuccessfulChange, LibSessionUtil } from '../../libsession/libsession_utils'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + GroupSyncPersistedData, + PersistedJob, + RunJobResult, +} from '../PersistedJob'; + +const defaultMsBetweenRetries = 15000; // a long time between retries, to avoid running multiple jobs at the same time, when one was postponed at the same time as one already planned (5s) +const defaultMaxAttempts = 2; + +/** + * We want to run each of those jobs at least 3 seconds apart. + * So every time one of that job finishes, update this timestamp, so we know when adding a new job, what is the next minimum date to run it. 
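 + * e.g. with defaultMsBetweenRetries at 15s, a group whose last sync finished 10s ago gets its next job scheduled roughly 5s from now (see queueNewJobIfNeeded below).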
+ */ +const lastRunConfigSyncJobTimestamps = new Map(); + +async function confirmPushedAndDump( + changes: Array, + groupPk: GroupPubkeyType +): Promise { + const toConfirm: Parameters = [ + groupPk, + { groupInfo: null, groupMember: null }, + ]; + for (let i = 0; i < changes.length; i++) { + const change = changes[i]; + const namespace = change.pushed.namespace; + switch (namespace) { + case SnodeNamespaces.ClosedGroupInfo: { + if (change.pushed.seqno) { + toConfirm[1].groupInfo = [change.pushed.seqno.toNumber(), change.updatedHash]; + } + break; + } + case SnodeNamespaces.ClosedGroupMembers: { + toConfirm[1].groupMember = [change.pushed.seqno.toNumber(), change.updatedHash]; + break; + } + case SnodeNamespaces.ClosedGroupKeys: { + // TODO chunk 2 closed group + break; + } + default: + assertUnreachable(namespace, 'buildAndSaveDumpsToDB assertUnreachable'); + } + } + + await MetaGroupWrapperActions.metaConfirmPushed(...toConfirm); + return LibSessionUtil.saveDumpsToDb(groupPk); +} + +async function pushChangesToGroupSwarmIfNeeded({ + revokeSubRequest, + unrevokeSubRequest, + groupPk, + supplementalKeysSubRequest, + deleteAllMessagesSubRequest, + extraStoreRequests, +}: WithGroupPubkey & + WithRevokeSubRequest & { + supplementalKeysSubRequest?: StoreGroupKeysSubRequest; + deleteAllMessagesSubRequest?: DeleteAllFromGroupMsgNodeSubRequest; + extraStoreRequests: Array; + }): Promise { + // save the dumps to DB even before trying to push them, so at least we have an up to date dumps in the DB in case of crash, no network etc + await LibSessionUtil.saveDumpsToDb(groupPk); + const { allOldHashes, messages: pendingConfigData } = + await LibSessionUtil.pendingChangesForGroup(groupPk); + // If there are no pending changes nor any requests to be made, + // then the job can just complete (next time something is updated we want + // to try and run immediately so don't schedule another run in this case) + if ( + isEmpty(pendingConfigData) && + isEmpty(supplementalKeysSubRequest) && + isEmpty(revokeSubRequest) && + isEmpty(unrevokeSubRequest) && + isEmpty(deleteAllMessagesSubRequest) && + isEmpty(extraStoreRequests) + ) { + window.log.debug(`pushChangesToGroupSwarmIfNeeded: ${ed25519Str(groupPk)}: nothing to push`); + return RunJobResult.Success; + } + + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group) { + window.log.debug(`pushChangesToGroupSwarmIfNeeded: ${ed25519Str(groupPk)}: group not found`); + return RunJobResult.Success; + } + + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + const dumps = await MetaGroupWrapperActions.metaMakeDump(groupPk); + window.log.info( + `pushChangesToGroupSwarmIfNeeded: current meta dump: ${ed25519Str(groupPk)}:`, + to_hex(dumps) + ); + } + + const pendingConfigRequests = StoreGroupRequestFactory.makeStoreGroupConfigSubRequest({ + group, + pendingConfigData, + }); + + const deleteHashesSubRequest = DeleteGroupHashesFactory.makeGroupHashesToDeleteSubRequest({ + group, + messagesHashes: allOldHashes, + }); + const extraRequests = compact([ + deleteHashesSubRequest, + revokeSubRequest, + unrevokeSubRequest, + deleteAllMessagesSubRequest, + ]); + + const extraRequestWithExpectedResults = extraRequests.filter( + m => + m instanceof SubaccountRevokeSubRequest || + m instanceof SubaccountUnrevokeSubRequest || + m instanceof DeleteAllFromGroupMsgNodeSubRequest || + m instanceof DeleteHashesFromGroupNodeSubRequest + ); + + const sortedSubRequests = compact([ + supplementalKeysSubRequest, // this needs to be stored first + 
...pendingConfigRequests, // groupKeys are first in this array, so all good, then groupInfos are next + ...extraStoreRequests, // this can be stored anytime + ...extraRequests, + ]); + + const result = await MessageSender.sendEncryptedDataToSnode({ + // Note: this is on purpose that supplementalKeysSubRequest is before pendingConfigRequests + // as this is to avoid a race condition where a device is polling right + // while we are posting the configs (already encrypted with the new keys) + sortedSubRequests, + destination: groupPk, + method: 'sequence', + }); + + const expectedReplyLength = + (supplementalKeysSubRequest ? 1 : 0) + // we are sending all the supplemental keys as a single sub request + pendingConfigRequests.length + // each of those are sent as a sub request + extraStoreRequests.length + // each of those are sent as a sub request + extraRequestWithExpectedResults.length; // each of those are sent as a sub request, but they don't all return something... + + // we do a sequence call here. If we do not have the right expected number of results, consider it a failure + if (!isArray(result) || result.length !== expectedReplyLength) { + window.log.info( + `GroupSyncJob: unexpected result length: expected ${expectedReplyLength} but got ${result?.length}` + ); + + // this might be a 421 error (already handled) so let's retry this request a little bit later + return RunJobResult.RetryJobIfPossible; + } + + const changes = LibSessionUtil.batchResultsToGroupSuccessfulChange(result, { + allOldHashes, + messages: pendingConfigData, + }); + + if ((allOldHashes.size || pendingConfigData.length) && isEmpty(changes)) { + return RunJobResult.RetryJobIfPossible; + } + + // Now that we have the successful changes, we need to mark them as pushed and + // generate any config dumps which need to be stored + await confirmPushedAndDump(changes, groupPk); + return RunJobResult.Success; +} + +class GroupSyncJob extends PersistedJob { + constructor({ + identifier, // this has to be the pubkey to which we + nextAttemptTimestamp, + maxAttempts, + currentRetry, + }: Pick & + Partial< + Pick + >) { + super({ + jobType: 'GroupSyncJobType', + identifier, + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + currentRetry: isNumber(currentRetry) ? 
currentRetry : 0, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now(), + }); + } + + public async run(): Promise { + const start = Date.now(); + const thisJobDestination = this.persistedData.identifier; + + try { + if (!PubKey.is03Pubkey(thisJobDestination)) { + return RunJobResult.PermanentFailure; + } + + window.log.debug(`GroupSyncJob starting ${thisJobDestination}`); + + const us = UserUtils.getOurPubKeyStrFromCache(); + const ed25519Key = await UserUtils.getUserED25519KeyPairBytes(); + const conversation = ConvoHub.use().get(us); + if (!us || !conversation || !ed25519Key) { + // we check for ed25519Key because it is needed for authenticated requests + window.log.warn('did not find our own conversation'); + return RunJobResult.PermanentFailure; + } + + // return await so we catch exceptions in here + return await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk: thisJobDestination, + extraStoreRequests: [], + }); + + // eslint-disable-next-line no-useless-catch + } catch (e) { + window.log.warn('GroupSyncJob failed with', e.message); + return RunJobResult.RetryJobIfPossible; + } finally { + window.log.debug( + `GroupSyncJob ${ed25519Str(thisJobDestination)} run() took ${Date.now() - start}ms` + ); + + // this is a simple way to make sure whatever happens here, we update the latest timestamp. + // (a finally block is always executed, no matter whether the try/catch block throws or returns) + this.updateLastTickTimestamp(); + } + } + + public serializeJob(): GroupSyncPersistedData { + const fromParent = super.serializeBase(); + return fromParent; + } + + public addJobCheck(jobs: Array): AddJobCheckReturn { + return this.addJobCheckSameTypeAndIdentifierPresent(jobs); + } + + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public getJobTimeoutMs(): number { + return 20000; + } + + private updateLastTickTimestamp() { + lastRunConfigSyncJobTimestamps.set(this.persistedData.identifier, Date.now()); + } +} + +/** + * Queue a new GroupSyncJob if needed. + * A GroupSyncJob can only be added if there is none of the same type queued already. 
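 + * Deduplication is per group: addJobCheck relies on addJobCheckSameTypeAndIdentifierPresent and the job identifier is the group pubkey, so at most one GroupSyncJob is queued per group at a time.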
+ */ +async function queueNewJobIfNeeded(groupPk: GroupPubkeyType) { + if (isSignInByLinking()) { + window.log.info(`NOT Scheduling GroupSyncJob for ${groupPk} as we are linking a device`); + + return; + } + const lastRunConfigSyncJobTimestamp = lastRunConfigSyncJobTimestamps.get(groupPk); + if ( + !lastRunConfigSyncJobTimestamp || + lastRunConfigSyncJobTimestamp < Date.now() - defaultMsBetweenRetries + ) { + // window.log.debug('Scheduling GroupSyncJob: ASAP'); + // we postpone by 1000ms to make sure whoever is adding this job is done with what is needs to do first + // this call will make sure that there is only one configuration sync job at all times + await runners.groupSyncRunner.addJob( + new GroupSyncJob({ identifier: groupPk, nextAttemptTimestamp: Date.now() + 1000 }) + ); + return; + } + + // if we did run at t=100, and it is currently t=110, the difference is 10 + const diff = Math.max(Date.now() - lastRunConfigSyncJobTimestamp, 0); + // but we want to run every 30, so what we need is actually `30-10` from now = 20 + const leftBeforeNextTick = Math.max(defaultMsBetweenRetries - diff, 1000); + // window.log.debug('Scheduling GroupSyncJob: LATER'); + + await runners.groupSyncRunner.addJob( + new GroupSyncJob({ + identifier: groupPk, + nextAttemptTimestamp: Date.now() + leftBeforeNextTick, + }) + ); +} + +export const GroupSync = { + GroupSyncJob, + pushChangesToGroupSwarmIfNeeded, + queueNewJobIfNeeded: (groupPk: GroupPubkeyType) => + allowOnlyOneAtATime(`GroupSyncJob-oneAtAtTime-${groupPk}`, () => queueNewJobIfNeeded(groupPk)), +}; diff --git a/ts/session/utils/job_runners/jobs/JobRunnerType.ts b/ts/session/utils/job_runners/jobs/JobRunnerType.ts index 29ed4a8372..7179856eac 100644 --- a/ts/session/utils/job_runners/jobs/JobRunnerType.ts +++ b/ts/session/utils/job_runners/jobs/JobRunnerType.ts @@ -1,7 +1,11 @@ export type JobRunnerType = - | 'ConfigurationSyncJob' + | 'UserSyncJob' + | 'GroupSyncJob' | 'FetchMsgExpirySwarmJob' | 'UpdateMsgExpirySwarmJob' | 'FakeSleepForJob' | 'FakeSleepForMultiJob' - | 'AvatarDownloadJob'; + | 'AvatarDownloadJob' + | 'GroupInviteJob' + | 'GroupPromoteJob' + | 'GroupPendingRemovalJob'; diff --git a/ts/session/utils/job_runners/jobs/UserSyncJob.ts b/ts/session/utils/job_runners/jobs/UserSyncJob.ts new file mode 100644 index 0000000000..2b3cdea7bc --- /dev/null +++ b/ts/session/utils/job_runners/jobs/UserSyncJob.ts @@ -0,0 +1,261 @@ +/* eslint-disable no-await-in-loop */ +import { PubkeyType } from 'libsession_util_nodejs'; +import { compact, isArray, isEmpty, isNumber, isString } from 'lodash'; +import { v4 } from 'uuid'; +import { to_hex } from 'libsodium-wrappers-sumo'; +import { UserUtils } from '../..'; +import { ConfigDumpData } from '../../../../data/configDump/configDump'; +import { UserSyncJobDone } from '../../../../shims/events'; +import { isSignInByLinking } from '../../../../util/storage'; +import { GenericWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { + DeleteHashesFromUserNodeSubRequest, + StoreUserConfigSubRequest, +} from '../../../apis/snode_api/SnodeRequestTypes'; +import { DURATION, TTL_DEFAULT } from '../../../constants'; +import { ConvoHub } from '../../../conversations'; +import { MessageSender } from '../../../sending/MessageSender'; +import { allowOnlyOneAtATime } from '../../Promise'; +import { LibSessionUtil, UserSuccessfulChange } from '../../libsession/libsession_utils'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + PersistedJob, + 
RunJobResult, + UserSyncPersistedData, +} from '../PersistedJob'; + +const defaultMsBetweenRetries = 5 * DURATION.SECONDS; // a long time between retries, to avoid running multiple jobs at the same time, when one was postponed at the same time as one already planned (5s) +const defaultMaxAttempts = 2; + +/** + * We want to run each of those jobs at least 3 seconds apart. + * So every time one of that job finishes, update this timestamp, so we know when adding a new job, what is the next minimum date to run it. + */ +let lastRunConfigSyncJobTimestamp: number | null = null; + +async function confirmPushedAndDump( + changes: Array, + us: string +): Promise { + for (let i = 0; i < changes.length; i++) { + const change = changes[i]; + const variant = LibSessionUtil.userNamespaceToVariant(change.pushed.namespace); + await GenericWrapperActions.confirmPushed( + variant, + change.pushed.seqno.toNumber(), + change.updatedHash + ); + } + + const { requiredUserVariants } = LibSessionUtil; + for (let index = 0; index < requiredUserVariants.length; index++) { + const variant = requiredUserVariants[index]; + const needsDump = await GenericWrapperActions.needsDump(variant); + + if (!needsDump) { + continue; + } + const dump = await GenericWrapperActions.dump(variant); + await ConfigDumpData.saveConfigDump({ + data: dump, + publicKey: us, + variant, + }); + } +} + +function triggerConfSyncJobDone() { + window.Whisper.events.trigger(UserSyncJobDone); +} + +function isPubkey(us: unknown): us is PubkeyType { + return isString(us) && us.startsWith('05'); +} + +async function pushChangesToUserSwarmIfNeeded() { + const us = UserUtils.getOurPubKeyStrFromCache(); + if (!isPubkey(us)) { + throw new Error('invalid user pubkey, not right prefix'); + } + + // save the dumps to DB even before trying to push them, so at least we have an up to date dumps in the DB in case of crash, no network etc + await LibSessionUtil.saveDumpsToDb(us); + const changesToPush = await LibSessionUtil.pendingChangesForUs(); + + // If there are no pending changes then the job can just complete (next time something + // is updated we want to try and run immediately so don't schedule another run in this case) + if (isEmpty(changesToPush?.messages)) { + triggerConfSyncJobDone(); + return RunJobResult.Success; + } + + const storeRequests = changesToPush.messages.map(m => { + return new StoreUserConfigSubRequest({ + encryptedData: m.ciphertext, + namespace: m.namespace, + ttlMs: TTL_DEFAULT.CONFIG_MESSAGE, + }); + }); + + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + for (let index = 0; index < LibSessionUtil.requiredUserVariants.length; index++) { + const variant = LibSessionUtil.requiredUserVariants[index]; + + window.log.info( + `pushChangesToUserSwarmIfNeeded: current dumps: ${variant}:`, + to_hex(await GenericWrapperActions.makeDump(variant)) + ); + } + } + + const deleteHashesSubRequest = changesToPush.allOldHashes.size + ? new DeleteHashesFromUserNodeSubRequest({ + messagesHashes: [...changesToPush.allOldHashes], + }) + : undefined; + + const result = await MessageSender.sendEncryptedDataToSnode({ + sortedSubRequests: compact([...storeRequests, deleteHashesSubRequest]), + destination: us, + method: 'sequence', + }); + + const expectedReplyLength = + changesToPush.messages.length + (changesToPush.allOldHashes.size ? 1 : 0); + // we do a sequence call here. 
If we do not have the right expected number of results, consider it a failure + if (!isArray(result) || result.length !== expectedReplyLength) { + window.log.info( + `UserSyncJob: unexpected result length: expected ${expectedReplyLength} but got ${result?.length}` + ); + // this might be a 421 error (already handled) so let's retry this request a little bit later + return RunJobResult.RetryJobIfPossible; + } + + const changes = LibSessionUtil.batchResultsToUserSuccessfulChange(result, changesToPush); + if (isEmpty(changes)) { + return RunJobResult.RetryJobIfPossible; + } + // Now that we have the successful changes, we need to mark them as pushed and + // generate any config dumps which need to be stored + + await confirmPushedAndDump(changes, us); + triggerConfSyncJobDone(); + return RunJobResult.Success; +} + +class UserSyncJob extends PersistedJob { + constructor({ + identifier, + nextAttemptTimestamp, + maxAttempts, + currentRetry, + }: Partial< + Pick< + UserSyncPersistedData, + 'identifier' | 'nextAttemptTimestamp' | 'currentRetry' | 'maxAttempts' + > + >) { + super({ + jobType: 'UserSyncJobType', + identifier: identifier || v4(), + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + currentRetry: isNumber(currentRetry) ? currentRetry : 0, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now(), + }); + } + + public async run(): Promise { + const start = Date.now(); + + try { + window.log.debug(`UserSyncJob starting ${this.persistedData.identifier}`); + + const us = UserUtils.getOurPubKeyStrFromCache(); + const ed25519Key = await UserUtils.getUserED25519KeyPairBytes(); + const conversation = ConvoHub.use().get(us); + if (!us || !conversation || !ed25519Key) { + // we check for ed25519Key because it is needed for authenticated requests + window.log.warn('did not find our own conversation'); + return RunJobResult.PermanentFailure; + } + + return await UserSync.pushChangesToUserSwarmIfNeeded(); + // eslint-disable-next-line no-useless-catch + } catch (e) { + throw e; + } finally { + window.log.debug(`UserSyncJob run() took ${Date.now() - start}ms`); + + // this is a simple way to make sure whatever happens here, we update the latest timestamp. + // (a finally block is always executed, no matter whether the try/catch block throws or returns) + this.updateLastTickTimestamp(); + } + } + + public serializeJob(): UserSyncPersistedData { + const fromParent = super.serializeBase(); + return fromParent; + } + + public addJobCheck(jobs: Array): AddJobCheckReturn { + return this.addJobCheckSameTypePresent(jobs); + } + + /** + * For the UserSync job, we do not care about the jobs already in the list. + * We never want to add a new sync configuration job if there is already one in the queue. + * This is done by the `addJobCheck` method above + */ + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public getJobTimeoutMs(): number { + return 20000; + } + + private updateLastTickTimestamp() { + lastRunConfigSyncJobTimestamp = Date.now(); + } +} + +/** + * Queue a new UserSyncJob if needed. + * A UserSyncJob can only be added if there is none of the same type queued already. 
+ */ +async function queueNewJobIfNeeded() { + if (isSignInByLinking()) { + window.log.info('NOT Scheduling ConfSyncJob: as we are linking a device'); + + return; + } + if ( + !lastRunConfigSyncJobTimestamp || + lastRunConfigSyncJobTimestamp < Date.now() - defaultMsBetweenRetries + ) { + // Note: we postpone by 3s for two reasons: + // - to make sure whoever is adding this job is done with what is needs to do first + // - to allow a recently created device to process incoming config messages before pushing a new one + // this call will make sure that there is only one configuration sync job at all times + await runners.userSyncRunner.addJob( + new UserSyncJob({ nextAttemptTimestamp: Date.now() + 3 * DURATION.SECONDS }) + ); + } else { + // if we did run at t=100, and it is currently t=110, the difference is 10 + const diff = Math.max(Date.now() - lastRunConfigSyncJobTimestamp, 0); + // but we want to run every 30, so what we need is actually `30-10` from now = 20 + const leftBeforeNextTick = Math.max(defaultMsBetweenRetries - diff, 1 * DURATION.SECONDS); + + await runners.userSyncRunner.addJob( + new UserSyncJob({ nextAttemptTimestamp: Date.now() + leftBeforeNextTick }) + ); + } +} + +export const UserSync = { + UserSyncJob, + pushChangesToUserSwarmIfNeeded, + queueNewJobIfNeeded: () => allowOnlyOneAtATime('UserSyncJob-oneAtAtTime', queueNewJobIfNeeded), +}; diff --git a/ts/session/utils/libsession/libsession_utils.ts b/ts/session/utils/libsession/libsession_utils.ts index 205bc7fc0a..9af909b4f8 100644 --- a/ts/session/utils/libsession/libsession_utils.ts +++ b/ts/session/utils/libsession/libsession_utils.ts @@ -1,41 +1,38 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable import/extensions */ /* eslint-disable import/no-unresolved */ -import { difference, omit } from 'lodash'; +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { from_hex } from 'libsodium-wrappers-sumo'; +import { compact, difference, isString, omit } from 'lodash'; import Long from 'long'; import { UserUtils } from '..'; import { ConfigDumpData } from '../../../data/configDump/configDump'; -import { SignalService } from '../../../protobuf'; import { assertUnreachable } from '../../../types/sqlSharedTypes'; -import { ConfigWrapperObjectTypes } from '../../../webworker/workers/browser/libsession_worker_functions'; -import { GenericWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; -import { GetNetworkTime } from '../../apis/snode_api/getNetworkTime'; -import { SnodeNamespaces } from '../../apis/snode_api/namespaces'; -import { SharedConfigMessage } from '../../messages/outgoing/controlMessage/SharedConfigMessage'; -import { ConfigurationSync } from '../job_runners/jobs/ConfigurationSyncJob'; - -const requiredUserVariants: Array = [ +import { + ConfigWrapperGroupDetailed, + ConfigWrapperUser, + isUserConfigWrapperType, +} from '../../../webworker/workers/browser/libsession_worker_functions'; +import { + GenericWrapperActions, + MetaGroupWrapperActions, +} from '../../../webworker/workers/browser/libsession_worker_interface'; +import { SnodeNamespaces, SnodeNamespacesUserConfig } from '../../apis/snode_api/namespaces'; +import { + BatchResultEntry, + NotEmptyArrayOfBatchResults, +} from '../../apis/snode_api/SnodeRequestTypes'; +import { PubKey } from '../../types'; +import { UserSync } from '../job_runners/jobs/UserSyncJob'; +import { ed25519Str } from '../String'; + +const requiredUserVariants: Array = [ 'UserConfig', 'ContactsConfig', 
'UserGroupsConfig', 'ConvoInfoVolatileConfig', ]; -export type IncomingConfResult = { - needsPush: boolean; - needsDump: boolean; - kind: SignalService.SharedConfigMessage.Kind; - publicKey: string; - // NOTE this is the latest sent timestamp of the config message - latestEnvelopeTimestamp: number; -}; - -export type OutgoingConfResult = { - message: SharedConfigMessage; - namespace: SnodeNamespaces; - oldMessageHashes: Array; -}; - /** * Initializes the libsession wrappers for the required user variants if the dumps are not already in the database. It will use an empty dump if the dump is not found. */ @@ -46,7 +43,7 @@ async function initializeLibSessionUtilWrappers() { } const privateKeyEd25519 = keypair.privKeyBytes; // let's plan a sync on start with some room for the app to be ready - setTimeout(() => ConfigurationSync.queueNewJobIfNeeded, 20000); + setTimeout(() => UserSync.queueNewJobIfNeeded, 20000); // fetch the dumps we already have from the database const dumps = await ConfigDumpData.getAllDumpsWithData(); @@ -55,27 +52,31 @@ async function initializeLibSessionUtilWrappers() { JSON.stringify(dumps.map(m => omit(m, 'data'))) ); - const userVariantsBuildWithoutErrors = new Set(); + const userVariantsBuildWithoutErrors = new Set(); // load the dumps retrieved from the database into their corresponding wrappers for (let index = 0; index < dumps.length; index++) { const dump = dumps[index]; - window.log.debug('initializeLibSessionUtilWrappers initing from dump', dump.variant); + const variant = dump.variant; + if (!isUserConfigWrapperType(variant)) { + continue; + } + window.log.debug('initializeLibSessionUtilWrappers initing from dump', variant); try { await GenericWrapperActions.init( - dump.variant, + variant, privateKeyEd25519, dump.data.length ? dump.data : null ); - userVariantsBuildWithoutErrors.add(dump.variant); + userVariantsBuildWithoutErrors.add(variant); } catch (e) { window.log.warn(`init of UserConfig failed with ${e.message} `); throw new Error(`initializeLibSessionUtilWrappers failed with ${e.message}`); } } - const missingRequiredVariants: Array = difference( + const missingRequiredVariants: Array = difference( LibSessionUtil.requiredUserVariants, [...userVariantsBuildWithoutErrors.values()] ); @@ -98,112 +99,336 @@ async function initializeLibSessionUtilWrappers() { `initializeLibSessionUtilWrappers: missingRequiredVariants "${missingVariant}" created` ); } + + // No need to load the meta group wrapper here. 
We will load them once the SessionInbox is loaded with a redux action } -async function pendingChangesForPubkey(pubkey: string): Promise> { - const dumps = await ConfigDumpData.getAllDumpsWithoutData(); - const us = UserUtils.getOurPubKeyStrFromCache(); - - // Ensure we always check the required user config types for changes even if there is no dump - // data yet (to deal with first launch cases) - if (pubkey === us) { - LibSessionUtil.requiredUserVariants.forEach(requiredVariant => { - if (!dumps.find(m => m.publicKey === us && m.variant === requiredVariant)) { - dumps.push({ - publicKey: us, - variant: requiredVariant, - }); - } - }); - } +type PendingChangesShared = { + ciphertext: Uint8Array; +}; + +export type PendingChangesForUs = PendingChangesShared & { + seqno: Long; + namespace: SnodeNamespacesUserConfig; +}; - const results: Array = []; - const variantsNeedingPush = new Set(); +type PendingChangesForGroupNonKey = PendingChangesShared & { + seqno: Long; + namespace: SnodeNamespaces.ClosedGroupInfo | SnodeNamespaces.ClosedGroupMembers; + type: Extract; +}; - for (let index = 0; index < dumps.length; index++) { - const dump = dumps[index]; - const variant = dump.variant; - const needsPush = await GenericWrapperActions.needsPush(variant); +type PendingChangesForGroupKey = { + ciphertext: Uint8Array; + namespace: SnodeNamespaces.ClosedGroupKeys; + type: Extract; +}; + +export type PendingChangesForGroup = PendingChangesForGroupNonKey | PendingChangesForGroupKey; + +type DestinationChanges = { + messages: Array; + allOldHashes: Set; +}; + +export type UserDestinationChanges = DestinationChanges; +export type GroupDestinationChanges = DestinationChanges; + +export type UserSuccessfulChange = { + pushed: PendingChangesForUs; + updatedHash: string; +}; +export type GroupSuccessfulChange = { + pushed: PendingChangesForGroup; + updatedHash: string; +}; + +/** + * Fetch what needs to be pushed for all of the current user's wrappers. + */ +async function pendingChangesForUs(): Promise { + const results: UserDestinationChanges = { messages: [], allOldHashes: new Set() }; + const variantsNeedingPush = new Set(); + const userVariants = LibSessionUtil.requiredUserVariants; + + for (let index = 0; index < userVariants.length; index++) { + const variant = userVariants[index]; + + const needsPush = await GenericWrapperActions.needsPush(variant); if (!needsPush) { continue; } + const { data, seqno, hashes, namespace } = await GenericWrapperActions.push(variant); variantsNeedingPush.add(variant); - const { data: readyToSendData, seqno, hashes } = await GenericWrapperActions.push(variant); + results.messages.push({ + ciphertext: data, + seqno: Long.fromNumber(seqno), + namespace, // we only use the namespace to know to wha + }); + + hashes.forEach(h => results.allOldHashes.add(h)); // add all the hashes to the set + } + window.log.info(`those user variants needs push: "${[...variantsNeedingPush]}"`); + + return results; +} - const kind = variantToKind(variant); +/** + * Fetch what needs to be pushed for the specified group public key. 
+ * @param groupPk the public key of the group to fetch the details off + * @returns an object with a list of messages to be pushed and the list of hashes to bump expiry, server side + */ +async function pendingChangesForGroup(groupPk: GroupPubkeyType): Promise { + if (!PubKey.is03Pubkey(groupPk)) { + throw new Error(`pendingChangesForGroup only works for user or 03 group pubkeys`); + } + // one of the wrapper behind the metagroup needs a push + const needsPush = await MetaGroupWrapperActions.needsPush(groupPk); + + // we probably need to add the GROUP_KEYS check here - const namespace = await GenericWrapperActions.storageNamespace(variant); + if (!needsPush) { + return { messages: [], allOldHashes: new Set() }; + } + const { groupInfo, groupMember, groupKeys } = await MetaGroupWrapperActions.push(groupPk); + const results = new Array(); + + // Note: We need the keys to be pushed first to avoid a race condition + if (groupKeys) { results.push({ - message: new SharedConfigMessage({ - readyToSendData, - kind, - seqno: Long.fromNumber(seqno), - timestamp: GetNetworkTime.getNowWithNetworkOffset(), - }), - oldMessageHashes: hashes, - namespace, + type: 'GroupKeys', + ciphertext: groupKeys.data, + namespace: groupKeys.namespace, }); } - window.log.info(`those variants needs push: "${[...variantsNeedingPush]}"`); - return results; + if (groupInfo) { + results.push({ + type: 'GroupInfo', + ciphertext: groupInfo.data, + seqno: Long.fromNumber(groupInfo.seqno), + namespace: groupInfo.namespace, + }); + } + if (groupMember) { + results.push({ + type: 'GroupMember', + ciphertext: groupMember.data, + seqno: Long.fromNumber(groupMember.seqno), + namespace: groupMember.namespace, + }); + } + window.log.debug( + `${ed25519Str(groupPk)} those group variants needs push: "${results.map(m => m.type)}"` + ); + + const memberHashes = compact(groupMember?.hashes) || []; + const infoHashes = compact(groupInfo?.hashes) || []; + const allOldHashes = new Set([...infoHashes, ...memberHashes]); + + return { messages: results, allOldHashes }; } -// eslint-disable-next-line consistent-return -function kindToVariant(kind: SignalService.SharedConfigMessage.Kind): ConfigWrapperObjectTypes { - switch (kind) { - case SignalService.SharedConfigMessage.Kind.USER_PROFILE: +/** + * Return the wrapperId associated with a specific namespace. + * WrapperIds are what we use in the database and with the libsession workers calls, and namespace is what we push to. + */ +function userNamespaceToVariant(namespace: SnodeNamespacesUserConfig) { + // TODO Might be worth migrating them to use directly the namespaces? 
+ switch (namespace) { + case SnodeNamespaces.UserProfile: return 'UserConfig'; - case SignalService.SharedConfigMessage.Kind.CONTACTS: + case SnodeNamespaces.UserContacts: return 'ContactsConfig'; - case SignalService.SharedConfigMessage.Kind.USER_GROUPS: + case SnodeNamespaces.UserGroups: return 'UserGroupsConfig'; - case SignalService.SharedConfigMessage.Kind.CONVO_INFO_VOLATILE: + case SnodeNamespaces.ConvoInfoVolatile: return 'ConvoInfoVolatileConfig'; default: - assertUnreachable(kind, `kindToVariant: Unsupported variant: "${kind}"`); + assertUnreachable(namespace, `userNamespaceToVariant: Unsupported namespace: "${namespace}"`); + throw new Error('userNamespaceToVariant: Unsupported namespace:'); // ts is not happy without this } } -// eslint-disable-next-line consistent-return -function variantToKind(variant: ConfigWrapperObjectTypes): SignalService.SharedConfigMessage.Kind { - switch (variant) { - case 'UserConfig': - return SignalService.SharedConfigMessage.Kind.USER_PROFILE; - case 'ContactsConfig': - return SignalService.SharedConfigMessage.Kind.CONTACTS; - case 'UserGroupsConfig': - return SignalService.SharedConfigMessage.Kind.USER_GROUPS; - case 'ConvoInfoVolatileConfig': - return SignalService.SharedConfigMessage.Kind.CONVO_INFO_VOLATILE; - default: - assertUnreachable(variant, `variantToKind: Unsupported kind: "${variant}"`); +function resultShouldBeIncluded( + msgPushed: T, + batchResult: BatchResultEntry +) { + const hash = batchResult.body?.hash; + if (batchResult.code === 200 && isString(hash) && msgPushed && msgPushed.ciphertext) { + return { + hash, + pushed: msgPushed, + }; } + return null; } /** - * Returns true if the config needs to be dumped afterwards + * This function is run once we get the results from the multiple batch-send for the group push. + * Note: the logic is the same as `batchResultsToUserSuccessfulChange` but I couldn't make typescript happy. */ -async function markAsPushed( - variant: ConfigWrapperObjectTypes, - pubkey: string, - seqno: number, - hash: string -) { +function batchResultsToGroupSuccessfulChange( + result: NotEmptyArrayOfBatchResults | null, + request: GroupDestinationChanges +): Array { + const successfulChanges: Array = []; + + /** + * For each batch request, we get as result + * - status code + hash of the new config message + * - status code of the delete of all messages as given by the request hashes. + * + * As it is a sequence, the delete might have failed but the new config message might still be posted. + * So we need to check which request failed, and if it is the delete by hashes, we need to add the hash of the posted message to the list of hashes + */ + if (!result?.length) { + return successfulChanges; + } + + for (let j = 0; j < result.length; j++) { + const msgPushed = request.messages?.[j]; + + const shouldBe = resultShouldBeIncluded(msgPushed, result[j]); + + if (shouldBe) { + // libsession keeps track of the hashes to push and the one pushed + successfulChanges.push({ + updatedHash: shouldBe.hash, + pushed: shouldBe.pushed, + }); + } + } + + return successfulChanges; +} + +/** + * This function is run once we get the results from the multiple batch-send for the user push. + * Note: the logic is the same as `batchResultsToGroupSuccessfulChange` but I couldn't make typescript happy. 
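 + * A change is only counted as successful when its batch entry comes back with code 200 and a message hash (see resultShouldBeIncluded above).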
+ */ +function batchResultsToUserSuccessfulChange( + result: NotEmptyArrayOfBatchResults | null, + request: UserDestinationChanges +): Array { + const successfulChanges: Array = []; + + /** + * For each batch request, we get as result + * - status code + hash of the new config message + * - status code of the delete of all messages as given by the request hashes. + * + * As it is a sequence, the delete might have failed but the new config message might still be posted. + * So we need to check which request failed, and if it is the delete by hashes, we need to add the hash of the posted message to the list of hashes + */ + + if (!result?.length) { + return successfulChanges; + } + + for (let j = 0; j < result.length; j++) { + const msgPushed = request.messages?.[j]; + const shouldBe = resultShouldBeIncluded(msgPushed, result[j]); + + if (shouldBe) { + // libsession keeps track of the hashes to push and the one pushed + successfulChanges.push({ + updatedHash: shouldBe.hash, + pushed: shouldBe.pushed, + }); + } + } + + return successfulChanges; +} + +/** + * Check if the wrappers related to that pubkeys need to be dumped to the DB, and if yes, do it. + */ +async function saveDumpsToDb(pubkey: PubkeyType | GroupPubkeyType) { + // first check if this is relating a group + if (PubKey.is03Pubkey(pubkey)) { + const metaNeedsDump = await MetaGroupWrapperActions.needsDump(pubkey); + // save the concatenated dumps as a single entry in the DB if any of the dumps had a need for dump + if (metaNeedsDump) { + window.log.debug(`About to make and save dumps for metagroup ${ed25519Str(pubkey)}`); + + const dump = await MetaGroupWrapperActions.metaDump(pubkey); + await ConfigDumpData.saveConfigDump({ + data: dump, + publicKey: pubkey, + variant: `MetaGroupConfig-${pubkey}`, + }); + + window.log.info(`Saved dumps for metagroup ${ed25519Str(pubkey)}`); + } else { + window.log.debug(`No need to update local dumps for metagroup ${ed25519Str(pubkey)}`); + } + return; + } + // here, we can only be called with our current user pubkey if (pubkey !== UserUtils.getOurPubKeyStrFromCache()) { - throw new Error('FIXME, generic case is to be done'); + throw new Error('saveDumpsToDb only supports groupv2 and us pubkeys'); + } + + for (let i = 0; i < LibSessionUtil.requiredUserVariants.length; i++) { + const variant = LibSessionUtil.requiredUserVariants[i]; + const needsDump = await GenericWrapperActions.needsDump(variant); + + if (!needsDump) { + continue; + } + const dump = await GenericWrapperActions.dump(variant); + await ConfigDumpData.saveConfigDump({ + data: dump, + publicKey: pubkey, + variant, + }); + } +} + +/** + * Creates the specified member in the specified group wrapper and sets the details provided. + * Note: no checks are done, so if the member existed already it's name/profile picture are overridden. + * + * This should only be used when the current device is explicitly inviting a new member to the group. 
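 *
 * @example
 * // Hypothetical invite flow (variable names are placeholders); this mirrors how the
 * // group-creation flow elsewhere in this changeset fills the details from the invited
 * // member's conversation:
 * const invitedConvo = ConvoHub.use().get(newMemberPk);
 * await LibSessionUtil.createMemberAndSetDetails({
 *   groupPk,
 *   memberPubkey: newMemberPk,
 *   displayName: invitedConvo?.getRealSessionUsername() || null,
 *   profileKeyHex: invitedConvo?.getProfileKey() || null,
 *   avatarUrl: invitedConvo?.getAvatarPointer() || null,
 * });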
+ */ +async function createMemberAndSetDetails({ + displayName, + memberPubkey, + groupPk, + avatarUrl, + profileKeyHex, +}: { + memberPubkey: PubkeyType; + displayName: string | null; + groupPk: GroupPubkeyType; + profileKeyHex: string | null; + avatarUrl: string | null; +}) { + await MetaGroupWrapperActions.memberConstructAndSet(groupPk, memberPubkey); + + if (displayName) { + await MetaGroupWrapperActions.memberSetNameTruncated(groupPk, memberPubkey, displayName); + } + if (profileKeyHex && avatarUrl) { + await MetaGroupWrapperActions.memberSetProfilePicture(groupPk, memberPubkey, { + url: avatarUrl, + key: from_hex(profileKeyHex), + }); } - await GenericWrapperActions.confirmPushed(variant, seqno, hash); - return GenericWrapperActions.needsDump(variant); } export const LibSessionUtil = { initializeLibSessionUtilWrappers, + userNamespaceToVariant, requiredUserVariants, - pendingChangesForPubkey, - kindToVariant, - variantToKind, - markAsPushed, + pendingChangesForUs, + pendingChangesForGroup, + saveDumpsToDb, + batchResultsToGroupSuccessfulChange, + batchResultsToUserSuccessfulChange, + createMemberAndSetDetails, }; diff --git a/ts/session/utils/libsession/libsession_utils_contacts.ts b/ts/session/utils/libsession/libsession_utils_contacts.ts index a704da1a5d..d1961755a3 100644 --- a/ts/session/utils/libsession/libsession_utils_contacts.ts +++ b/ts/session/utils/libsession/libsession_utils_contacts.ts @@ -2,8 +2,9 @@ import { ContactInfo, ContactInfoSet } from 'libsession_util_nodejs'; import { ConversationModel } from '../../../models/conversation'; import { getContactInfoFromDBValues } from '../../../types/sqlSharedTypes'; import { ContactsWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; -import { getConversationController } from '../../conversations'; +import { ConvoHub } from '../../conversations'; import { PubKey } from '../../types'; +import { CONVERSATION_PRIORITIES } from '../../../models/types'; /** * This file is centralizing the management of data from the Contacts Wrapper of libsession. @@ -18,7 +19,7 @@ import { PubKey } from '../../types'; * * Also, to make sure that our wrapper is up to date, we schedule jobs to be run and fetch all contacts and update all the wrappers entries. * This is done in the - * - `ConfigurationSyncJob` (sending data to the network) and the + * - `UserSyncJob` (sending data to the network) and the * */ const mappedContactWrapperValues = new Map(); @@ -41,10 +42,11 @@ function isContactToStoreInWrapper(convo: ConversationModel): boolean { * Fetches the specified convo and updates the required field in the wrapper. * If that contact does not exist in the wrapper, it is created before being updated. */ + async function insertContactFromDBIntoWrapperAndRefresh( id: string ): Promise { - const foundConvo = getConversationController().get(id); + const foundConvo = ConvoHub.use().get(id); if (!foundConvo) { return null; } @@ -52,17 +54,21 @@ async function insertContactFromDBIntoWrapperAndRefresh( if (!SessionUtilContact.isContactToStoreInWrapper(foundConvo)) { return null; } - - const dbName = foundConvo.get('displayNameInProfile') || undefined; - const dbNickname = foundConvo.get('nickname') || undefined; + // Note: We NEED those to come from the convo itself as .get() calls directly + // and not from the isApproved(), didApproveMe() functions. + // + // The reason is that when we make a change, we need to save it to the DB to update the libsession state (on commit()). 
+ // But, if we use isApproved() instead of .get('isApproved'), we get the value from libsession which is not up to date with a change made in the convo yet! + const dbName = foundConvo.getRealSessionUsername() || undefined; + const dbNickname = foundConvo.get('nickname'); const dbProfileUrl = foundConvo.get('avatarPointer') || undefined; const dbProfileKey = foundConvo.get('profileKey') || undefined; - const dbApproved = !!foundConvo.get('isApproved') || false; - const dbApprovedMe = !!foundConvo.get('didApproveMe') || false; - const dbBlocked = !!foundConvo.isBlocked() || false; - const priority = foundConvo.get('priority') || 0; - const expirationMode = foundConvo.getExpirationMode() || undefined; - const expireTimer = foundConvo.getExpireTimer() || 0; + const dbApproved = !!foundConvo.get('isApproved'); + const dbApprovedMe = !!foundConvo.get('didApproveMe'); + const dbBlocked = foundConvo.isBlocked(); + const priority = foundConvo.get('priority') || CONVERSATION_PRIORITIES.default; + const expirationMode = foundConvo.get('expirationMode') || undefined; + const expireTimer = foundConvo.get('expireTimer') || 0; const wrapperContact = getContactInfoFromDBValues({ id, @@ -81,15 +87,15 @@ async function insertContactFromDBIntoWrapperAndRefresh( try { window.log.debug('inserting into contact wrapper: ', JSON.stringify(wrapperContact)); await ContactsWrapperActions.set(wrapperContact); - // returned for testing purposes only - return wrapperContact; } catch (e) { window.log.warn(`ContactsWrapperActions.set of ${id} failed with ${e.message}`); // we still let this go through + } finally { + await refreshMappedValue(id); } - await refreshMappedValue(id); - return null; + // returned for testing purposes only + return wrapperContact; } /** @@ -101,11 +107,11 @@ async function refreshMappedValue(id: string, duringAppStart = false) { if (fromWrapper) { setMappedValue(fromWrapper); if (!duringAppStart) { - getConversationController().get(id)?.triggerUIRefresh(); + ConvoHub.use().get(id)?.triggerUIRefresh(); } } else if (mappedContactWrapperValues.delete(id)) { if (!duringAppStart) { - getConversationController().get(id)?.triggerUIRefresh(); + ConvoHub.use().get(id)?.triggerUIRefresh(); } } } diff --git a/ts/session/utils/libsession/libsession_utils_convo_info_volatile.ts b/ts/session/utils/libsession/libsession_utils_convo_info_volatile.ts index a426ce1e6d..8a562ca49d 100644 --- a/ts/session/utils/libsession/libsession_utils_convo_info_volatile.ts +++ b/ts/session/utils/libsession/libsession_utils_convo_info_volatile.ts @@ -1,5 +1,5 @@ /* eslint-disable no-case-declarations */ -import { BaseConvoInfoVolatile, ConvoVolatileType } from 'libsession_util_nodejs'; +import { BaseConvoInfoVolatile, ConvoVolatileType, GroupPubkeyType } from 'libsession_util_nodejs'; import { isEmpty, isFinite } from 'lodash'; import { Data } from '../../../data/data'; import { OpenGroupData } from '../../../data/opengroups'; @@ -10,7 +10,8 @@ import { UserGroupsWrapperActions, } from '../../../webworker/workers/browser/libsession_worker_interface'; import { OpenGroupUtils } from '../../apis/open_group_api/utils'; -import { getConversationController } from '../../conversations'; +import { ConvoHub } from '../../conversations'; +import { PubKey } from '../../types'; import { SessionUtilContact } from './libsession_utils_contacts'; import { SessionUtilUserGroups } from './libsession_utils_user_groups'; import { SessionUtilUserProfile } from './libsession_utils_user_profile'; @@ -25,6 +26,11 @@ const mapped1o1WrapperValues = 
new Map(); */ const mappedLegacyGroupWrapperValues = new Map(); +/** + * The key of this map is the convoId as stored in the database. So the group 03 pubkey + */ +const mappedGroupWrapperValues = new Map(); + /** * The key of this map is the convoId as stored in the database, so withoutpubkey */ @@ -52,7 +58,9 @@ function getConvoType(convo: ConversationModel): ConvoVolatileType { ? '1o1' : SessionUtilUserGroups.isCommunityToStoreInWrapper(convo) ? 'Community' - : 'LegacyGroup'; + : SessionUtilUserGroups.isLegacyGroupToStoreInWrapper(convo) + ? 'LegacyGroup' + : 'Group'; return convoType; } @@ -64,7 +72,7 @@ function getConvoType(convo: ConversationModel): ConvoVolatileType { */ async function insertConvoFromDBIntoWrapperAndRefresh(convoId: string): Promise { // this is too slow to fetch from the database the up to date data here. Let's hope that what we have in memory is up to date enough - const foundConvo = getConversationController().get(convoId); + const foundConvo = ConvoHub.use().get(convoId); if (!foundConvo || !isConvoToStoreInWrapper(foundConvo)) { return; } @@ -78,9 +86,9 @@ async function insertConvoFromDBIntoWrapperAndRefresh(convoId: string): Promise< ? timestampFromDbMs : 0; - window.log.debug( - `inserting into convoVolatile wrapper: ${convoId} lastMessageReadTimestamp:${lastReadMessageTimestamp} forcedUnread:${isForcedUnread}...` - ); + // window.log.debug( + // `inserting into convoVolatile wrapper: ${convoId} lastMessageReadTimestamp:${lastReadMessageTimestamp} forcedUnread:${isForcedUnread}...` + // ); const convoType = getConvoType(foundConvo); switch (convoType) { @@ -113,6 +121,23 @@ async function insertConvoFromDBIntoWrapperAndRefresh(convoId: string): Promise< ); } break; + case 'Group': + try { + if (!PubKey.is03Pubkey(convoId)) { + throw new Error('group but not with 03 prefix'); + } + await ConvoInfoVolatileWrapperActions.setGroup( + convoId, + lastReadMessageTimestamp, + isForcedUnread + ); + await refreshConvoVolatileCached(convoId, true, false); + } catch (e) { + window.log.warn( + `ConvoInfoVolatileWrapperActions.setGroup of ${convoId} failed with ${e.message}` + ); + } + break; case 'Community': try { const asOpengroup = foundConvo.toOpenGroupV2(); @@ -167,6 +192,8 @@ async function refreshConvoVolatileCached( convoType = 'Community'; } else if (convoId.startsWith('05') && isLegacyGroup) { convoType = 'LegacyGroup'; + } else if (PubKey.is03Pubkey(convoId)) { + convoType = 'Group'; } else if (convoId.startsWith('05')) { convoType = '1o1'; } @@ -187,6 +214,16 @@ async function refreshConvoVolatileCached( } refreshed = true; break; + case 'Group': + if (!PubKey.is03Pubkey(convoId)) { + throw new Error('expected a 03 group'); + } + const fromWrapperGroup = await ConvoInfoVolatileWrapperActions.getGroup(convoId); + if (fromWrapperGroup) { + mappedGroupWrapperValues.set(convoId, fromWrapperGroup); + } + refreshed = true; + break; case 'Community': const fromWrapperCommunity = await ConvoInfoVolatileWrapperActions.getCommunity(convoId); if (fromWrapperCommunity && fromWrapperCommunity.fullUrlWithPubkey) { @@ -200,7 +237,7 @@ async function refreshConvoVolatileCached( } if (refreshed && !duringAppStart) { - getConversationController().get(convoId)?.triggerUIRefresh(); + ConvoHub.use().get(convoId)?.triggerUIRefresh(); } } catch (e) { window.log.info(`refreshMappedValue for volatile convoID: ${convoId}`, e.message); @@ -240,6 +277,15 @@ async function removeLegacyGroupFromWrapper(convoId: string) { mappedLegacyGroupWrapperValues.delete(convoId); } +async function 
removeGroupFromWrapper(groupPk: GroupPubkeyType) { + try { + await ConvoInfoVolatileWrapperActions.eraseGroup(groupPk); + } catch (e) { + window.log.warn('removeGroupFromWrapper failed with ', e.message); + } + mappedGroupWrapperValues.delete(groupPk); +} + /** * Removes the matching legacy group from the wrapper and from the cached list of legacy groups */ @@ -262,7 +308,7 @@ async function removeContactFromWrapper(convoId: string) { * whole other bunch of issues because it is a native node module. */ function getConvoInfoVolatileTypes(): Array { - return ['1o1', 'LegacyGroup', 'Community']; + return ['1o1', 'LegacyGroup', 'Group', 'Community']; } export const SessionUtilConvoInfoVolatile = { @@ -277,7 +323,10 @@ export const SessionUtilConvoInfoVolatile = { removeContactFromWrapper, // legacy group - removeLegacyGroupFromWrapper, // a group can be removed but also just marked hidden, so only call this function when the group is completely removed // TODOLATER + removeLegacyGroupFromWrapper, // a group can be removed but also just marked hidden, so only call this function when the group is completely removed + + // group + removeGroupFromWrapper, // a group can be removed but also just marked hidden, so only call this function when the group is completely removed // communities removeCommunityFromWrapper, diff --git a/ts/session/utils/libsession/libsession_utils_multi_encrypt.ts b/ts/session/utils/libsession/libsession_utils_multi_encrypt.ts new file mode 100644 index 0000000000..efdf6b810c --- /dev/null +++ b/ts/session/utils/libsession/libsession_utils_multi_encrypt.ts @@ -0,0 +1,42 @@ +import { EncryptionDomain } from 'libsession_util_nodejs'; +import { MultiEncryptWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; + +const allKnownEncryptionDomains: Array = ['SessionGroupKickedMessage']; + +/** + * Try to decrypt the content with any type of encryption domains we know. + * Does not throw, will return null if we couldn't decrypt it successfuly. 
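 *
 * @example
 * // Hypothetical caller (the buffers are placeholders; real call sites are not part of this file):
 * const res = await MultiEncryptUtils.multiDecryptAnyEncryptionDomain({
 *   encoded: incomingCiphertext,
 *   senderEd25519Pubkey,
 *   userEd25519SecretKey,
 * });
 * if (res) {
 *   // res.decrypted is the plaintext and res.domain is the EncryptionDomain that matched
 * }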
+ */ +async function multiDecryptAnyEncryptionDomain({ + encoded, + senderEd25519Pubkey, + userEd25519SecretKey, +}: { + encoded: Uint8Array; + senderEd25519Pubkey: Uint8Array; + userEd25519SecretKey: Uint8Array; +}) { + for (let index = 0; index < allKnownEncryptionDomains.length; index++) { + const domain = allKnownEncryptionDomains[index]; + try { + // eslint-disable-next-line no-await-in-loop + const decrypted = await MultiEncryptWrapperActions.multiDecryptEd25519({ + encoded, + senderEd25519Pubkey, + userEd25519SecretKey, + domain, + }); + return { decrypted, domain }; + } catch (e) { + window.log.info( + `multiDecryptAnyEncryptionDomain: failed to decrypt message with encryption domain: ${domain}` + ); + } + } + window.log.info(`multiDecryptAnyEncryptionDomain: failed to decrypt message entirely`); + return null; +} + +export const MultiEncryptUtils = { + multiDecryptAnyEncryptionDomain, +}; diff --git a/ts/session/utils/libsession/libsession_utils_user_groups.ts b/ts/session/utils/libsession/libsession_utils_user_groups.ts index 874a2e095f..caf46a466f 100644 --- a/ts/session/utils/libsession/libsession_utils_user_groups.ts +++ b/ts/session/utils/libsession/libsession_utils_user_groups.ts @@ -10,13 +10,19 @@ import { getLegacyGroupInfoFromDBValues, } from '../../../types/sqlSharedTypes'; import { UserGroupsWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; -import { getConversationController } from '../../conversations'; +import { ConvoHub } from '../../conversations'; +import { PubKey } from '../../types'; +import { CONVERSATION_PRIORITIES } from '../../../models/types'; /** * Returns true if that conversation is an active group */ function isUserGroupToStoreInWrapper(convo: ConversationModel): boolean { - return isCommunityToStoreInWrapper(convo) || isLegacyGroupToStoreInWrapper(convo); + return ( + isCommunityToStoreInWrapper(convo) || + isLegacyGroupToStoreInWrapper(convo) || + isGroupToStoreInWrapper(convo) + ); } function isCommunityToStoreInWrapper(convo: ConversationModel): boolean { @@ -26,14 +32,16 @@ function isCommunityToStoreInWrapper(convo: ConversationModel): boolean { function isLegacyGroupToStoreInWrapper(convo: ConversationModel): boolean { return ( convo.isGroup() && - !convo.isPublic() && - convo.id.startsWith('05') && // new closed groups won't start with 05 + PubKey.is05Pubkey(convo.id) && // we only check legacy group here convo.isActive() && - !convo.get('isKickedFromGroup') && - !convo.get('left') + !convo.isKickedFromGroup() // we cannot have a left group anymore. We remove it when we leave it. ); } +function isGroupToStoreInWrapper(convo: ConversationModel): boolean { + return convo.isGroup() && PubKey.is03Pubkey(convo.id) && convo.isActive(); +} + /** * We do not want to include groups left in the wrapper, but when receiving a list * of wrappers from the network we need to check against the one present locally @@ -65,7 +73,7 @@ function isLegacyGroupToRemoveFromDBIfNotInWrapper(convo: ConversationModel): bo async function insertGroupsFromDBIntoWrapperAndRefresh( convoId: string ): Promise { - const foundConvo = getConversationController().get(convoId); + const foundConvo = ConvoHub.use().get(convoId); if (!foundConvo) { return null; } @@ -76,7 +84,9 @@ async function insertGroupsFromDBIntoWrapperAndRefresh( const convoType: UserGroupsType = SessionUtilUserGroups.isCommunityToStoreInWrapper(foundConvo) ? 'Community' - : 'LegacyGroup'; + : PubKey.is03Pubkey(convoId) + ? 
'Group' + : 'LegacyGroup'; switch (convoType) { case 'Community': @@ -95,12 +105,12 @@ async function insertGroupsFromDBIntoWrapperAndRefresh( ); const wrapperComm = getCommunityInfoFromDBValues({ - priority: foundConvo.get('priority'), + priority: foundConvo.get('priority') || CONVERSATION_PRIORITIES.default, // this has to be a direct call to .get fullUrl, }); try { - window.log.debug(`inserting into usergroup wrapper "${JSON.stringify(wrapperComm)}"...`); + window.log.debug(`inserting into user group wrapper "${JSON.stringify(wrapperComm)}"...`); // this does the create or the update of the matching existing community await UserGroupsWrapperActions.setCommunityByFullUrl( wrapperComm.fullUrl, @@ -120,22 +130,24 @@ async function insertGroupsFromDBIntoWrapperAndRefresh( case 'LegacyGroup': const encryptionKeyPair = await Data.getLatestClosedGroupEncryptionKeyPair(convoId); + // Note: For any fields stored in both the DB and libsession, + // we have to make direct calls to.get() and NOT the wrapped getPriority(), etc... const wrapperLegacyGroup = getLegacyGroupInfoFromDBValues({ id: foundConvo.id, - priority: foundConvo.get('priority'), + priority: foundConvo.get('priority') || CONVERSATION_PRIORITIES.default, members: foundConvo.get('members') || [], - groupAdmins: foundConvo.get('groupAdmins') || [], - expirationMode: foundConvo.getExpirationMode() || 'off', - expireTimer: foundConvo.getExpireTimer() || 0, - displayNameInProfile: foundConvo.get('displayNameInProfile'), + groupAdmins: foundConvo.getGroupAdmins(), // cannot be changed for legacy groups, so we don't care + expirationMode: foundConvo.get('expirationMode') || 'off', + expireTimer: foundConvo.get('expireTimer') || 0, + displayNameInProfile: foundConvo.getRealSessionUsername(), encPubkeyHex: encryptionKeyPair?.publicHex || '', encSeckeyHex: encryptionKeyPair?.privateHex || '', - lastJoinedTimestamp: foundConvo.get('lastJoinedTimestamp') || 0, + lastJoinedTimestamp: foundConvo.getLastJoinedTimestamp(), }); try { window.log.debug( - `inserting into usergroup wrapper "${foundConvo.id}"... }`, + `inserting into user group wrapper "${foundConvo.id}"... }`, JSON.stringify(wrapperLegacyGroup) ); // this does the create or the update of the matching existing legacy group @@ -147,11 +159,42 @@ async function insertGroupsFromDBIntoWrapperAndRefresh( // we still let this go through } break; + case 'Group': + // The 03-group is a bit different that the others as most fields are not to be updated. + // Indeed, they are more up to date on the group's swarm than ours and we don't want to keep both in sync. + if (!PubKey.is03Pubkey(convoId)) { + throw new Error('not a 03 group'); + } + const groupInfo = { + pubkeyHex: convoId, + authData: null, // only updated when we process a new invite + invitePending: null, // only updated when we accept an invite + disappearingTimerSeconds: null, // not updated except when we process an invite/create a group + joinedAtSeconds: null, // no need to update this one except when we process an invite, maybe + name: null, // not updated except when we process an invite/create a group + secretKey: null, // not updated except when we process an promote/create a group + priority: foundConvo.getPriority() ?? null, // for 03 group, the priority is only tracked with libsession, so this is fine + }; + try { + window.log.debug( + `inserting into user group wrapper "${foundConvo.id}"... 
}`, + JSON.stringify(groupInfo) + ); + // this does the create or the update of the matching existing group + await UserGroupsWrapperActions.setGroup(groupInfo); + + // returned for testing purposes only + return null; + } catch (e) { + window.log.warn(`UserGroupsWrapperActions.set of ${convoId} failed with ${e.message}`); + // we still let this go through + } + break; default: assertUnreachable( convoType, - `insertGroupsFromDBIntoWrapperAndRefresh case not handeld "${convoType}"` + `insertGroupsFromDBIntoWrapperAndRefresh case not handled "${convoType}"` ); } return null; @@ -178,20 +221,6 @@ async function removeCommunityFromWrapper(_convoId: string, fullUrlWithOrWithout } } -/** - * Remove the matching legacy group from the wrapper and from the cached list of legacy groups - */ -async function removeLegacyGroupFromWrapper(groupPk: string) { - try { - await UserGroupsWrapperActions.eraseLegacyGroup(groupPk); - } catch (e) { - window.log.warn( - `UserGroupsWrapperActions.eraseLegacyGroup with = ${groupPk} failed with`, - e.message - ); - } -} - /** * This function can be used where there are things to do for all the types handled by this wrapper. * You can do a loop on all the types handled by this wrapper and have a switch using assertUnreachable to get errors when not every case is handled. @@ -202,7 +231,7 @@ async function removeLegacyGroupFromWrapper(groupPk: string) { * whole other bunch of issues because it is a native node module. */ function getUserGroupTypes(): Array { - return ['Community', 'LegacyGroup']; + return ['Community', 'LegacyGroup', 'Group']; } export const SessionUtilUserGroups = { @@ -221,5 +250,6 @@ export const SessionUtilUserGroups = { isLegacyGroupToStoreInWrapper, isLegacyGroupToRemoveFromDBIfNotInWrapper, - removeLegacyGroupFromWrapper, // a group can be removed but also just marked hidden, so only call this function when the group is completely removed // TODOLATER + // group 03 + isGroupToStoreInWrapper, }; diff --git a/ts/session/utils/libsession/libsession_utils_user_profile.ts b/ts/session/utils/libsession/libsession_utils_user_profile.ts index 07a812ce39..1d1c41c9bb 100644 --- a/ts/session/utils/libsession/libsession_utils_user_profile.ts +++ b/ts/session/utils/libsession/libsession_utils_user_profile.ts @@ -1,47 +1,42 @@ import { isEmpty } from 'lodash'; import { UserUtils } from '..'; import { SettingsKey } from '../../../data/settings-key'; +import { CONVERSATION_PRIORITIES } from '../../../models/types'; +import { stringify } from '../../../types/sqlSharedTypes'; import { Storage } from '../../../util/storage'; import { UserConfigWrapperActions } from '../../../webworker/workers/browser/libsession_worker_interface'; -import { getConversationController } from '../../conversations'; +import { ConvoHub } from '../../conversations'; import { fromHexToArray } from '../String'; -import { CONVERSATION_PRIORITIES } from '../../../models/types'; async function insertUserProfileIntoWrapper(convoId: string) { if (!SessionUtilUserProfile.isUserProfileToStoreInWrapper(convoId)) { return null; } const us = UserUtils.getOurPubKeyStrFromCache(); - const ourConvo = getConversationController().get(us); + const ourConvo = ConvoHub.use().get(us); if (!ourConvo) { throw new Error('insertUserProfileIntoWrapper needs a ourConvo to exist'); } - const dbName = ourConvo.get('displayNameInProfile') || ''; - const dbProfileUrl = ourConvo.get('avatarPointer') || ''; - const dbProfileKey = fromHexToArray(ourConvo.get('profileKey') || ''); - const priority = 
ourConvo.get('priority') || CONVERSATION_PRIORITIES.default; + const dbName = ourConvo.getRealSessionUsername() || ''; + const dbProfileUrl = ourConvo.getAvatarPointer() || ''; + const dbProfileKey = fromHexToArray(ourConvo.getProfileKey() || ''); + const priority = ourConvo.get('priority') || CONVERSATION_PRIORITIES.default; // this has to be a direct call to .get() and not getPriority() const areBlindedMsgRequestEnabled = !!Storage.get(SettingsKey.hasBlindedMsgRequestsEnabled); const expirySeconds = ourConvo.getExpireTimer() || 0; window.log.debug( `inserting into userprofile wrapper: username:"${dbName}", priority:${priority} image:${JSON.stringify( - { - url: dbProfileUrl, - key: dbProfileKey, - } - )}, settings: ${JSON.stringify({ - areBlindedMsgRequestEnabled, - expirySeconds, - })}` + { url: dbProfileUrl, key: stringify(dbProfileKey) } + )}, settings: ${JSON.stringify({ areBlindedMsgRequestEnabled, expirySeconds })}` ); // we don't want to throw if somehow our display name in the DB is too long here, so we use the truncated version. await UserConfigWrapperActions.setNameTruncated(dbName); await UserConfigWrapperActions.setPriority(priority); - if (dbProfileUrl && !isEmpty(dbProfileKey)) { + if (dbProfileUrl && !isEmpty(dbProfileKey) && dbProfileKey.length === 32) { await UserConfigWrapperActions.setProfilePic({ key: dbProfileKey, url: dbProfileUrl }); } else { await UserConfigWrapperActions.setProfilePic({ key: null, url: null }); diff --git a/ts/session/utils/sync/syncUtils.ts b/ts/session/utils/sync/syncUtils.ts index 7363fac741..2149115305 100644 --- a/ts/session/utils/sync/syncUtils.ts +++ b/ts/session/utils/sync/syncUtils.ts @@ -1,29 +1,12 @@ -import _, { isEmpty } from 'lodash'; -import { v4 as uuidv4 } from 'uuid'; -import { UserUtils } from '..'; -import { getMessageQueue } from '../..'; -import { Data } from '../../../data/data'; -import { OpenGroupData } from '../../../data/opengroups'; -import { ConversationModel } from '../../../models/conversation'; +import { isEmpty, isNumber, toNumber } from 'lodash'; import { SignalService } from '../../../protobuf'; -import { ECKeyPair } from '../../../receiver/keypairs'; -import { ConfigurationSyncJobDone } from '../../../shims/events'; +import { UserSyncJobDone } from '../../../shims/events'; import { ReleasedFeatures } from '../../../util/releaseFeature'; -import { Storage } from '../../../util/storage'; -import { getCompleteUrlFromRoom } from '../../apis/open_group_api/utils/OpenGroupUtils'; -import { SnodeNamespaces } from '../../apis/snode_api/namespaces'; -import { DURATION } from '../../constants'; -import { getConversationController } from '../../conversations'; + import { DisappearingMessageUpdate } from '../../disappearing_messages/types'; import { DataMessage } from '../../messages/outgoing'; -import { - ConfigurationMessage, - ConfigurationMessageClosedGroup, - ConfigurationMessageContact, -} from '../../messages/outgoing/controlMessage/ConfigurationMessage'; import { ExpirationTimerUpdateMessage } from '../../messages/outgoing/controlMessage/ExpirationTimerUpdateMessage'; import { MessageRequestResponse } from '../../messages/outgoing/controlMessage/MessageRequestResponse'; -import { SharedConfigMessage } from '../../messages/outgoing/controlMessage/SharedConfigMessage'; import { UnsendMessage } from '../../messages/outgoing/controlMessage/UnsendMessage'; import { AttachmentPointerWithUrl, @@ -31,53 +14,7 @@ import { Quote, VisibleMessage, } from '../../messages/outgoing/visibleMessage/VisibleMessage'; -import { PubKey 
} from '../../types'; -import { fromBase64ToArray, fromHexToArray } from '../String'; -import { ConfigurationSync } from '../job_runners/jobs/ConfigurationSyncJob'; - -const ITEM_ID_LAST_SYNC_TIMESTAMP = 'lastSyncedTimestamp'; - -const getLastSyncTimestampFromDb = async (): Promise => - (await Data.getItemById(ITEM_ID_LAST_SYNC_TIMESTAMP))?.value; - -const writeLastSyncTimestampToDb = async (timestamp: number) => - Storage.put(ITEM_ID_LAST_SYNC_TIMESTAMP, timestamp); - -/** - * Conditionally Syncs user configuration with other devices linked. - */ -export const syncConfigurationIfNeeded = async () => { - await ConfigurationSync.queueNewJobIfNeeded(); - - const userConfigLibsession = await ReleasedFeatures.checkIsUserConfigFeatureReleased(); - if (!userConfigLibsession) { - const lastSyncedTimestamp = (await getLastSyncTimestampFromDb()) || 0; - const now = Date.now(); - - // if the last sync was less than 2 days before, return early. - if (Math.abs(now - lastSyncedTimestamp) < DURATION.DAYS * 2) { - return; - } - - const allConvos = getConversationController().getConversations(); - - const configMessage = await getCurrentConfigurationMessage(allConvos); - try { - // window?.log?.info('syncConfigurationIfNeeded with', configMessage); - - await getMessageQueue().sendSyncMessage({ - namespace: SnodeNamespaces.UserMessages, - message: configMessage, - }); - } catch (e) { - window?.log?.warn('Caught an error while sending our ConfigurationMessage:', e); - // we do return early so that next time we use the old timestamp again - // and so try again to trigger a sync - return; - } - await writeLastSyncTimestampToDb(now); - } -}; +import { UserSync } from '../job_runners/jobs/UserSyncJob'; export const forceSyncConfigurationNowIfNeeded = async (waitForMessageSent = false) => { await ReleasedFeatures.checkIsUserConfigFeatureReleased(); @@ -87,212 +24,28 @@ export const forceSyncConfigurationNowIfNeeded = async (waitForMessageSent = fal resolve(false); }, 20000); - // the ConfigurationSync also handles dumping in to the DB if we do not need to push the data, but the dumping needs to be done even before the feature flag is true. - void ConfigurationSync.queueNewJobIfNeeded().catch(e => { + // the UserSync also handles dumping in to the DB if we do not need to push the data, but the dumping needs to be done even before the feature flag is true. + void UserSync.queueNewJobIfNeeded().catch(e => { window.log.warn( - 'forceSyncConfigurationNowIfNeeded scheduling of jobs ConfigurationSync.queueNewJobIfNeeded failed with: ', + 'forceSyncConfigurationNowIfNeeded scheduling of jobs UserSync.queueNewJobIfNeeded failed with: ', e.message ); }); - if (ReleasedFeatures.isUserConfigFeatureReleasedCached()) { - if (waitForMessageSent) { - window.Whisper.events.once(ConfigurationSyncJobDone, () => { - resolve(true); - }); - return; - } - resolve(true); - return; - } - const allConvos = getConversationController().getConversations(); - - // eslint-disable-next-line more/no-then - void getCurrentConfigurationMessage(allConvos) - .then(configMessage => { - // this just adds the message to the sending queue. - // if waitForMessageSent is set, we need to effectively wait until then - const callback = waitForMessageSent - ? 
() => { - resolve(true); - } - : undefined; - void getMessageQueue().sendSyncMessage({ - namespace: SnodeNamespaces.UserMessages, - message: configMessage, - sentCb: callback as any, - }); - // either we resolve from the callback if we need to wait for it, - // or we don't want to wait, we resolve it here. - if (!waitForMessageSent) { - resolve(true); - } - }) - .catch(e => { - window?.log?.warn('Caught an error while building our ConfigurationMessage:', e); - resolve(false); + if (waitForMessageSent) { + window.Whisper.events.once(UserSyncJobDone, () => { + resolve(true); }); - }); -}; - -const getActiveOpenGroupV2CompleteUrls = async ( - convos: Array -): Promise> => { - // Filter open groups v2 - const openGroupsV2ConvoIds = convos - .filter(c => !!c.get('active_at') && c.isOpenGroupV2() && !c.get('left')) - .map(c => c.id) as Array; - - const urls = await Promise.all( - openGroupsV2ConvoIds.map(async opengroupConvoId => { - const roomInfos = OpenGroupData.getV2OpenGroupRoom(opengroupConvoId); - - if (roomInfos) { - return getCompleteUrlFromRoom(roomInfos); - } - return null; - }) - ); - - return _.compact(urls) || []; -}; - -const getValidClosedGroups = async (convos: Array) => { - const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); - - // Filter Closed/Medium groups - const closedGroupModels = convos.filter( - c => - !!c.get('active_at') && - c.isClosedGroup() && - c.get('members')?.includes(ourPubKey) && - !c.get('left') && - !c.get('isKickedFromGroup') && - !c.isBlocked() && - c.get('displayNameInProfile') - ); - - const closedGroups = await Promise.all( - closedGroupModels.map(async c => { - const groupPubKey = c.get('id'); - const fetchEncryptionKeyPair = await Data.getLatestClosedGroupEncryptionKeyPair(groupPubKey); - if (!fetchEncryptionKeyPair) { - return null; - } - - return new ConfigurationMessageClosedGroup({ - publicKey: groupPubKey, - name: c.get('displayNameInProfile') || '', - members: c.get('members') || [], - admins: c.get('groupAdmins') || [], - encryptionKeyPair: ECKeyPair.fromHexKeyPair(fetchEncryptionKeyPair), - }); - }) - ); - - const onlyValidClosedGroup = closedGroups.filter( - m => m !== null - ) as Array; - return onlyValidClosedGroup; -}; - -const getValidContacts = (convos: Array) => { - // Filter contacts - // blindedId are synced with the outbox logic. - const contactsModels = convos.filter( - c => - !!c.get('active_at') && - c.getRealSessionUsername() && - c.isPrivate() && - c.isApproved() && - !PubKey.isBlinded(c.get('id')) - ); - - const contacts = contactsModels.map(c => { - try { - const profileKey = c.get('profileKey'); - let profileKeyForContact = null; - if (typeof profileKey === 'string') { - // this will throw if the profileKey is not in hex. - try { - // for some reason, at some point, the saved profileKey is a string in base64 format - // this hack is here to update existing conversations with a non-hex profileKey to a hex format and save them - - if (!/^[0-9a-fA-F]+$/.test(profileKey)) { - throw new Error('Not Hex'); - } - profileKeyForContact = fromHexToArray(profileKey); - } catch (e) { - // if not hex, try to decode it as base64 - profileKeyForContact = fromBase64ToArray(profileKey); - // if the line above does not fail, update the stored profileKey for this convo - void c.setProfileKey(profileKeyForContact); - } - } else if (profileKey) { - window.log.warn( - 'Got a profileKey for a contact in another format than string. 
Contact: ', - c.id - ); - return null; - } - - return new ConfigurationMessageContact({ - publicKey: c.id as string, - displayName: c.getRealSessionUsername() || 'Anonymous', - profilePictureURL: c.get('avatarPointer'), - profileKey: !profileKeyForContact?.length ? undefined : profileKeyForContact, - isApproved: c.isApproved(), - isBlocked: c.isBlocked(), - didApproveMe: c.didApproveMe(), - }); - } catch (e) { - window?.log.warn('getValidContacts', e); - return null; + return; } - }); - return _.compact(contacts); -}; - -export const getCurrentConfigurationMessage = async ( - convos: Array -): Promise => { - const ourPubKey = UserUtils.getOurPubKeyStrFromCache(); - const ourConvo = convos.find(convo => convo.id === ourPubKey); - - const opengroupV2CompleteUrls = await getActiveOpenGroupV2CompleteUrls(convos); - const onlyValidClosedGroup = await getValidClosedGroups(convos); - const validContacts = getValidContacts(convos); - - if (!ourConvo) { - window?.log?.error('Could not find our convo while building a configuration message.'); - } - - const ourProfileKeyHex = - getConversationController().get(UserUtils.getOurPubKeyStrFromCache())?.get('profileKey') || - null; - const profileKey = ourProfileKeyHex ? fromHexToArray(ourProfileKeyHex) : undefined; - - const profilePicture = ourConvo?.get('avatarPointer') || undefined; - const displayName = ourConvo?.getRealSessionUsername() || 'Anonymous'; // this should never be undefined, but well... - - const activeOpenGroups = [...opengroupV2CompleteUrls]; - - return new ConfigurationMessage({ - identifier: uuidv4(), - timestamp: Date.now(), - activeOpenGroups, - activeClosedGroups: onlyValidClosedGroup, - displayName, - profilePicture, - profileKey, - contacts: validContacts, + resolve(true); }); }; const buildSyncVisibleMessage = ( identifier: string, dataMessage: SignalService.DataMessage, - timestamp: number, + createAtNetworkTimestamp: number, syncTarget: string, expireUpdate?: DisappearingMessageUpdate ) => { @@ -324,7 +77,7 @@ const buildSyncVisibleMessage = ( return new VisibleMessage({ identifier, - timestamp, + createAtNetworkTimestamp, attachments, body, quote, @@ -337,15 +90,15 @@ const buildSyncVisibleMessage = ( const buildSyncExpireTimerMessage = ( identifier: string, + createAtNetworkTimestamp: number, expireUpdate: DisappearingMessageUpdate, - timestamp: number, syncTarget: string ) => { const { expirationType, expirationTimer: expireTimer } = expireUpdate; return new ExpirationTimerUpdateMessage({ identifier, - timestamp, + createAtNetworkTimestamp, expirationType, expireTimer, syncTarget, @@ -355,10 +108,8 @@ const buildSyncExpireTimerMessage = ( export type SyncMessageType = | VisibleMessage | ExpirationTimerUpdateMessage - | ConfigurationMessage | MessageRequestResponse - | UnsendMessage - | SharedConfigMessage; + | UnsendMessage; export const buildSyncMessage = ( identifier: string, @@ -376,11 +127,11 @@ export const buildSyncMessage = ( const dataMessage = data instanceof DataMessage ? data.dataProto() : data; - if (!sentTimestamp || !_.isNumber(sentTimestamp)) { + if (!sentTimestamp || !isNumber(sentTimestamp)) { throw new Error('Tried to build a sync message without a sentTimestamp'); } - // don't include our profileKey on syncing message. This is to be done by a ConfigurationMessage now - const timestamp = _.toNumber(sentTimestamp); + // don't include our profileKey on syncing message. 
This is to be done through libsession now + const timestamp = toNumber(sentTimestamp); if ( dataMessage.flags === SignalService.DataMessage.Flags.EXPIRATION_TIMER_UPDATE && @@ -388,8 +139,8 @@ export const buildSyncMessage = ( ) { const expireTimerSyncMessage = buildSyncExpireTimerMessage( identifier, - expireUpdate, timestamp, + expireUpdate, syncTarget ); diff --git a/ts/shared/data_test_id.ts b/ts/shared/data_test_id.ts new file mode 100644 index 0000000000..29222d4a11 --- /dev/null +++ b/ts/shared/data_test_id.ts @@ -0,0 +1,8 @@ +/** + * Returns a string with all spaces replaced to '-'. + * A datatestid cannot have spaces on desktop, so we use this to format them accross the app. + * + */ +export function strToDataTestId(input: string) { + return input.replaceAll(' ', '-'); +} diff --git a/ts/shared/env_vars.ts b/ts/shared/env_vars.ts index 8d95a89cdd..2db5dc2d69 100644 --- a/ts/shared/env_vars.ts +++ b/ts/shared/env_vars.ts @@ -9,6 +9,10 @@ export function isDevProd() { return envAppInstanceIncludes('devprod'); } +export function isAutoLogin() { + return !!process.env.SESSION_AUTO_REGISTER; +} + export function isTestNet() { return envAppInstanceIncludes('testnet'); } @@ -16,3 +20,7 @@ export function isTestNet() { export function isTestIntegration() { return envAppInstanceIncludes('test-integration'); } + +export function hasClosedGroupV2QAButtons() { + return !!window.sessionFeatureFlags.useClosedGroupV2QAButtons; +} diff --git a/ts/shims/events.ts b/ts/shims/events.ts index 2ab75b3c85..45cdf4cd78 100644 --- a/ts/shims/events.ts +++ b/ts/shims/events.ts @@ -3,4 +3,4 @@ export function trigger(name: string, param1?: any, param2?: any) { } export const configurationMessageReceived = 'configurationMessageReceived'; -export const ConfigurationSyncJobDone = 'ConfigurationSyncJobDone'; +export const UserSyncJobDone = 'UserSyncJobDone'; diff --git a/ts/state/actions.ts b/ts/state/actions.ts index 8902cae871..a5609fbc8a 100644 --- a/ts/state/actions.ts +++ b/ts/state/actions.ts @@ -1,6 +1,7 @@ import { bindActionCreators, Dispatch } from '@reduxjs/toolkit'; import { actions as conversations } from './ducks/conversations'; +import { groupInfoActions } from './ducks/metaGroups'; import { actions as modalDialog } from './ducks/modalDialog'; import { actions as primaryColor } from './ducks/primaryColor'; import { actions as search } from './ducks/search'; @@ -19,6 +20,7 @@ export function mapDispatchToProps(dispatch: Dispatch): object { ...sections, ...modalDialog, ...primaryColor, + ...groupInfoActions, }, dispatch ), diff --git a/ts/state/ducks/conversations.ts b/ts/state/ducks/conversations.ts index 3f1a4a96f0..24f1a61089 100644 --- a/ts/state/ducks/conversations.ts +++ b/ts/state/ducks/conversations.ts @@ -1,16 +1,22 @@ /* eslint-disable no-restricted-syntax */ import { createAsyncThunk, createSlice, PayloadAction } from '@reduxjs/toolkit'; +import { PubkeyType } from 'libsession_util_nodejs'; import { omit, toNumber } from 'lodash'; import { ReplyingToMessageProps } from '../../components/conversation/composition/CompositionBox'; import { QuotedAttachmentType } from '../../components/conversation/message/message-content/quote/Quote'; import { Data } from '../../data/data'; -import { ConversationNotificationSettingType } from '../../models/conversationAttributes'; + +import { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + ConversationAttributes, + ConversationNotificationSettingType, +} from '../../models/conversationAttributes'; import { MessageModelType, 
PropsForDataExtractionNotification, PropsForMessageRequestResponse, } from '../../models/messageType'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; import { DisappearingMessages } from '../../session/disappearing_messages'; import { DisappearingMessageConversationModeType, @@ -18,7 +24,6 @@ import { } from '../../session/disappearing_messages/types'; import { ReactionList } from '../../types/Reaction'; import { resetRightOverlayMode } from './section'; -import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../../models/types'; import { LastMessageStatusType, LastMessageType, @@ -26,6 +31,7 @@ import { PropsForInteractionNotification, } from './types'; import { AttachmentType } from '../../types/Attachment'; +import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../../models/types'; export type MessageModelPropsWithoutConvoProps = { propsForMessage: PropsForMessageWithoutConvoProps; @@ -85,23 +91,29 @@ export type PropsForExpirationTimer = { messageId: string; }; -export type PropsForGroupUpdateGeneral = { - type: 'general'; -}; - export type PropsForGroupUpdateAdd = { type: 'add'; - added: Array; + withHistory: boolean; + added: Array; }; export type PropsForGroupUpdateKicked = { type: 'kicked'; - kicked: Array; + kicked: Array; +}; + +export type PropsForGroupUpdatePromoted = { + type: 'promoted'; + promoted: Array; +}; + +export type PropsForGroupUpdateAvatarChange = { + type: 'avatarChange'; }; export type PropsForGroupUpdateLeft = { type: 'left'; - left: Array; + left: Array; }; export type PropsForGroupUpdateName = { @@ -110,9 +122,10 @@ export type PropsForGroupUpdateName = { }; export type PropsForGroupUpdateType = - | PropsForGroupUpdateGeneral | PropsForGroupUpdateAdd | PropsForGroupUpdateKicked + | PropsForGroupUpdatePromoted + | PropsForGroupUpdateAvatarChange | PropsForGroupUpdateName | PropsForGroupUpdateLeft; @@ -215,7 +228,6 @@ export interface ReduxConversationType { isTyping?: boolean; isBlocked?: boolean; isKickedFromGroup?: boolean; - left?: boolean; avatarPath?: string | null; // absolute filepath to the avatar groupAdmins?: Array; // admins for closed groups and admins for open groups members?: Array; // members for closed groups only @@ -225,6 +237,10 @@ export interface ReduxConversationType { * If this is undefined, it means all notification are enabled */ currentNotificationSetting?: ConversationNotificationSettingType; + /** + * @see {@link ConversationAttributes#conversationIdOrigin}. 
+ */ + conversationIdOrigin?: string; priority?: number; // undefined means 0 isInitialFetchingInProgress?: boolean; @@ -323,7 +339,7 @@ async function getMessages({ }> { const beforeTimestamp = Date.now(); - const conversation = getConversationController().get(conversationKey); + const conversation = ConvoHub.use().get(conversationKey); if (!conversation) { // no valid conversation, early return window?.log?.error('Failed to get convo on reducer.'); @@ -536,18 +552,32 @@ function handleMessagesChangedOrAdded( function handleMessageExpiredOrDeleted( state: ConversationsStateType, - payload: { - messageId: string; - conversationKey: string; - } + payload: { conversationKey: string } & ( + | { + messageId: string; + } + | { + messageHash: string; + } + ) ) { - const { conversationKey, messageId } = payload; + const { conversationKey } = payload; + const messageId = (payload as any).messageId as string | undefined; + const messageHash = (payload as any).messageHash as string | undefined; + if (conversationKey === state.selectedConversation) { // search if we find this message id. // we might have not loaded yet, so this case might not happen - const messageInStoreIndex = state?.messages.findIndex(m => m.propsForMessage.id === messageId); + const messageInStoreIndex = state?.messages.findIndex( + m => + (messageId && m.propsForMessage.id === messageId) || + (messageHash && m.propsForMessage.messageHash === messageHash) + ); const editedQuotes = { ...state.quotes }; if (messageInStoreIndex >= 0) { + const msgToRemove = state.messages[messageInStoreIndex]; + const extractedMessageId = msgToRemove.propsForMessage.id; + // we cannot edit the array directly, so slice the first part, and slice the second part, // keeping the index removed out const editedMessages = [ @@ -572,7 +602,9 @@ function handleMessageExpiredOrDeleted( messages: editedMessages, quotes: editedQuotes, firstUnreadMessageId: - state.firstUnreadMessageId === messageId ? undefined : state.firstUnreadMessageId, + state.firstUnreadMessageId === extractedMessageId + ? 
undefined + : state.firstUnreadMessageId, }; } @@ -584,10 +616,16 @@ function handleMessageExpiredOrDeleted( function handleMessagesExpiredOrDeleted( state: ConversationsStateType, action: PayloadAction< - Array<{ - messageId: string; - conversationKey: string; - }> + Array< + { conversationKey: string } & ( + | { + messageId: string; + } + | { + messageHash: string; + } + ) + > > ): ConversationsStateType { let stateCopy = state; @@ -732,6 +770,17 @@ const conversationsSlice = createSlice({ ) { return handleMessagesExpiredOrDeleted(state, action); }, + messageHashesExpired( + state: ConversationsStateType, + action: PayloadAction< + Array<{ + messageHash: string; + conversationKey: string; + }> + > + ) { + return handleMessagesExpiredOrDeleted(state, action); + }, messagesDeleted( state: ConversationsStateType, @@ -1066,6 +1115,7 @@ export const { conversationRemoved, removeAllConversations, messagesExpired, + messageHashesExpired, messagesDeleted, conversationReset, messagesChanged, @@ -1090,7 +1140,7 @@ export const { } = actions; async function unmarkAsForcedUnread(convoId: string) { - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); if (convo && convo.isMarkedUnread()) { // we just opened it and it was forced "Unread", so we reset the unread state here await convo.markAsUnread(false, true); @@ -1142,7 +1192,7 @@ export async function openConversationToSpecificMessage(args: { const mostRecentMessageIdOnOpen = await Data.getLastMessageIdInConversation(conversationKey); - // we do not care about the firstunread message id when opening to a specific message + // we do not care about the first unread message id when opening to a specific message window.inboxStore?.dispatch( actions.openConversationToSpecificMessage({ conversationKey, diff --git a/ts/state/ducks/metaGroups.ts b/ts/state/ducks/metaGroups.ts new file mode 100644 index 0000000000..b2cac4c71f --- /dev/null +++ b/ts/state/ducks/metaGroups.ts @@ -0,0 +1,1434 @@ +/* eslint-disable no-await-in-loop */ +import { createAsyncThunk, createSlice, PayloadAction } from '@reduxjs/toolkit'; +import { + GroupInfoGet, + GroupMemberGet, + GroupPubkeyType, + PubkeyType, + UserGroupsGet, + WithGroupPubkey, + WithPubkey, +} from 'libsession_util_nodejs'; +import { concat, intersection, isEmpty, uniq } from 'lodash'; +import { from_hex } from 'libsodium-wrappers-sumo'; +import { ConfigDumpData } from '../../data/configDump/configDump'; +import { HexString } from '../../node/hexStrings'; +import { SignalService } from '../../protobuf'; +import { getSwarmPollingInstance } from '../../session/apis/snode_api'; +import { StoreGroupRequestFactory } from '../../session/apis/snode_api/factories/StoreGroupRequestFactory'; +import { ConvoHub } from '../../session/conversations'; +import { getSodiumRenderer } from '../../session/crypto'; +import { DisappearingMessages } from '../../session/disappearing_messages'; +import { ClosedGroup } from '../../session/group/closed-group'; +import { GroupUpdateInfoChangeMessage } from '../../session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateInfoChangeMessage'; +import { GroupUpdateMemberChangeMessage } from '../../session/messages/outgoing/controlMessage/group_v2/to_group/GroupUpdateMemberChangeMessage'; +import { PubKey } from '../../session/types'; +import { UserUtils } from '../../session/utils'; +import { PreConditionFailed } from '../../session/utils/errors'; +import { GroupInvite } from '../../session/utils/job_runners/jobs/GroupInviteJob'; 
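// Illustrative only (identifiers are placeholders): the two expiry actions added to the
// conversations slice earlier in this changeset take slightly different payloads, one keyed by
// message id and one keyed by message hash.
window.inboxStore?.dispatch(
  messagesExpired([{ conversationKey: convoId, messageId: expiredMessageId }])
);
window.inboxStore?.dispatch(
  messageHashesExpired([{ conversationKey: convoId, messageHash: expiredMessageHash }])
);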
+import { GroupPendingRemovals } from '../../session/utils/job_runners/jobs/GroupPendingRemovalsJob'; +import { GroupSync } from '../../session/utils/job_runners/jobs/GroupSyncJob'; +import { UserSync } from '../../session/utils/job_runners/jobs/UserSyncJob'; +import { RunJobResult } from '../../session/utils/job_runners/PersistedJob'; +import { LibSessionUtil } from '../../session/utils/libsession/libsession_utils'; +import { ed25519Str } from '../../session/utils/String'; +import { getUserED25519KeyPairBytes } from '../../session/utils/User'; +import { stringify, toFixedUint8ArrayOfLength } from '../../types/sqlSharedTypes'; +import { + getGroupPubkeyFromWrapperType, + isMetaWrapperType, +} from '../../webworker/workers/browser/libsession_worker_functions'; +import { + MetaGroupWrapperActions, + UserGroupsWrapperActions, +} from '../../webworker/workers/browser/libsession_worker_interface'; +import { StateType } from '../reducer'; +import { openConversationWithMessages } from './conversations'; +import { resetLeftOverlayMode } from './section'; +import { ConversationTypeEnum } from '../../models/types'; +import { NetworkTime } from '../../util/NetworkTime'; +import { GroupUpdateMessageFactory } from '../../session/messages/message_factory/group/groupUpdateMessageFactory'; +import { + WithAddWithHistoryMembers, + WithAddWithoutHistoryMembers, + WithFromMemberLeftMessage, + WithRemoveMembers, +} from '../../session/types/with'; + +export type GroupState = { + infos: Record; + members: Record>; + creationFromUIPending: boolean; + memberChangesFromUIPending: boolean; + nameChangesFromUIPending: boolean; + membersInviteSending: Record>; + membersPromoteSending: Record>; +}; + +export const initialGroupState: GroupState = { + infos: {}, + members: {}, + creationFromUIPending: false, + memberChangesFromUIPending: false, + nameChangesFromUIPending: false, + membersInviteSending: {}, + membersPromoteSending: {}, +}; + +type GroupDetailsUpdate = { + groupPk: GroupPubkeyType; + infos: GroupInfoGet; + members: Array; +}; + +async function checkWeAreAdmin(groupPk: GroupPubkeyType) { + const us = UserUtils.getOurPubKeyStrFromCache(); + + const usInGroup = await MetaGroupWrapperActions.memberGet(groupPk, us); + const inUserGroup = await UserGroupsWrapperActions.getGroup(groupPk); + // if the secretKey is not empty AND we are a member of the group, we are a current admin + return Boolean(!isEmpty(inUserGroup?.secretKey) && usInGroup?.nominatedAdmin); +} + +async function checkWeAreAdminOrThrow(groupPk: GroupPubkeyType, context: string) { + const areWeAdmin = await checkWeAreAdmin(groupPk); + if (!areWeAdmin) { + throw new Error(`checkWeAreAdminOrThrow failed with ctx: ${context}`); + } +} + +/** + * Create a brand new group with a 03 prefix. + * To be called only when our current logged in user, through the UI, creates a brand new closed group given a name and a list of members. 
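 *
 * @example
 * // Hypothetical dispatch from the group-creation UI. How the thunk is re-exported (e.g. via
 * // groupInfoActions) is not shown here, so treat the exact call path as an assumption:
 * const us = UserUtils.getOurPubKeyStrFromCache();
 * dispatch(initNewGroupInWrapper({ groupName: 'Book club', members: [us, friendPk], us }));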
+ * + */ +const initNewGroupInWrapper = createAsyncThunk( + 'group/initNewGroupInWrapper', + async ( + { + groupName, + members, + us, + }: { + groupName: string; + members: Array; + us: string; + }, + { dispatch } + ): Promise => { + if (!members.includes(us)) { + throw new PreConditionFailed('initNewGroupInWrapper needs us to be a member'); + } + if (members.some(k => !PubKey.is05Pubkey(k))) { + throw new PreConditionFailed('initNewGroupInWrapper only works with members being pubkeys'); + } + const uniqMembers = uniq(members) as Array; // the if just above ensures that this is fine + const newGroup = await UserGroupsWrapperActions.createGroup(); + const groupPk = newGroup.pubkeyHex; + + try { + const groupSecretKey = newGroup.secretKey; + if (!groupSecretKey) { + throw new Error('groupSecretKey was empty just after creation.'); + } + newGroup.name = groupName; // this will be used by the linked devices until they fetch the info from the groups swarm + // the `GroupSync` below will need the secretKey of the group to be saved in the wrapper. So save it! + await UserGroupsWrapperActions.setGroup(newGroup); + const ourEd25519KeyPairBytes = await UserUtils.getUserED25519KeyPairBytes(); + if (!ourEd25519KeyPairBytes) { + throw new Error('Current user has no priv ed25519 key?'); + } + const userEd25519SecretKey = ourEd25519KeyPairBytes.privKeyBytes; + const groupEd2519Pk = HexString.fromHexString(groupPk).slice(1); // remove the 03 prefix (single byte once in hex form) + + // dump is always empty when creating a new groupInfo + await MetaGroupWrapperActions.init(groupPk, { + metaDumped: null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(userEd25519SecretKey, 64).buffer, + groupEd25519Secretkey: newGroup.secretKey, + groupEd25519Pubkey: toFixedUint8ArrayOfLength(groupEd2519Pk, 32).buffer, + }); + + for (let index = 0; index < uniqMembers.length; index++) { + const member = uniqMembers[index]; + const convoMember = ConvoHub.use().get(member); + const displayName = convoMember?.getRealSessionUsername() || null; + const profileKeyHex = convoMember?.getProfileKey() || null; + const avatarUrl = convoMember?.getAvatarPointer() || null; + + // we just create the members in the state. 
Their invite state defaults to NOT_SENT, + // which will make our logic kick in to send them an invite in the `GroupInviteJob` + await LibSessionUtil.createMemberAndSetDetails({ + avatarUrl, + displayName, + groupPk, + memberPubkey: member, + profileKeyHex, + }); + + if (member === us) { + // we need to explicitly mark us as having accepted the promotion + await MetaGroupWrapperActions.memberSetPromotionAccepted(groupPk, member); + } + } + + const infos = await MetaGroupWrapperActions.infoGet(groupPk); + if (!infos) { + throw new Error(`getInfos of ${groupPk} returned empty result even if it was just init.`); + } + infos.name = groupName; + await MetaGroupWrapperActions.infoSet(groupPk, infos); + + const membersFromWrapper = await MetaGroupWrapperActions.memberGetAll(groupPk); + if (!membersFromWrapper || isEmpty(membersFromWrapper)) { + throw new Error( + `memberGetAll of ${groupPk} returned empty result even if it was just init.` + ); + } + // now that we've added members to the group, make sure to make a full key rotation + // to include them and marks the corresponding wrappers as dirty + await MetaGroupWrapperActions.keyRekey(groupPk); + + const convo = await ConvoHub.use().getOrCreateAndWait(groupPk, ConversationTypeEnum.GROUPV2); + await convo.setIsApproved(true, false); + await convo.commit(); // commit here too, as the poll needs it to be approved + let groupMemberChange: GroupUpdateMemberChangeMessage | null = null; + // push one group change message were initial members are added to the group + if (membersFromWrapper.length) { + const membersHex = uniq(membersFromWrapper.map(m => m.pubkeyHex)); + const sentAt = NetworkTime.now(); + const msgModel = await ClosedGroup.addUpdateMessage({ + diff: { type: 'add', added: membersHex, withHistory: false }, + expireUpdate: null, + sender: us, + sentAt, + convo, + markAlreadySent: false, // the store below will mark the message as sent with dbMsgIdentifier + }); + groupMemberChange = await GroupUpdateMessageFactory.getWithoutHistoryControlMessage({ + adminSecretKey: groupSecretKey, + convo, + groupPk, + withoutHistory: membersHex, + createAtNetworkTimestamp: sentAt, + dbMsgIdentifier: msgModel.id, + }); + } + + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [groupMemberChange], + { authData: null, secretKey: newGroup.secretKey } + ); + + const result = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests, + }); + if (result !== RunJobResult.Success) { + window.log.warn('GroupSync.pushChangesToGroupSwarmIfNeeded during create failed'); + throw new Error('failed to pushChangesToGroupSwarmIfNeeded'); + } + + await convo.commit(); + + getSwarmPollingInstance().addGroupId(new PubKey(groupPk)); + + await convo.unhideIfNeeded(); + convo.set({ active_at: Date.now() }); + await convo.commit(); + convo.updateLastMessage(); + dispatch(resetLeftOverlayMode()); + + // Everything is setup for this group, we now need to send the invites to each members, + // privately and asynchronously, and gracefully handle errors with toasts. + // Let's do all of this part of a job to handle app crashes and make sure we + // can update the group wrapper with a failed state if a message fails to be sent. 
+ await scheduleGroupInviteJobs( + groupPk, + membersFromWrapper.map(m => m.pubkeyHex), + [], + window.sessionFeatureFlags.useGroupV2InviteAsAdmin + ); + + await openConversationWithMessages({ conversationKey: groupPk, messageId: null }); + + return { groupPk: newGroup.pubkeyHex, infos, members: membersFromWrapper }; + } catch (e) { + window.log.warn('group creation failed. Deleting already saved data: ', e.message); + await UserGroupsWrapperActions.eraseGroup(groupPk); + await MetaGroupWrapperActions.infoDestroy(groupPk); + const foundConvo = ConvoHub.use().get(groupPk); + if (foundConvo) { + await ConvoHub.use().deleteGroup(groupPk, { + fromSyncMessage: false, + sendLeaveMessage: false, + deletionType: 'doNotKeep', + deleteAllMessagesOnSwarm: false, + forceDestroyForAllMembers: false, + }); + } + throw e; + } + } +); + +/** + * Called when a group entry of the user groups wrapper is added or updated (e.g. after a config merge from our linked devices). + * Inits the meta group wrapper for that group if it is not already tracked in the slice, and creates or updates the corresponding conversation. + */ +const handleUserGroupUpdate = createAsyncThunk( + 'group/handleUserGroupUpdate', + async (userGroup: UserGroupsGet, payloadCreator): Promise => { + // if we already have a state for that group here, it means that group was already init, and the data should come from the groupInfos after. + const state = payloadCreator.getState() as StateType; + const groupPk = userGroup.pubkeyHex; + if (state.groups.infos[groupPk] && state.groups.members[groupPk]) { + window.log.info('handleUserGroupUpdate group already present in redux slice'); + return { + groupPk, + infos: await MetaGroupWrapperActions.infoGet(groupPk), + members: await MetaGroupWrapperActions.memberGetAll(groupPk), + }; + } + + const ourEd25519KeyPairBytes = await UserUtils.getUserED25519KeyPairBytes(); + if (!ourEd25519KeyPairBytes) { + throw new Error('Current user has no priv ed25519 key?'); + } + const userEd25519SecretKey = ourEd25519KeyPairBytes.privKeyBytes; + const groupEd2519Pk = HexString.fromHexString(groupPk).slice(1); // remove the 03 prefix (single byte once in hex form) + + // dump is always empty when creating a new groupInfo + try { + await MetaGroupWrapperActions.init(groupPk, { + metaDumped: null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(userEd25519SecretKey, 64).buffer, + groupEd25519Secretkey: userGroup.secretKey, + groupEd25519Pubkey: toFixedUint8ArrayOfLength(groupEd2519Pk, 32).buffer, + }); + } catch (e) { + window.log.warn(`failed to init meta wrapper ${groupPk}`); + } + + const convo = await ConvoHub.use().getOrCreateAndWait(groupPk, ConversationTypeEnum.GROUPV2); + + // a group is approved when its invitePending is false + await convo.setIsApproved(!userGroup.invitePending, false); + + await convo.setPriorityFromWrapper(userGroup.priority, false); + + if (!convo.isActive()) { + convo.set({ + active_at: Date.now(), + }); + } + + convo.set({ + displayNameInProfile: userGroup.name || undefined, + }); + + await convo.commit(); + + return { + groupPk, + infos: await MetaGroupWrapperActions.infoGet(groupPk), + members: await MetaGroupWrapperActions.memberGetAll(groupPk), + }; + } +); + +/** + * Called only when the app just loaded the SessionInbox (i.e. user logged in and fully loaded). + * This function populates the slice with any meta dumps we have in the DB, provided they are also tracked by the user groups wrapper.
+ * + */ +const loadMetaDumpsFromDB = createAsyncThunk( + 'group/loadMetaDumpsFromDB', + async (): Promise> => { + const ed25519KeyPairBytes = await getUserED25519KeyPairBytes(); + if (!ed25519KeyPairBytes?.privKeyBytes) { + throw new Error('user has no ed25519KeyPairBytes.'); + } + + const variantsWithData = await ConfigDumpData.getAllDumpsWithData(); + const allUserGroups = await UserGroupsWrapperActions.getAllGroups(); + const toReturn: Array = []; + for (let index = 0; index < variantsWithData.length; index++) { + const { variant, data } = variantsWithData[index]; + if (!isMetaWrapperType(variant)) { + continue; + } + const groupPk = getGroupPubkeyFromWrapperType(variant); + const groupEd25519Pubkey = HexString.fromHexString(groupPk.substring(2)); + const foundInUserWrapper = allUserGroups.find(m => m.pubkeyHex === groupPk); + if (!foundInUserWrapper) { + try { + window.log.info( + 'metaGroup not found in userGroups. Deleting the corresponding dumps:', + groupPk + ); + + await ConfigDumpData.deleteDumpFor(groupPk); + } catch (e) { + window.log.warn(`ConfigDumpData.deleteDumpFor for ${groupPk} failed with `, e.message); + } + continue; + } + + try { + window.log.debug('loadMetaDumpsFromDB init from meta group dump', variant); + + await MetaGroupWrapperActions.init(groupPk, { + groupEd25519Pubkey: toFixedUint8ArrayOfLength(groupEd25519Pubkey, 32).buffer, + groupEd25519Secretkey: foundInUserWrapper?.secretKey || null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(ed25519KeyPairBytes.privKeyBytes, 64) + .buffer, + metaDumped: data, + }); + + const infos = await MetaGroupWrapperActions.infoGet(groupPk); + const members = await MetaGroupWrapperActions.memberGetAll(groupPk); + + toReturn.push({ groupPk, infos, members }); + } catch (e) { + // Note: Don't rethrow here, we want to load everything we can + window.log.error( + `initGroup of Group wrapper of variant ${variant} failed with ${e.message} ` + ); + } + } + + return toReturn; + } +); + +/** + * This action is to be called when we get a merge event from the network. + * It refreshes the state of that particular group (info & members) with the state from the wrapper after the merge is done.
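+ * If reading from the wrapper fails, only the groupPk is returned so the fulfilled reducer can drop that group's entries from the slice.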
+ */ +const refreshGroupDetailsFromWrapper = createAsyncThunk( + 'group/refreshGroupDetailsFromWrapper', + async ({ + groupPk, + }: { + groupPk: GroupPubkeyType; + }): Promise< + GroupDetailsUpdate | ({ groupPk: GroupPubkeyType } & Partial) + > => { + try { + const infos = await MetaGroupWrapperActions.infoGet(groupPk); + const members = await MetaGroupWrapperActions.memberGetAll(groupPk); + + return { groupPk, infos, members }; + } catch (e) { + window.log.warn('refreshGroupDetailsFromWrapper failed with ', e.message); + return { groupPk }; + } + } +); + +function validateMemberAddChange({ + groupPk, + withHistory: addMembersWithHistory, + withoutHistory: addMembersWithoutHistory, +}: WithGroupPubkey & WithAddWithoutHistoryMembers & WithAddWithHistoryMembers) { + const us = UserUtils.getOurPubKeyStrFromCache(); + if (addMembersWithHistory.includes(us) || addMembersWithoutHistory.includes(us)) { + throw new PreConditionFailed( + 'currentDeviceGroupMembersChange cannot be used for changes of our own state in the group' + ); + } + + const withHistory = uniq(addMembersWithHistory); + const withoutHistory = uniq(addMembersWithoutHistory); + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + throw new PreConditionFailed('currentDeviceGroupMembersChange convo not present in convo hub'); + } + if (intersection(withHistory, withoutHistory).length) { + throw new Error( + 'withHistory and withoutHistory can only have values which are not in the other' + ); + } + + return { withoutHistory, withHistory, us, convo }; +} + +function validateMemberRemoveChange({ + groupPk, + removed: removeMembers, +}: WithGroupPubkey & WithRemoveMembers) { + const us = UserUtils.getOurPubKeyStrFromCache(); + if (removeMembers.includes(us)) { + throw new PreConditionFailed( + 'currentDeviceGroupMembersChange cannot be used for changes of our own state in the group' + ); + } + + const removed = uniq(removeMembers); + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + throw new PreConditionFailed('currentDeviceGroupMembersChange convo not present in convo hub'); + } + + return { removed, us, convo }; +} + +function validateNameChange({ + groupPk, + newName, + currentName, +}: WithGroupPubkey & { newName: string; currentName: string }) { + const us = UserUtils.getOurPubKeyStrFromCache(); + if (!newName || isEmpty(newName)) { + throw new PreConditionFailed('validateNameChange needs a non empty name'); + } + + const convo = ConvoHub.use().get(groupPk); + if (!convo) { + throw new PreConditionFailed('validateNameChange convo not present in convo hub'); + } + if (newName === currentName) { + throw new PreConditionFailed('validateNameChange no name change detected'); + } + + return { newName, us, convo }; +} + +/** + * Update the GROUP_MEMBER wrapper state to have those members. 
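+ * Each member is created with the display name, profile key and avatar pointer cached in their one-to-one conversation, and their invite flag is cleared.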
+ * @returns the supplementalKeys to be pushed + */ +async function handleWithHistoryMembers({ + groupPk, + withHistory, +}: WithGroupPubkey & { + withHistory: Array; +}) { + for (let index = 0; index < withHistory.length; index++) { + const member = withHistory[index]; + + const convoMember = ConvoHub.use().get(member); + const displayName = convoMember?.getRealSessionUsername() || null; + const profileKeyHex = convoMember?.getProfileKey() || null; + const avatarUrl = convoMember?.getAvatarPointer() || null; + + await LibSessionUtil.createMemberAndSetDetails({ + avatarUrl, + displayName, + groupPk, + memberPubkey: member, + profileKeyHex, + }); + await MetaGroupWrapperActions.memberSetInvited(groupPk, member, false); + } + const encryptedSupplementKeys = withHistory.length + ? await MetaGroupWrapperActions.generateSupplementKeys(groupPk, withHistory) + : null; + return encryptedSupplementKeys; +} + +/** + * Update the GROUP_MEMBER wrapper state to have those members. + * Calls rekey() if at least one was present in the list. + */ +async function handleWithoutHistoryMembers({ + groupPk, + withoutHistory, +}: WithGroupPubkey & WithAddWithoutHistoryMembers) { + for (let index = 0; index < withoutHistory.length; index++) { + const member = withoutHistory[index]; + const convoMember = ConvoHub.use().get(member); + const displayName = convoMember?.getRealSessionUsername() || null; + const profileKeyHex = convoMember?.getProfileKey() || null; + const avatarUrl = convoMember?.getAvatarPointer() || null; + + await LibSessionUtil.createMemberAndSetDetails({ + groupPk, + memberPubkey: member, + avatarUrl, + displayName, + profileKeyHex, + }); + await MetaGroupWrapperActions.memberSetInvited(groupPk, member, false); + } + + if (!isEmpty(withoutHistory)) { + await MetaGroupWrapperActions.keyRekey(groupPk); + } +} + +async function handleMemberAddedFromUI({ + addMembersWithHistory, + addMembersWithoutHistory, + groupPk, +}: WithGroupPubkey & { + addMembersWithHistory: Array; + addMembersWithoutHistory: Array; +}) { + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || isEmpty(group.secretKey)) { + throw new Error('tried to make change to group but we do not have the admin secret key'); + } + + await checkWeAreAdminOrThrow(groupPk, 'handleMemberAddedFromUIOrNot'); + + const { withHistory, withoutHistory, convo, us } = validateMemberAddChange({ + withHistory: addMembersWithHistory, + withoutHistory: addMembersWithoutHistory, + groupPk, + }); + // first, get the unrevoke requests for people who are added + const { revokeSubRequest, unrevokeSubRequest } = + await GroupPendingRemovals.getPendingRevokeParams({ + groupPk, + withHistory, + withoutHistory, + removed: [], + secretKey: group.secretKey, + }); + + // then, handle the addition with history of messages by generating supplement keys. + // this adds them to the members wrapper etc + const encryptedSupplementKeys = await handleWithHistoryMembers({ groupPk, withHistory }); + + const supplementalKeysSubRequest = StoreGroupRequestFactory.makeStoreGroupKeysSubRequest({ + group, + encryptedSupplementKeys, + }); + + // then handle the addition without history of messages (full rotation of keys). 
+ // this adds them to the members wrapper etc + await handleWithoutHistoryMembers({ groupPk, withoutHistory }); + const createAtNetworkTimestamp = NetworkTime.now(); + + await LibSessionUtil.saveDumpsToDb(groupPk); + + const expireDetails = DisappearingMessages.getExpireDetailsForOutgoingMessage( + convo, + createAtNetworkTimestamp + ); + const shared = { + convo, + sender: us, + sentAt: createAtNetworkTimestamp, + expireUpdate: expireDetails, + markAlreadySent: false, // the store below will mark the message as sent with dbMsgIdentifier + }; + const updateMessagesToPush: Array = []; + if (withHistory.length) { + const msgModel = await ClosedGroup.addUpdateMessage({ + diff: { type: 'add', added: withHistory, withHistory: true }, + ...shared, + }); + const groupChange = await GroupUpdateMessageFactory.getWithHistoryControlMessage({ + adminSecretKey: group.secretKey, + convo, + groupPk, + withHistory, + createAtNetworkTimestamp, + dbMsgIdentifier: msgModel.id, + }); + if (groupChange) { + updateMessagesToPush.push(groupChange); + } + } + if (withoutHistory.length) { + const msgModel = await ClosedGroup.addUpdateMessage({ + diff: { type: 'add', added: withoutHistory, withHistory: false }, + ...shared, + }); + const groupChange = await GroupUpdateMessageFactory.getWithoutHistoryControlMessage({ + adminSecretKey: group.secretKey, + convo, + groupPk, + withoutHistory, + createAtNetworkTimestamp, + dbMsgIdentifier: msgModel.id, + }); + if (groupChange) { + updateMessagesToPush.push(groupChange); + } + } + + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + updateMessagesToPush, + group + ); + + // push new members & key supplement in a single batch call + const sequenceResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + supplementalKeysSubRequest, + revokeSubRequest, + unrevokeSubRequest, + extraStoreRequests, + }); + if (sequenceResult !== RunJobResult.Success) { + throw new Error( + 'handleMemberAddedFromUIOrNot: pushChangesToGroupSwarmIfNeeded did not return success' + ); + } + + // schedule send invite details, auth signature, etc. 
to the new users + await scheduleGroupInviteJobs( + groupPk, + withHistory, + withoutHistory, + window.sessionFeatureFlags.useGroupV2InviteAsAdmin + ); + await LibSessionUtil.saveDumpsToDb(groupPk); + + convo.set({ + active_at: createAtNetworkTimestamp, + }); + + await convo.commit(); +} + +/** + * This function is called in two cases: + * - to update the state when kicking a member from the group from the UI + * - to update the state when handling a MEMBER_LEFT message + */ +async function handleMemberRemovedFromUI({ + groupPk, + removeMembers, + fromMemberLeftMessage, + alsoRemoveMessages, +}: WithFromMemberLeftMessage & + WithGroupPubkey & { + removeMembers: Array; + alsoRemoveMessages: boolean; + }) { + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || isEmpty(group.secretKey)) { + throw new Error('tried to make change to group but we do not have the admin secret key'); + } + + await checkWeAreAdminOrThrow(groupPk, 'handleMemberRemovedFromUI'); + + if (removeMembers.length === 0) { + window.log.debug('handleMemberRemovedFromUI: removeMembers is empty'); + + return; + } + + const { removed, convo, us } = validateMemberRemoveChange({ + groupPk, + removed: removeMembers, + }); + + if (removed.length === 0) { + window.log.debug('handleMemberRemovedFromUI: removeMembers after validation is empty'); + + return; + } + + // We need to mark the member as "pending removal" so any admins (including us) can deal with it as soon as possible + await MetaGroupWrapperActions.membersMarkPendingRemoval(groupPk, removed, alsoRemoveMessages); + await LibSessionUtil.saveDumpsToDb(groupPk); + + // We don't revoke the member's token right away. Instead we schedule a `GroupPendingRemovals` + // which will deal with the revokes of all of them together. + await GroupPendingRemovals.addJob({ groupPk }); + + // Build a GroupUpdateMessage to be sent if that member was kicked by us. + const createAtNetworkTimestamp = NetworkTime.now(); + const expiringDetails = DisappearingMessages.getExpireDetailsForOutgoingMessage( + convo, + createAtNetworkTimestamp + ); + let removedControlMessage: GroupUpdateMemberChangeMessage | null = null; + + // We only add/send a message if that user didn't leave but was explicitly kicked. + // When a member leaves on their own, they send a GroupUpdateMessage themselves. + if (!fromMemberLeftMessage) { + const msgModel = await ClosedGroup.addUpdateMessage({ + diff: { type: 'kicked', kicked: removed }, + convo, + sender: us, + sentAt: createAtNetworkTimestamp, + expireUpdate: { + expirationTimer: expiringDetails.expireTimer, + expirationType: expiringDetails.expirationType, + messageExpirationFromRetrieve: + expiringDetails.expireTimer > 0 + ? createAtNetworkTimestamp + expiringDetails.expireTimer + : null, + }, + markAlreadySent: false, // the store below will mark the message as sent using dbMsgIdentifier + }); + removedControlMessage = await GroupUpdateMessageFactory.getRemovedControlMessage({ + adminSecretKey: group.secretKey, + convo, + groupPk, + removed, + createAtNetworkTimestamp, + fromMemberLeftMessage, + dbMsgIdentifier: msgModel.id, + }); + } + + // build the request for that GroupUpdateMessage if needed + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [removedControlMessage], + group + ); + + // Send the updated config (with changes to pending_removal) and that GroupUpdateMessage request (if any) as a sequence.
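+ // Note: if the push below fails we throw, so the calling thunk (e.g. currentDeviceGroupMembersChange) is rejected and its reducer can clear the pending-from-UI state.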
+ const sequenceResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests, + }); + if (sequenceResult !== RunJobResult.Success) { + throw new Error( + 'currentDeviceGroupMembersChange: pushChangesToGroupSwarmIfNeeded did not return success' + ); + } + + await LibSessionUtil.saveDumpsToDb(groupPk); + + convo.set({ + active_at: createAtNetworkTimestamp, + }); + await convo.commit(); +} + +async function handleNameChangeFromUI({ + groupPk, + newName: uncheckedName, +}: WithGroupPubkey & { + newName: string; +}) { + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!group || !group.secretKey || isEmpty(group.secretKey)) { + throw new Error('tried to make change to group but we do not have the admin secret key'); + } + const infos = await MetaGroupWrapperActions.infoGet(groupPk); + if (!infos) { + throw new PreConditionFailed('nameChange infoGet is empty'); + } + + await checkWeAreAdminOrThrow(groupPk, 'handleNameChangeFromUIOrNot'); + + // this throws if the name is the same, or empty + const { newName, convo, us } = validateNameChange({ + newName: uncheckedName, + currentName: group.name || '', + groupPk, + }); + + group.name = newName; + infos.name = newName; + await UserGroupsWrapperActions.setGroup(group); + await MetaGroupWrapperActions.infoSet(groupPk, infos); + const createAtNetworkTimestamp = NetworkTime.now(); + + // we want to add an update message even if the change was done remotely + const msg = await ClosedGroup.addUpdateMessage({ + convo, + diff: { type: 'name', newName }, + sender: us, + sentAt: createAtNetworkTimestamp, + expireUpdate: DisappearingMessages.getExpireDetailsForOutgoingMessage( + convo, + createAtNetworkTimestamp + ), + markAlreadySent: false, // the store below will mark the message as sent with dbMsgIdentifier + }); + + // we want to send an update only if the change was made locally. + const nameChangeMsg = new GroupUpdateInfoChangeMessage({ + groupPk, + typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type.NAME, + updatedName: newName, + identifier: msg.id, + createAtNetworkTimestamp, + secretKey: group.secretKey, + sodium: await getSodiumRenderer(), + ...DisappearingMessages.getExpireDetailsForOutgoingMessage(convo, createAtNetworkTimestamp), + }); + + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [nameChangeMsg], + group + ); + + const batchResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests, + }); + + if (batchResult !== RunJobResult.Success) { + throw new Error( + 'handleNameChangeFromUIOrNot: pushChangesToGroupSwarmIfNeeded did not return success' + ); + } + + await UserSync.queueNewJobIfNeeded(); + + convo.set({ + active_at: createAtNetworkTimestamp, + }); + await convo.commit(); +} + +/** + * This action is used to trigger a change when the local user does a change to a group v2 members list. + * GroupV2 added members can be added two ways: with and without the history of messages. + * GroupV2 removed members have their sub account token revoked on the server side so they cannot poll anymore from the group's swarm. 
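+ * Removals are handled first, then additions; the updated infos and members are then read back from the wrapper and returned so the redux slice can be refreshed.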
+ */ +const currentDeviceGroupMembersChange = createAsyncThunk( + 'group/currentDeviceGroupMembersChange', + async ( + { + groupPk, + ...args + }: { + groupPk: GroupPubkeyType; + addMembersWithHistory: Array; + addMembersWithoutHistory: Array; + removeMembers: Array; + alsoRemoveMessages: boolean; + }, + payloadCreator + ): Promise => { + const state = payloadCreator.getState() as StateType; + if (!state.groups.infos[groupPk] || !state.groups.members[groupPk]) { + throw new PreConditionFailed( + 'currentDeviceGroupMembersChange group not present in redux slice' + ); + } + + await handleMemberRemovedFromUI({ + groupPk, + removeMembers: args.removeMembers, + fromMemberLeftMessage: false, + alsoRemoveMessages: args.alsoRemoveMessages, + }); + + await handleMemberAddedFromUI({ + groupPk, + addMembersWithHistory: args.addMembersWithHistory, + addMembersWithoutHistory: args.addMembersWithoutHistory, + }); + + return { + groupPk, + infos: await MetaGroupWrapperActions.infoGet(groupPk), + members: await MetaGroupWrapperActions.memberGetAll(groupPk), + }; + } +); + +const triggerFakeAvatarUpdate = createAsyncThunk( + 'group/triggerFakeAvatarUpdate', + async ( + { + groupPk, + }: { + groupPk: GroupPubkeyType; + }, + payloadCreator + ): Promise => { + const state = payloadCreator.getState() as StateType; + if (!state.groups.infos[groupPk]) { + throw new PreConditionFailed('triggerFakeAvatarUpdate group not present in redux slice'); + } + const convo = ConvoHub.use().get(groupPk); + const group = await UserGroupsWrapperActions.getGroup(groupPk); + if (!convo || !group || !group.secretKey || isEmpty(group.secretKey)) { + throw new Error( + 'triggerFakeAvatarUpdate: tried to make change to group but we do not have the admin secret key' + ); + } + + const createAtNetworkTimestamp = NetworkTime.now(); + const expireUpdate = DisappearingMessages.getExpireDetailsForOutgoingMessage( + convo, + createAtNetworkTimestamp + ); + const msgModel = await ClosedGroup.addUpdateMessage({ + diff: { type: 'avatarChange' }, + expireUpdate, + sender: UserUtils.getOurPubKeyStrFromCache(), + sentAt: createAtNetworkTimestamp, + convo, + markAlreadySent: false, // the store below will mark the message as sent with dbMsgIdentifier + }); + + await msgModel.commit(); + const updateMsg = new GroupUpdateInfoChangeMessage({ + createAtNetworkTimestamp, + typeOfChange: SignalService.GroupUpdateInfoChangeMessage.Type.AVATAR, + ...expireUpdate, + groupPk, + identifier: msgModel.id, + secretKey: group.secretKey, + sodium: await getSodiumRenderer(), + }); + + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest( + [updateMsg], + group + ); + + const batchResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests, + }); + if (!batchResult) { + window.log.warn(`failed to send avatarChange message for group ${ed25519Str(groupPk)}`); + throw new Error('failed to send avatarChange message'); + } + } +); + +const triggerFakeDeleteMsgBeforeNow = createAsyncThunk( + 'group/triggerFakeDeleteMsgBeforeNow', + async ( + { + groupPk, + messagesWithAttachmentsOnly, + }: { + groupPk: GroupPubkeyType; + messagesWithAttachmentsOnly: boolean; + }, + payloadCreator + ): Promise => { + const state = payloadCreator.getState() as StateType; + if (!state.groups.infos[groupPk]) { + throw new PreConditionFailed( + 'triggerFakeDeleteMsgBeforeNow group not present in redux slice' + ); + } + const convo = ConvoHub.use().get(groupPk); + const group = await UserGroupsWrapperActions.getGroup(groupPk); + 
if (!convo || !group || !group.secretKey || isEmpty(group.secretKey)) { + throw new Error( + 'triggerFakeDeleteMsgBeforeNow: tried to make change to group but we do not have the admin secret key' + ); + } + + const nowSeconds = Math.floor(NetworkTime.now() / 1000); + const infoGet = await MetaGroupWrapperActions.infoGet(groupPk); + if (messagesWithAttachmentsOnly) { + infoGet.deleteAttachBeforeSeconds = nowSeconds; + } else { + infoGet.deleteBeforeSeconds = nowSeconds; + } + + await MetaGroupWrapperActions.infoSet(groupPk, infoGet); + + const extraStoreRequests = await StoreGroupRequestFactory.makeGroupMessageSubRequest([], group); + + const batchResult = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests, + }); + if (!batchResult) { + window.log.warn( + `failed to send deleteBeforeSeconds/deleteAttachBeforeSeconds message for group ${ed25519Str(groupPk)}` + ); + throw new Error('failed to send deleteBeforeSeconds/deleteAttachBeforeSeconds message'); + } + } +); + +/** + * This action is called when we receive a MEMBER_LEFT control message for a group v2. + * If we are an admin of that group, the leaving member is marked as pending removal (no 'kicked' control message is sent, since they left on their own), and the refreshed group state is returned. + */ +const handleMemberLeftMessage = createAsyncThunk( + 'group/handleMemberLeftMessage', + async ( + { + groupPk, + memberLeft, + }: { + groupPk: GroupPubkeyType; + memberLeft: PubkeyType; + }, + payloadCreator + ): Promise => { + const state = payloadCreator.getState() as StateType; + if (!state.groups.infos[groupPk] || !state.groups.members[groupPk]) { + throw new PreConditionFailed( + 'handleMemberLeftMessage group not present in redux slice' + ); + } + + if (await checkWeAreAdmin(groupPk)) { + await handleMemberRemovedFromUI({ + groupPk, + removeMembers: [memberLeft], + fromMemberLeftMessage: true, + alsoRemoveMessages: false, + }); + } + + return { + groupPk, + infos: await MetaGroupWrapperActions.infoGet(groupPk), + members: await MetaGroupWrapperActions.memberGetAll(groupPk), + }; + } +); + +const inviteResponseReceived = createAsyncThunk( + 'group/inviteResponseReceived', + async ( + { + groupPk, + member, + }: { + groupPk: GroupPubkeyType; + member: PubkeyType; + }, + payloadCreator + ): Promise => { + const state = payloadCreator.getState() as StateType; + if (!state.groups.infos[groupPk] || !state.groups.members[groupPk]) { + throw new PreConditionFailed('inviteResponseReceived group not present in redux slice'); + } + try { + await checkWeAreAdminOrThrow(groupPk, 'inviteResponseReceived'); + + await MetaGroupWrapperActions.memberSetAccepted(groupPk, member); + try { + const memberConvo = ConvoHub.use().get(member); + if (memberConvo) { + const memberName = memberConvo.getRealSessionUsername(); + if (memberName) { + await MetaGroupWrapperActions.memberSetNameTruncated(groupPk, member, memberName); + } + const profilePicUrl = memberConvo.getAvatarPointer(); + const profilePicKey = memberConvo.getProfileKey(); + if (profilePicUrl && profilePicKey) { + await MetaGroupWrapperActions.memberSetProfilePicture(groupPk, member, { + key: from_hex(profilePicKey), + url: profilePicUrl, + }); + } + } + } catch (eMemberUpdate) { + window.log.warn( + `failed to update member details on inviteResponse received in group:${ed25519Str(groupPk)}, member:${ed25519Str(member)}, error:${eMemberUpdate.message}` + ); + } + await GroupSync.queueNewJobIfNeeded(groupPk);
+ } catch (e) { + window.log.info('inviteResponseReceived failed with', e.message); + // only admins can do the steps above, but we don't want to throw if we are not an admin + } + + return { + groupPk, + infos: await MetaGroupWrapperActions.infoGet(groupPk), + members: await MetaGroupWrapperActions.memberGetAll(groupPk), + }; + } +); + +const currentDeviceGroupNameChange = createAsyncThunk( + 'group/currentDeviceGroupNameChange', + async ( + { + groupPk, + ...args + }: { + groupPk: GroupPubkeyType; + newName: string; + }, + payloadCreator + ): Promise => { + const state = payloadCreator.getState() as StateType; + if (!state.groups.infos[groupPk] || !state.groups.members[groupPk]) { + throw new PreConditionFailed('currentDeviceGroupNameChange group not present in redux slice'); + } + await checkWeAreAdminOrThrow(groupPk, 'currentDeviceGroupNameChange'); + + await handleNameChangeFromUI({ groupPk, ...args }); + + return { + groupPk, + infos: await MetaGroupWrapperActions.infoGet(groupPk), + members: await MetaGroupWrapperActions.memberGetAll(groupPk), + }; + } +); + +function deleteGroupPkEntriesFromState(state: GroupState, groupPk: GroupPubkeyType) { + delete state.infos[groupPk]; + delete state.members[groupPk]; + delete state.membersInviteSending[groupPk]; + delete state.membersPromoteSending[groupPk]; +} + +function applySendingStateChange({ + groupPk, + pubkey, + sending, + state, + changeType, +}: WithGroupPubkey & + WithPubkey & { sending: boolean; changeType: 'invite' | 'promote'; state: GroupState }) { + if (changeType === 'invite' && !state.membersInviteSending[groupPk]) { + state.membersInviteSending[groupPk] = []; + } else if (changeType === 'promote' && !state.membersPromoteSending[groupPk]) { + state.membersPromoteSending[groupPk] = []; + } + const arrRef = + changeType === 'invite' + ? state.membersInviteSending[groupPk] + : state.membersPromoteSending[groupPk]; + + const foundAt = arrRef.findIndex(p => p === pubkey); + + if (sending && foundAt === -1) { + arrRef.push(pubkey); + return state; + } + if (!sending && foundAt >= 0) { + arrRef.splice(foundAt, 1); + } + return state; +} + +function refreshConvosModelProps(convoIds: Array) { + /** + * + * This is not ideal, but some fields stored in this slice are ALSO stored in the conversation slice. Things like admins, members, groupName, kicked, etc. + * So, anytime a change is made in this metaGroup slice, we need to make sure the conversation slice is updated too. + * The way to update the conversation slice is to call `triggerUIRefresh` on the corresponding conversation object. + * Eventually, we will have a centralized state with libsession used across the app, and those slices will only expose data from the libsession state. + * + */ + setTimeout(() => { + convoIds.map(id => ConvoHub.use().get(id)).map(c => c?.triggerUIRefresh()); + }, 1000); +} + +/** + * This slice holds the in-memory state of the 03 groups: the group infos and members read from the meta group wrappers, plus the transient invite/promote "sending" flags used by the UI.
+ */ +const metaGroupSlice = createSlice({ + name: 'metaGroup', + initialState: initialGroupState, + reducers: { + setInvitePending( + state: GroupState, + { payload }: PayloadAction<{ sending: boolean } & WithGroupPubkey & WithPubkey> + ) { + return applySendingStateChange({ changeType: 'invite', ...payload, state }); + }, + + setPromotionPending( + state: GroupState, + { payload }: PayloadAction<{ pubkey: PubkeyType; groupPk: GroupPubkeyType; sending: boolean }> + ) { + return applySendingStateChange({ changeType: 'promote', ...payload, state }); + }, + removeGroupDetailsFromSlice( + state: GroupState, + { payload }: PayloadAction<{ groupPk: GroupPubkeyType }> + ) { + delete state.infos[payload.groupPk]; + delete state.members[payload.groupPk]; + delete state.membersInviteSending[payload.groupPk]; + delete state.membersPromoteSending[payload.groupPk]; + }, + }, + extraReducers: builder => { + builder.addCase(initNewGroupInWrapper.fulfilled, (state, action) => { + const { groupPk, infos, members } = action.payload; + state.infos[groupPk] = infos; + state.members[groupPk] = members; + state.creationFromUIPending = false; + refreshConvosModelProps([groupPk]); + return state; + }); + builder.addCase(initNewGroupInWrapper.rejected, (state, action) => { + window.log.error('a initNewGroupInWrapper was rejected', action.error); + state.creationFromUIPending = false; + return state; + // FIXME delete the wrapper completely & corresponding dumps, and user groups entry? + }); + builder.addCase(initNewGroupInWrapper.pending, (state, _action) => { + state.creationFromUIPending = true; + + window.log.error('a initNewGroupInWrapper is pending'); + return state; + }); + builder.addCase(loadMetaDumpsFromDB.fulfilled, (state, action) => { + const loaded = action.payload; + loaded.forEach(element => { + state.infos[element.groupPk] = element.infos; + state.members[element.groupPk] = element.members; + }); + refreshConvosModelProps(loaded.map(m => m.groupPk)); + return state; + }); + builder.addCase(loadMetaDumpsFromDB.rejected, (state, action) => { + window.log.error('a loadMetaDumpsFromDB was rejected', action.error); + return state; + }); + builder.addCase(refreshGroupDetailsFromWrapper.fulfilled, (state, action) => { + const { infos, members, groupPk } = action.payload; + if (infos && members) { + state.infos[groupPk] = infos; + state.members[groupPk] = members; + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + window.log.info(`groupInfo after merge: ${stringify(infos)}`); + window.log.info(`groupMembers after merge: ${stringify(members)}`); + } + refreshConvosModelProps([groupPk]); + } else { + window.log.debug( + `refreshGroupDetailsFromWrapper no details found, removing from slice: ${groupPk}}` + ); + + deleteGroupPkEntriesFromState(state, groupPk); + } + return state; + }); + builder.addCase(refreshGroupDetailsFromWrapper.rejected, (_state, action) => { + window.log.error('a refreshGroupDetailsFromWrapper was rejected', action.error); + }); + + builder.addCase(handleUserGroupUpdate.fulfilled, (state, action) => { + const { infos, members, groupPk } = action.payload; + if (infos && members) { + state.infos[groupPk] = infos; + state.members[groupPk] = members; + refreshConvosModelProps([groupPk]); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + window.log.info(`groupInfo after handleUserGroupUpdate: ${stringify(infos)}`); + window.log.info(`groupMembers after handleUserGroupUpdate: ${stringify(members)}`); + } + } else { + window.log.debug( + `handleUserGroupUpdate no 
details found, removing from slice: ${groupPk}}` + ); + + deleteGroupPkEntriesFromState(state, groupPk); + } + }); + builder.addCase(handleUserGroupUpdate.rejected, (_state, action) => { + window.log.error('a handleUserGroupUpdate was rejected', action.error); + }); + builder.addCase(currentDeviceGroupMembersChange.fulfilled, (state, action) => { + state.memberChangesFromUIPending = false; + + const { infos, members, groupPk } = action.payload; + state.infos[groupPk] = infos; + state.members[groupPk] = members; + refreshConvosModelProps([groupPk]); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + window.log.info(`groupInfo after currentDeviceGroupMembersChange: ${stringify(infos)}`); + window.log.info( + `groupMembers after currentDeviceGroupMembersChange: ${stringify(members)}` + ); + } + }); + builder.addCase(currentDeviceGroupMembersChange.rejected, (state, action) => { + window.log.error('a currentDeviceGroupMembersChange was rejected', action.error); + state.memberChangesFromUIPending = false; + }); + builder.addCase(currentDeviceGroupMembersChange.pending, state => { + state.memberChangesFromUIPending = true; + }); + + /** currentDeviceGroupNameChange */ + builder.addCase(currentDeviceGroupNameChange.fulfilled, (state, action) => { + state.nameChangesFromUIPending = false; + + const { infos, members, groupPk } = action.payload; + state.infos[groupPk] = infos; + state.members[groupPk] = members; + refreshConvosModelProps([groupPk]); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + window.log.info(`groupInfo after currentDeviceGroupNameChange: ${stringify(infos)}`); + window.log.info(`groupMembers after currentDeviceGroupNameChange: ${stringify(members)}`); + } + }); + builder.addCase(currentDeviceGroupNameChange.rejected, (state, action) => { + window.log.error(`a ${currentDeviceGroupNameChange.name} was rejected`, action.error); + state.nameChangesFromUIPending = false; + }); + builder.addCase(currentDeviceGroupNameChange.pending, state => { + state.nameChangesFromUIPending = true; + }); + + /** handleMemberLeftMessage */ + builder.addCase(handleMemberLeftMessage.fulfilled, (state, action) => { + const { infos, members, groupPk } = action.payload; + state.infos[groupPk] = infos; + state.members[groupPk] = members; + refreshConvosModelProps([groupPk]); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + window.log.info(`groupInfo after handleMemberLeftMessage: ${stringify(infos)}`); + window.log.info(`groupMembers after handleMemberLeftMessage: ${stringify(members)}`); + } + }); + builder.addCase(handleMemberLeftMessage.rejected, (_state, action) => { + window.log.error('a handleMemberLeftMessage was rejected', action.error); + }); + + builder.addCase(inviteResponseReceived.fulfilled, (state, action) => { + const { infos, members, groupPk } = action.payload; + state.infos[groupPk] = infos; + state.members[groupPk] = members; + refreshConvosModelProps([groupPk]); + if (window.sessionFeatureFlags.debug.debugLibsessionDumps) { + window.log.info(`groupInfo after inviteResponseReceived: ${stringify(infos)}`); + window.log.info(`groupMembers after inviteResponseReceived: ${stringify(members)}`); + } + }); + builder.addCase(inviteResponseReceived.rejected, (_state, action) => { + window.log.error('a inviteResponseReceived was rejected', action.error); + }); + + // triggerFakeAvatarUpdate + builder.addCase(triggerFakeAvatarUpdate.fulfilled, () => { + window.log.error('a triggerFakeAvatarUpdate was fulfilled'); + }); + 
builder.addCase(triggerFakeAvatarUpdate.rejected, (_state, action) => { + window.log.error('a triggerFakeAvatarUpdate was rejected', action.error); + }); + }, +}); + +export const groupInfoActions = { + initNewGroupInWrapper, + loadMetaDumpsFromDB, + refreshGroupDetailsFromWrapper, + handleUserGroupUpdate, + currentDeviceGroupMembersChange, + inviteResponseReceived, + handleMemberLeftMessage, + currentDeviceGroupNameChange, + triggerFakeAvatarUpdate, + triggerFakeDeleteMsgBeforeNow, + + ...metaGroupSlice.actions, +}; +export const groupReducer = metaGroupSlice.reducer; + +async function scheduleGroupInviteJobs( + groupPk: GroupPubkeyType, + withHistory: Array, + withoutHistory: Array, + inviteAsAdmin: boolean +) { + const merged = uniq(concat(withHistory, withoutHistory)); + for (let index = 0; index < merged.length; index++) { + const member = merged[index]; + // Note: forceUnrevoke is false, because `scheduleGroupInviteJobs` is always called after we've done + // a batch unrevoke of all the members' pk + await GroupInvite.addJob({ groupPk, member, inviteAsAdmin, forceUnrevoke: false }); + } +} diff --git a/ts/state/ducks/modalDialog.tsx b/ts/state/ducks/modalDialog.tsx index 1b6c5bc69d..d662db86da 100644 --- a/ts/state/ducks/modalDialog.tsx +++ b/ts/state/ducks/modalDialog.tsx @@ -10,16 +10,19 @@ import type { EditProfilePictureModalProps, PasswordAction } from '../../types/R export type BanType = 'ban' | 'unban'; export type ConfirmModalState = SessionConfirmDialogProps | null; -export type InviteContactModalState = { conversationId: string } | null; -export type BanOrUnbanUserModalState = { - conversationId: string; - banType: BanType; - pubkey?: string; -} | null; + +type WithConvoId = { conversationId: string }; +export type InviteContactModalState = WithConvoId | null; +export type BanOrUnbanUserModalState = + | (WithConvoId & { + banType: BanType; + pubkey?: string; + }) + | null; export type AddModeratorsModalState = InviteContactModalState; export type RemoveModeratorsModalState = InviteContactModalState; export type UpdateGroupMembersModalState = InviteContactModalState; -export type UpdateGroupNameModalState = InviteContactModalState; +export type UpdateGroupNameModalState = WithConvoId | null; export type ChangeNickNameModalState = InviteContactModalState; export type EditProfileModalState = object | null; export type OnionPathModalState = EditProfileModalState; diff --git a/ts/state/ducks/userGroups.ts b/ts/state/ducks/userGroups.ts new file mode 100644 index 0000000000..4a24beee0b --- /dev/null +++ b/ts/state/ducks/userGroups.ts @@ -0,0 +1,35 @@ +/* eslint-disable no-await-in-loop */ +import { PayloadAction, createSlice } from '@reduxjs/toolkit'; +import { GroupPubkeyType, UserGroupsGet } from 'libsession_util_nodejs'; + +export type UserGroupState = { + userGroups: Record; +}; + +export const initialUserGroupState: UserGroupState = { + userGroups: {}, +}; + +const userGroupSlice = createSlice({ + name: 'userGroup', + initialState: initialUserGroupState, + + reducers: { + refreshUserGroupsSlice( + state: UserGroupState, + action: PayloadAction<{ groups: Array }> + ) { + state.userGroups = {}; + action.payload.groups.forEach(m => { + state.userGroups[m.pubkeyHex] = m; + }); + + return state; + }, + }, +}); + +export const userGroupsActions = { + ...userGroupSlice.actions, +}; +export const userGroupReducer = userGroupSlice.reducer; diff --git a/ts/state/reducer.ts b/ts/state/reducer.ts index 2e9b1a9d36..e20833166f 100644 --- a/ts/state/reducer.ts +++ b/ts/state/reducer.ts @@ 
-11,6 +11,7 @@ import { reducer as theme } from './ducks/theme'; import { reducer as user, UserStateType } from './ducks/user'; import { PrimaryColorStateType, ThemeStateType } from '../themes/constants/colors'; +import { groupReducer, GroupState } from './ducks/metaGroups'; import { modalReducer as modals, ModalState } from './ducks/modalDialog'; import { defaultOnionReducer as onionPaths, OnionState } from './ducks/onion'; import { settingsReducer, SettingsState } from './ducks/settings'; @@ -19,6 +20,7 @@ import { StagedAttachmentsStateType, } from './ducks/stagedAttachments'; import { userConfigReducer as userConfig, UserConfigState } from './ducks/userConfig'; +import { userGroupReducer, UserGroupState } from './ducks/userGroups'; export type StateType = { search: SearchStateType; @@ -35,6 +37,8 @@ export type StateType = { call: CallStateType; sogsRoomInfo: SogsRoomInfoState; settings: SettingsState; + groups: GroupState; + userGroups: UserGroupState; }; const reducers = { @@ -52,6 +56,8 @@ const reducers = { call, sogsRoomInfo: ReduxSogsRoomInfos.sogsRoomInfoReducer, settings: settingsReducer, + groups: groupReducer, + userGroups: userGroupReducer, }; // Making this work would require that our reducer signature supported AnyAction, not diff --git a/ts/state/selectors/conversations.ts b/ts/state/selectors/conversations.ts index 403541d6bc..5c9c2c4803 100644 --- a/ts/state/selectors/conversations.ts +++ b/ts/state/selectors/conversations.ts @@ -1,7 +1,9 @@ /* eslint-disable no-restricted-syntax */ + import { createSelector } from '@reduxjs/toolkit'; import { filter, isEmpty, isFinite, isNumber, pick, sortBy, toNumber } from 'lodash'; +import { useSelector } from 'react-redux'; import { ConversationLookupType, ConversationsStateType, @@ -24,7 +26,8 @@ import { MessageTextSelectorProps } from '../../components/conversation/message/ import { GenericReadableMessageSelectorProps } from '../../components/conversation/message/message-item/GenericReadableMessage'; import { hasValidIncomingRequestValues } from '../../models/conversation'; import { isOpenOrClosedGroup } from '../../models/conversationAttributes'; -import { getConversationController } from '../../session/conversations'; +import { ConvoHub } from '../../session/conversations'; + import { UserUtils } from '../../session/utils'; import { BlockedNumberController } from '../../util'; import { Storage } from '../../util/storage'; @@ -32,11 +35,12 @@ import { getIntl } from './user'; import { MessageReactsSelectorProps } from '../../components/conversation/message/message-content/MessageReactions'; import { processQuoteAttachment } from '../../models/message'; +import { CONVERSATION_PRIORITIES } from '../../models/types'; import { isUsAnySogsFromCache } from '../../session/apis/open_group_api/sogsv3/knownBlindedkeys'; import { PubKey } from '../../session/types'; +import { UserGroupsWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; import { getSelectedConversationKey } from './selectedConversation'; import { getModeratorsOutsideRedux } from './sogsRoomInfo'; -import { CONVERSATION_PRIORITIES } from '../../models/types'; export const getConversations = (state: StateType): ConversationsStateType => state.conversations; @@ -71,7 +75,7 @@ export const getSortedMessagesOfSelectedConversation = createSelector( } const convoId = messages[0].propsForMessage.convoId; - const convo = getConversationController().get(convoId); + const convo = ConvoHub.use().get(convoId); if (!convo) { return []; @@ -91,6 +95,13 
@@ export const hasSelectedConversationIncomingMessages = createSelector( } ); +export const hasSelectedConversationOutgoingMessages = createSelector( + getSortedMessagesOfSelectedConversation, + (messages: Array): boolean => { + return messages.some(m => m.propsForMessage.direction === 'outgoing'); + } +); + export const getFirstUnreadMessageId = (state: StateType): string | undefined => { return state.conversations.firstUnreadMessageId; }; @@ -273,6 +284,13 @@ const _getLeftPaneConversationIds = ( return false; } + if ( + PubKey.is03Pubkey(conversation.id) && + UserGroupsWrapperActions.getCachedGroup(conversation.id)?.invitePending + ) { + return false; + } + // a non private conversation is always returned here if (!conversation.isPrivate) { return true; @@ -321,7 +339,7 @@ const _getGlobalUnreadCount = (sortedConversations: Array conversation.priority && conversation.priority <= CONVERSATION_PRIORITIES.default ) { - // dont increase unread counter, don't push to convo list. + // don't increase unread counter, don't push to convo list. continue; } @@ -347,7 +365,7 @@ export const _getSortedConversations = ( const sortedConversations: Array = []; for (const conversation of sorted) { - // Remove all invalid conversations and conversatons of devices associated + // Remove all invalid conversations and conversations of devices associated // with cancelled attempted links if (!conversation.isPublic && !conversation.activeAt) { continue; @@ -380,37 +398,38 @@ const _getConversationRequests = ( sortedConversations: Array ): Array => { return filter(sortedConversations, conversation => { - const { isApproved, isBlocked, isPrivate, isMe, activeAt, didApproveMe } = conversation; + const { isApproved, isBlocked, isPrivate, isMe, activeAt, didApproveMe, id } = conversation; + const invitePending = PubKey.is03Pubkey(id) + ? 
UserGroupsWrapperActions.getCachedGroup(id)?.invitePending || false + : false; const isIncomingRequest = hasValidIncomingRequestValues({ + id, isApproved: isApproved || false, isBlocked: isBlocked || false, isPrivate: isPrivate || false, isMe: isMe || false, activeAt: activeAt || 0, didApproveMe: didApproveMe || false, + invitePending, }); return isIncomingRequest; }); }; -export const getConversationRequests = createSelector( - getSortedConversations, - _getConversationRequests -); +const getConversationRequests = createSelector(getSortedConversations, _getConversationRequests); export const getConversationRequestsIds = createSelector(getConversationRequests, requests => requests.map(m => m.id) ); -export const hasConversationRequests = (state: StateType) => { - return !!getConversationRequests(state).length; -}; - const _getUnreadConversationRequests = ( sortedConversationRequests: Array ): Array => { return filter(sortedConversationRequests, conversation => { - return Boolean(conversation && conversation.unreadCount && conversation.unreadCount > 0); + return Boolean( + conversation && + ((conversation.unreadCount && conversation.unreadCount > 0) || conversation.isMarkedUnread) + ); }); }; @@ -432,6 +451,10 @@ export const getLeftPaneConversationIds = createSelector( _getLeftPaneConversationIds ); +export const useContactsToInviteToGroup = () => { + const contacts = useSelector(getPrivateContactsPubkeys); + return contacts; +}; export const getLeftPaneConversationIdsCount = createSelector( getLeftPaneConversationIds, (convoIds: Array) => { @@ -528,10 +551,11 @@ export const getPrivateContactsPubkeys = createSelector(getSortedContacts, state state.map(m => m.id) ); -export const getGlobalUnreadMessageCount = createSelector( - getSortedConversations, - _getGlobalUnreadCount -); +const getGlobalUnreadMessageCount = createSelector(getSortedConversations, _getGlobalUnreadCount); + +export function useGlobalUnreadMessageCount() { + return useSelector(getGlobalUnreadMessageCount); +} export const getMessageInfoId = (state: StateType) => state.conversations.messageInfoId; @@ -667,8 +691,8 @@ export function getLoadedMessagesLength(state: StateType) { return getMessagesFromState(state).length; } -export function getSelectedHasMessages(state: StateType): boolean { - return !isEmpty(getMessagesFromState(state)); +export function useSelectedHasMessages(): boolean { + return useSelector((state: StateType) => !isEmpty(getMessagesFromState(state))); } export const isFirstUnreadMessageIdAbove = createSelector( @@ -1020,3 +1044,9 @@ export const getIsSelectedConvoInitialLoadingInProgress = (state: StateType): bo export function getCurrentlySelectedConversationOutsideRedux() { return window?.inboxStore?.getState().conversations.selectedConversation as string | undefined; } + +export function useConversationIdOrigin(convoId: string | undefined) { + return useSelector((state: StateType) => + convoId ? 
state.conversations.conversationLookup?.[convoId]?.conversationIdOrigin : undefined + ); +} diff --git a/ts/state/selectors/groups.ts b/ts/state/selectors/groups.ts new file mode 100644 index 0000000000..904adb4574 --- /dev/null +++ b/ts/state/selectors/groups.ts @@ -0,0 +1,341 @@ +import { + GroupMemberGet, + GroupPubkeyType, + MemberStateGroupV2, + PubkeyType, +} from 'libsession_util_nodejs'; +import { useSelector } from 'react-redux'; +import { compact, concat, differenceBy, sortBy, uniqBy } from 'lodash'; +import { PubKey } from '../../session/types'; +import { GroupState } from '../ducks/metaGroups'; +import { StateType } from '../reducer'; +import { assertUnreachable } from '../../types/sqlSharedTypes'; +import { UserUtils } from '../../session/utils'; +import { useConversationsNicknameRealNameOrShortenPubkey } from '../../hooks/useParamSelector'; + +const getLibGroupsState = (state: StateType): GroupState => state.groups; +const getInviteSendingState = (state: StateType) => getLibGroupsState(state).membersInviteSending; +const getPromoteSendingState = (state: StateType) => getLibGroupsState(state).membersPromoteSending; + +function getMembersOfGroup(state: StateType, convo?: string): Array { + if (!convo) { + return []; + } + if (!PubKey.is03Pubkey(convo)) { + return []; + } + + const members = getLibGroupsState(state).members[convo]; + return members || []; +} + +function findMemberInMembers(members: Array, memberPk: string) { + return members.find(m => m.pubkeyHex === memberPk); +} + +export function getLibMembersPubkeys(state: StateType, convo?: string): Array { + const members = getMembersOfGroup(state, convo); + + return members.map(m => m.pubkeyHex); +} + +function getIsCreatingGroupFromUI(state: StateType): boolean { + return getLibGroupsState(state).creationFromUIPending; +} + +function getIsMemberGroupChangePendingFromUI(state: StateType): boolean { + return getLibGroupsState(state).memberChangesFromUIPending; +} + +function getGroupNameChangeFromUIPending(state: StateType): boolean { + return getLibGroupsState(state).nameChangesFromUIPending; +} + +export function getLibAdminsPubkeys(state: StateType, convo?: string): Array { + const members = getMembersOfGroup(state, convo); + return members.filter(m => m.nominatedAdmin).map(m => m.pubkeyHex); +} + +function getMemberInviteFailed(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'INVITE_FAILED' || false; +} + +function getMemberInviteNotSent(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'INVITE_NOT_SENT' || false; +} + +function getMemberInviteSent(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + + return findMemberInMembers(members, pubkey)?.memberStatus === 'INVITE_SENT' || false; +} + +function getMemberHasAcceptedPromotion( + state: StateType, + pubkey: PubkeyType, + convo?: GroupPubkeyType +) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'PROMOTION_ACCEPTED' || false; +} + +function getMemberIsNominatedAdmin(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.nominatedAdmin || false; +} + +function 
getMemberHasAcceptedInvite(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'INVITE_ACCEPTED' || false; +} + +function getMemberPromotionFailed(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'PROMOTION_FAILED' || false; +} + +function getMemberPromotionSent(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'PROMOTION_SENT' || false; +} + +function getMemberPromotionNotSent(state: StateType, pubkey: PubkeyType, convo?: GroupPubkeyType) { + const members = getMembersOfGroup(state, convo); + return findMemberInMembers(members, pubkey)?.memberStatus === 'PROMOTION_NOT_SENT' || false; +} + +export function getLibMembersCount(state: StateType, convo?: GroupPubkeyType): Array { + return getLibMembersPubkeys(state, convo); +} + +function getLibGroupName(state: StateType, convo?: string): string | undefined { + if (!convo) { + return undefined; + } + if (!PubKey.is03Pubkey(convo)) { + return undefined; + } + + const name = getLibGroupsState(state).infos[convo]?.name; + return name || undefined; +} + +export function useLibGroupName(convoId?: string): string | undefined { + return useSelector((state: StateType) => getLibGroupName(state, convoId)); +} + +export function useLibGroupMembers(convoId?: string): Array { + return useSelector((state: StateType) => getLibMembersPubkeys(state, convoId)); +} + +export function useLibGroupAdmins(convoId?: string): Array { + return useSelector((state: StateType) => getLibAdminsPubkeys(state, convoId)); +} + +export function getLibGroupNameOutsideRedux(convoId: string): string | undefined { + const state = window.inboxStore?.getState(); + return state ? getLibGroupName(state, convoId) : undefined; +} + +export function getLibGroupMembersOutsideRedux(convoId: string): Array { + const state = window.inboxStore?.getState(); + return state ? getLibMembersPubkeys(state, convoId) : []; +} + +export function getLibGroupAdminsOutsideRedux(convoId: string): Array { + const state = window.inboxStore?.getState(); + return state ? getLibAdminsPubkeys(state, convoId) : []; +} + +export function getMemberInviteSentOutsideRedux( + member: PubkeyType, + convoId: GroupPubkeyType +): boolean { + const state = window.inboxStore?.getState(); + return state ? 
getMemberInviteSent(state, member, convoId) : false; +} + +export function useIsCreatingGroupFromUIPending() { + return useSelector(getIsCreatingGroupFromUI); +} + +export function useMemberInviteFailed(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberInviteFailed(state, member, groupPk)); +} + +export function useMemberInviteSent(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberInviteSent(state, member, groupPk)); +} + +export function useMemberInviteNotSent(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberInviteNotSent(state, member, groupPk)); +} + +export function useMemberHasAcceptedPromotion(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberHasAcceptedPromotion(state, member, groupPk)); +} + +export function useMemberIsNominatedAdmin(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberIsNominatedAdmin(state, member, groupPk)); +} + +export function useMemberHasAcceptedInvite(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberHasAcceptedInvite(state, member, groupPk)); +} + +export function useMemberPromotionFailed(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberPromotionFailed(state, member, groupPk)); +} + +export function useMemberPromotionSent(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberPromotionSent(state, member, groupPk)); +} + +export function useMemberPromotionNotSent(member: PubkeyType, groupPk: GroupPubkeyType) { + return useSelector((state: StateType) => getMemberPromotionNotSent(state, member, groupPk)); +} + +export function useMemberGroupChangePending() { + return useSelector(getIsMemberGroupChangePendingFromUI); +} + +export function useGroupNameChangeFromUIPending() { + return useSelector(getGroupNameChangeFromUIPending); +} + +/** + * The selectors above are all deriving data from libsession. + * There is also some data that we only need in memory, not part of libsession (and so unsaved). + * An example is the "sending invite" or "sending promote" state of a member in a group. + */ + +function useMembersInviteSending(groupPk?: string) { + return useSelector((state: StateType) => + groupPk && PubKey.is03Pubkey(groupPk) ? getInviteSendingState(state)[groupPk] || [] : [] + ); +} + +export function useMemberInviteSending(groupPk: GroupPubkeyType, memberPk: PubkeyType) { + return useMembersInviteSending(groupPk).includes(memberPk); +} + +function useMembersPromoteSending(groupPk?: string) { + return useSelector((state: StateType) => + groupPk && PubKey.is03Pubkey(groupPk) ? 
getPromoteSendingState(state)[groupPk] || [] : [] + ); +} + +export function useMemberPromoteSending(groupPk: GroupPubkeyType, memberPk: PubkeyType) { + return useMembersPromoteSending(groupPk).includes(memberPk); +} + +type MemberStateGroupV2WithSending = MemberStateGroupV2 | 'INVITE_SENDING' | 'PROMOTION_SENDING'; + +export function useStateOf03GroupMembers(convoId?: string) { + const us = UserUtils.getOurPubKeyStrFromCache(); + let unsortedMembers = useSelector((state: StateType) => getMembersOfGroup(state, convoId)); + const invitesSendingPk = useMembersInviteSending(convoId); + const promotionsSendingPk = useMembersPromoteSending(convoId); + let invitesSending = compact( + invitesSendingPk.map(sending => unsortedMembers.find(m => m.pubkeyHex === sending)) + ); + const promotionSending = compact( + promotionsSendingPk.map(sending => unsortedMembers.find(m => m.pubkeyHex === sending)) + ); + + // promotionSending has priority over invitesSending, so remove anything in invitesSending that is also in promotionSending + invitesSending = differenceBy(invitesSending, promotionSending, value => value.pubkeyHex); + + const bothSending = concat(promotionSending, invitesSending); + + // promotionSending and invitesSending have priority over anything else, so remove anything found in either of those two + // from the unsorted list of members + unsortedMembers = differenceBy(unsortedMembers, bothSending, value => value.pubkeyHex); + + // at this point, merging invitesSending, promotionSending and unsortedMembers should create an array of unique members + const sortedByPriorities = concat(bothSending, unsortedMembers); + if (sortedByPriorities.length !== uniqBy(sortedByPriorities, m => m.pubkeyHex).length) { + throw new Error( + 'merging invitesSending, promotionSending and unsortedMembers should create an array of unique members' + ); + } + + // This could have been done with a `sortedByPriorities.map()` call, + // but we don't want the order as sorted by `sortedByPriorities`; we **only** want to respect the priorities from it. + // What that means is that a member with a state of inviteSending should have that state, but not be sorted first just because of it. 
+ + + // The order we (for now) want is: + // - (Invite failed + Invite Not Sent) merged together, sorted as NameSortingOrder + // - Sending invite, sorted as NameSortingOrder + // - Invite sent, sorted as NameSortingOrder + // - (Promotion failed + Promotion Not Sent) merged together, sorted as NameSortingOrder + // - Sending promotion, sorted as NameSortingOrder + // - Promotion sent, sorted as NameSortingOrder + // - Admin, sorted as NameSortingOrder + // - Accepted Member, sorted as NameSortingOrder + // NameSortingOrder: You first, then "nickname || name || pubkey -> aA-zZ" + + const unsortedWithStatuses: Array< + Pick & { memberStatus: MemberStateGroupV2WithSending } + > = []; + unsortedWithStatuses.push(...promotionSending); + unsortedWithStatuses.push(...differenceBy(invitesSending, promotionSending)); + unsortedWithStatuses.push(...differenceBy(unsortedMembers, invitesSending, promotionSending)); + + const names = useConversationsNicknameRealNameOrShortenPubkey( + unsortedWithStatuses.map(m => m.pubkeyHex) + ); + + // needing an index like this outside of lodash is not pretty, + // but sortBy doesn't provide the index in the callback + let index = 0; + + const sorted = sortBy(unsortedWithStatuses, item => { + let stateSortingOrder = 0; + switch (item.memberStatus) { + case 'INVITE_FAILED': + case 'INVITE_NOT_SENT': + stateSortingOrder = -5; + break; + case 'INVITE_SENDING': + stateSortingOrder = -4; + break; + case 'INVITE_SENT': + stateSortingOrder = -3; + break; + case 'PROMOTION_FAILED': + case 'PROMOTION_NOT_SENT': + stateSortingOrder = -2; + break; + case 'PROMOTION_SENDING': + stateSortingOrder = -1; + break; + case 'PROMOTION_SENT': + stateSortingOrder = 0; + break; + case 'PROMOTION_ACCEPTED': + stateSortingOrder = 1; + break; + case 'INVITE_ACCEPTED': + stateSortingOrder = 2; + break; + case 'UNKNOWN': + stateSortingOrder = 5; // just a fallback, hopefully won't happen in production + break; + + default: + assertUnreachable(item.memberStatus, 'Unhandled switch case'); + } + const sortingOrder = [ + stateSortingOrder, + // per section, we want "us first", then "nickname || displayName || pubkey" + item.pubkeyHex === us ? 
-1 : names[index]?.toLocaleLowerCase(), + ]; + index++; + return sortingOrder; + }); + + return sorted; +} diff --git a/ts/state/selectors/modal.ts b/ts/state/selectors/modal.ts index 3a6c5f7e9f..bfb0913b32 100644 --- a/ts/state/selectors/modal.ts +++ b/ts/state/selectors/modal.ts @@ -27,7 +27,7 @@ export const getModal = (state: StateType): ModalState => { return state.modals; }; -export const getIsModalVisble = createSelector(getModal, (state: ModalState): boolean => { +export const getIsModalVisible = createSelector(getModal, (state: ModalState): boolean => { const modalValues = Object.values(state); for (let i = 0; i < modalValues.length; i++) { if (modalValues[i] !== null) { diff --git a/ts/state/selectors/selectedConversation.ts b/ts/state/selectors/selectedConversation.ts index 127100d0e9..b981bc04ec 100644 --- a/ts/state/selectors/selectedConversation.ts +++ b/ts/state/selectors/selectedConversation.ts @@ -15,7 +15,9 @@ import { getSelectedConversation, getSelectedMessageIds, } from './conversations'; +import { getLibMembersPubkeys, useLibGroupName } from './groups'; import { getCanWrite, getModerators, getSubscriberCount } from './sogsRoomInfo'; +import { getLibGroupDestroyed, useLibGroupDestroyed } from './userGroups'; const getIsSelectedPrivate = (state: StateType): boolean => { return Boolean(getSelectedConversation(state)?.isPrivate) || false; @@ -25,10 +27,6 @@ const getIsSelectedBlocked = (state: StateType): boolean => { return Boolean(getSelectedConversation(state)?.isBlocked) || false; }; -const getSelectedIsApproved = (state: StateType): boolean => { - return Boolean(getSelectedConversation(state)?.isApproved) || false; -}; - const getSelectedApprovedMe = (state: StateType): boolean => { return Boolean(getSelectedConversation(state)?.didApproveMe) || false; }; @@ -60,6 +58,7 @@ export const getSelectedConversationIsPublic = (state: StateType): boolean => { */ export function getSelectedCanWrite(state: StateType) { const selectedConvoPubkey = getSelectedConversationKey(state); + const isSelectedGroupDestroyed = getLibGroupDestroyed(state, selectedConvoPubkey); if (!selectedConvoPubkey) { return false; } @@ -68,16 +67,16 @@ export function getSelectedCanWrite(state: StateType) { return false; } const canWriteSogs = getCanWrite(state, selectedConvoPubkey); - const { isBlocked, isKickedFromGroup, left, isPublic } = selectedConvo; + const { isBlocked, isKickedFromGroup, isPublic } = selectedConvo; const readOnlySogs = isPublic && !canWriteSogs; - const isBlindedAndDisabledMsgRequests = getSelectedBlindedDisabledMsgRequests(state); // true if isPrivate, blinded and explicitely disabled msgreq + const isBlindedAndDisabledMsgRequests = getSelectedBlindedDisabledMsgRequests(state); // true if isPrivate, blinded and explicitly disabled msgreq return !( isBlocked || isKickedFromGroup || - left || + isSelectedGroupDestroyed || readOnlySogs || isBlindedAndDisabledMsgRequests ); @@ -121,12 +120,18 @@ const getSelectedConversationIsGroupOrCommunity = (state: StateType): boolean => return type ? isOpenOrClosedGroup(type) : false; }; +/** + * Returns true if the current conversation selected is a group conversation. + * Returns false if the current conversation selected is not a group conversation, or none are selected + */ const getSelectedConversationIsGroupV2 = (state: StateType): boolean => { const selected = getSelectedConversation(state); if (!selected || !selected.type) { return false; } - return selected.type === ConversationTypeEnum.GROUPV3; + return selected.type + ? 
selected.type === ConversationTypeEnum.GROUPV2 && PubKey.is03Pubkey(selected.id) + : false; }; /** @@ -139,17 +144,32 @@ export const isClosedGroupConversation = (state: StateType): boolean => { } return ( (selected.type === ConversationTypeEnum.GROUP && !selected.isPublic) || - selected.type === ConversationTypeEnum.GROUPV3 || + selected.type === ConversationTypeEnum.GROUPV2 || false ); }; -const getSelectedGroupMembers = (state: StateType): Array => { +const getSelectedMembersCount = (state: StateType): number => { + const selected = getSelectedConversation(state); + if (!selected) { + return 0; + } + if (PubKey.is03Pubkey(selected.id)) { + return getLibMembersPubkeys(state, selected.id).length || 0; + } + if (selected.isPrivate || selected.isPublic) { + return 0; + } + return selected.members?.length || 0; +}; + +const getSelectedGroupAdmins = (state: StateType): Array => { const selected = getSelectedConversation(state); if (!selected) { return []; } - return selected.members || []; + + return selected.groupAdmins || []; }; const getSelectedSubscriberCount = (state: StateType): number | undefined => { @@ -243,7 +263,9 @@ export function useSelectedIsBlocked() { } export function useSelectedIsApproved() { - return useSelector(getSelectedIsApproved); + return useSelector((state: StateType): boolean => { + return !!(getSelectedConversation(state)?.isApproved || false); + }); } export function useSelectedApprovedMe() { @@ -296,8 +318,12 @@ export function useSelectedIsNoteToSelf() { return useSelector(getIsSelectedNoteToSelf); } -export function useSelectedMembers() { - return useSelector(getSelectedGroupMembers); +export function useSelectedMembersCount() { + return useSelector(getSelectedMembersCount); +} + +export function useSelectedGroupAdmins() { + return useSelector(getSelectedGroupAdmins); } export function useSelectedSubscriberCount() { @@ -310,6 +336,11 @@ export function useSelectedIsKickedFromGroup() { ); } +export function useSelectedIsGroupDestroyed() { + const convoKey = useSelectedConversationKey(); + return useLibGroupDestroyed(convoKey); +} + export function useSelectedExpireTimer(): number | undefined { return useSelector((state: StateType) => getSelectedConversation(state)?.expireTimer); } @@ -320,8 +351,8 @@ export function useSelectedConversationDisappearingMode(): return useSelector((state: StateType) => getSelectedConversation(state)?.expirationMode); } -export function useSelectedIsLeft() { - return useSelector((state: StateType) => Boolean(getSelectedConversation(state)?.left) || false); +export function useSelectedConversationIdOrigin() { + return useSelector((state: StateType) => getSelectedConversation(state)?.conversationIdOrigin); } export function useSelectedNickname() { @@ -354,13 +385,18 @@ export function useSelectedShortenedPubkeyOrFallback() { * This also returns the localized "Note to Self" if the conversation is the note to self. 
*/ export function useSelectedNicknameOrProfileNameOrShortenedPubkey() { + const selectedId = useSelectedConversationKey(); const nickname = useSelectedNickname(); const profileName = useSelectedDisplayNameInProfile(); const shortenedPubkey = useSelectedShortenedPubkeyOrFallback(); const isMe = useSelectedIsNoteToSelf(); + const libGroupName = useLibGroupName(selectedId); if (isMe) { return window.i18n('noteToSelf'); } + if (selectedId && PubKey.is03Pubkey(selectedId)) { + return libGroupName || profileName || shortenedPubkey; + } return nickname || profileName || shortenedPubkey; } diff --git a/ts/state/selectors/user.ts b/ts/state/selectors/user.ts index adbc4b9581..8cc25493b0 100644 --- a/ts/state/selectors/user.ts +++ b/ts/state/selectors/user.ts @@ -1,15 +1,17 @@ import { createSelector } from '@reduxjs/toolkit'; +import { PubkeyType } from 'libsession_util_nodejs'; +import { useSelector } from 'react-redux'; import { LocalizerType } from '../../types/Util'; -import { StateType } from '../reducer'; import { UserStateType } from '../ducks/user'; +import { StateType } from '../reducer'; export const getUser = (state: StateType): UserStateType => state.user; export const getOurNumber = createSelector( getUser, - (state: UserStateType): string => state.ourNumber + (state: UserStateType): PubkeyType => state.ourNumber as PubkeyType ); export const getOurDisplayNameInProfile = createSelector( @@ -18,3 +20,7 @@ export const getOurDisplayNameInProfile = createSelector( ); export const getIntl = createSelector(getUser, (): LocalizerType => window.i18n); + +export function useOurPkStr() { + return useSelector((state: StateType) => getOurNumber(state)); +} diff --git a/ts/state/selectors/userGroups.ts b/ts/state/selectors/userGroups.ts new file mode 100644 index 0000000000..6d3b3db12d --- /dev/null +++ b/ts/state/selectors/userGroups.ts @@ -0,0 +1,49 @@ +import { useSelector } from 'react-redux'; +import { isEmpty } from 'lodash'; +import { PubKey } from '../../session/types'; +import { UserGroupState } from '../ducks/userGroups'; +import { StateType } from '../reducer'; + +const getUserGroupState = (state: StateType): UserGroupState => state.userGroups; + +const getGroupById = (state: StateType, convoId?: string) => { + return convoId && PubKey.is03Pubkey(convoId) + ? getUserGroupState(state).userGroups[convoId] + : undefined; +}; + +export function useLibGroupWeHaveSecretKey(convoId?: string) { + return useSelector((state: StateType) => { + return !isEmpty(getGroupById(state, convoId)?.secretKey); + }); +} + +export function useLibGroupInvitePending(convoId?: string) { + return useSelector((state: StateType) => getGroupById(state, convoId)?.invitePending); +} + +export function useLibGroupInviteGroupName(convoId?: string) { + return useSelector((state: StateType) => getGroupById(state, convoId)?.name); +} + +function getLibGroupKicked(state: StateType, convoId?: string) { + return getGroupById(state, convoId)?.kicked; +} + +export function useLibGroupKicked(convoId?: string) { + return useSelector((state: StateType) => getLibGroupKicked(state, convoId)); +} + +export function getLibGroupKickedOutsideRedux(convoId?: string) { + const state = window.inboxStore?.getState(); + + return state ? 
getLibGroupKicked(state, convoId) : undefined; +} + +export function getLibGroupDestroyed(state: StateType, convoId?: string) { + return getGroupById(state, convoId)?.destroyed; +} + +export function useLibGroupDestroyed(convoId?: string) { + return useSelector((state: StateType) => getLibGroupDestroyed(state, convoId)); +} diff --git a/ts/test/session/unit/crypto/MessageEncrypter_test.ts b/ts/test/session/unit/crypto/MessageEncrypter_test.ts index d7658024d5..6ebd17608a 100644 --- a/ts/test/session/unit/crypto/MessageEncrypter_test.ts +++ b/ts/test/session/unit/crypto/MessageEncrypter_test.ts @@ -1,18 +1,20 @@ -import * as crypto from 'crypto'; +/* eslint-disable import/order */ import chai, { expect } from 'chai'; -import Sinon, * as sinon from 'sinon'; import chaiBytes from 'chai-bytes'; +import * as crypto from 'crypto'; +import Sinon, * as sinon from 'sinon'; -import { concatUInt8Array, getSodiumRenderer, MessageEncrypter } from '../../../../session/crypto'; -import { TestUtils } from '../../../test-utils'; import { SignalService } from '../../../../protobuf'; +import { concatUInt8Array, getSodiumRenderer } from '../../../../session/crypto'; +import { TestUtils } from '../../../test-utils'; import { StringUtils, UserUtils } from '../../../../session/utils'; +import { SessionKeyPair } from '../../../../receiver/keypairs'; +import { addMessagePadding } from '../../../../session/crypto/BufferPadding'; import { PubKey } from '../../../../session/types'; import { fromHex, toHex } from '../../../../session/utils/String'; -import { addMessagePadding } from '../../../../session/crypto/BufferPadding'; -import { SessionKeyPair } from '../../../../receiver/keypairs'; +import { MessageEncrypter } from '../../../../session/crypto/MessageEncrypter'; export const TEST_identityKeyPair: SessionKeyPair = { pubKey: new Uint8Array([ @@ -32,7 +34,7 @@ export const TEST_identityKeyPair: SessionKeyPair = { chai.use(chaiBytes); describe('MessageEncrypter', () => { - const ourNumber = '0123456789abcdef'; + const ourNumber = TestUtils.generateFakePubKeyStr(); const ourUserEd25516Keypair = { pubKey: '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309', privKey: diff --git a/ts/test/session/unit/crypto/OpenGroupAuthentication_test.ts b/ts/test/session/unit/crypto/OpenGroupAuthentication_test.ts index c5af953d9e..389c8a1bd7 100644 --- a/ts/test/session/unit/crypto/OpenGroupAuthentication_test.ts +++ b/ts/test/session/unit/crypto/OpenGroupAuthentication_test.ts @@ -299,9 +299,6 @@ const decryptBlindedMessage = async ( const version = data[0]; if (version !== 0) { - window?.log?.error( - 'decryptBlindedMessage - Dropping message due to unsupported encryption version' - ); return undefined; } @@ -319,7 +316,7 @@ const decryptBlindedMessage = async ( if (plaintextIncoming.length <= 32) { // throw Error; - window?.log?.error('decryptBlindedMessage: plaintext insufficient length'); + console.error('decryptBlindedMessage: plaintext insufficient length'); return undefined; } diff --git a/ts/test/session/unit/crypto/SnodeSignatures_test.ts b/ts/test/session/unit/crypto/SnodeSignatures_test.ts new file mode 100644 index 0000000000..469dda2c9d --- /dev/null +++ b/ts/test/session/unit/crypto/SnodeSignatures_test.ts @@ -0,0 +1,425 @@ +import { expect, use } from 'chai'; +import chaiAsPromised from 'chai-as-promised'; +import { UserGroupsGet } from 'libsession_util_nodejs'; +import Sinon from 'sinon'; +import { HexString } from '../../../../node/hexStrings'; +import { getSodiumNode } from 
'../../../../node/sodiumNode'; +import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; +import { SnodeGroupSignature } from '../../../../session/apis/snode_api/signature/groupSignature'; +import { SnodeSignature } from '../../../../session/apis/snode_api/signature/snodeSignatures'; +import { concatUInt8Array } from '../../../../session/crypto'; +import { UserUtils } from '../../../../session/utils'; +import { fromBase64ToArray, fromHexToArray } from '../../../../session/utils/String'; +import { NetworkTime } from '../../../../util/NetworkTime'; +import { WithSignature } from '../../../../session/types/with'; + +use(chaiAsPromised); + +const validGroupPk = '030442ca9b758eefe0c42370696688b28f48f44bf44941fae4f3d5b41f6358c41d'; +const privKeyUint = concatUInt8Array( + fromHexToArray('4db38882cf0a0fffcbb971eb2b1420c92bc836c6946cd97bdc0c2787b806549d'), + fromHexToArray(validGroupPk.slice(2)) +); // len 64 + +const userEd25519Keypair = { + pubKey: 'bdd5eaf00eaf965ca63b7e8b119d8122d4647ffd5bb58daa1f78dfc54dd53989', + privKey: + 'b0e12943e22e8f71774c2c4205fed59800000000000000000000000000000000bdd5eaf00eaf965ca63b7e8b119d8122d4647ffd5bb58daa1f78dfc54dd53989', +}; + +// Keep the line below as we might need it for tests, and it is linked to the values above +// const _currentUserSubAccountAuthData = fromHexToArray( +// eslint-disable-next-line max-len +// '03030000cdbc07f46c4b322767675240d5945e902c75f0d3c46f36735b93773577d69e037c5d75d378a8e7183f9012b39bc27de7f81afe9c7000aa924fbcad8a7e6f12fec809adae65a1c427feb9c4b1ad453df403079f62203aa0563533b2b114f31b07' +// ); + +function getEmptyUserGroup() { + return { + secretKey: null, + authData: null, + invitePending: false, + joinedAtSeconds: 1234, + kicked: false, + name: '1243', + priority: 0, + pubkeyHex: validGroupPk, + destroyed: false, + } as UserGroupsGet; +} + +const hardcodedTimestamp = 1234; + +async function verifySig(ret: WithSignature & { pubkey: string }, verificationData: string) { + const without03 = + ret.pubkey.startsWith('03') || ret.pubkey.startsWith('05') ? 
ret.pubkey.slice(2) : ret.pubkey; + const pk = HexString.fromHexString(without03); + const sodium = await getSodiumNode(); + const verified = sodium.crypto_sign_verify_detached( + fromBase64ToArray(ret.signature), + verificationData, + pk + ); + + if (!verified) { + throw new Error('sig failed to be verified'); + } +} + +describe('SnodeSignature', () => { + afterEach(() => { + Sinon.restore(); + }); + + describe('getSnodeGroupAdminSignatureParams', () => { + beforeEach(() => { + Sinon.stub(NetworkTime, 'now').returns(hardcodedTimestamp); + }); + + describe('retrieve', () => { + it('retrieve namespace ClosedGroupInfo', async () => { + const ret = await SnodeGroupSignature.getSnodeGroupSignature({ + method: 'retrieve', + namespace: SnodeNamespaces.ClosedGroupInfo, + group: { + authData: null, + pubkeyHex: validGroupPk, + secretKey: privKeyUint, + }, + }); + expect(ret.pubkey).to.be.eq(validGroupPk); + + expect(ret.timestamp).to.be.eq(hardcodedTimestamp); + const verificationData = `retrieve${SnodeNamespaces.ClosedGroupInfo}${hardcodedTimestamp}`; + await verifySig(ret, verificationData); + }); + + it('retrieve namespace ClosedGroupKeys', async () => { + const ret = await SnodeGroupSignature.getSnodeGroupSignature({ + method: 'retrieve', + namespace: SnodeNamespaces.ClosedGroupKeys, + group: { + authData: null, + pubkeyHex: validGroupPk, + secretKey: privKeyUint, + }, + }); + expect(ret.pubkey).to.be.eq(validGroupPk); + + expect(ret.timestamp).to.be.eq(hardcodedTimestamp); + const verificationData = `retrieve${SnodeNamespaces.ClosedGroupKeys}${hardcodedTimestamp}`; + + await verifySig(ret, verificationData); + }); + + it('retrieve namespace ClosedGroupMessages', async () => { + const ret = await SnodeGroupSignature.getSnodeGroupSignature({ + method: 'retrieve', + namespace: SnodeNamespaces.ClosedGroupMessages, + group: { + authData: null, + pubkeyHex: validGroupPk, + secretKey: privKeyUint, + }, + }); + expect(ret.pubkey).to.be.eq(validGroupPk); + + expect(ret.timestamp).to.be.eq(hardcodedTimestamp); + const verificationData = `retrieve${SnodeNamespaces.ClosedGroupMessages}${hardcodedTimestamp}`; + await verifySig(ret, verificationData); + }); + }); + + describe('store', () => { + it('store namespace ClosedGroupInfo', async () => { + const ret = await SnodeGroupSignature.getSnodeGroupSignature({ + method: 'store', + namespace: SnodeNamespaces.ClosedGroupInfo, + group: { + authData: null, + pubkeyHex: validGroupPk, + secretKey: privKeyUint, + }, + }); + expect(ret.pubkey).to.be.eq(validGroupPk); + expect(ret.timestamp).to.be.eq(hardcodedTimestamp); + + const verificationData = `store${SnodeNamespaces.ClosedGroupInfo}${hardcodedTimestamp}`; + await verifySig(ret, verificationData); + }); + + it('store namespace ClosedGroupKeys', async () => { + const ret = await SnodeGroupSignature.getSnodeGroupSignature({ + method: 'store', + namespace: SnodeNamespaces.ClosedGroupKeys, + group: { + authData: null, + pubkeyHex: validGroupPk, + secretKey: privKeyUint, + }, + }); + expect(ret.pubkey).to.be.eq(validGroupPk); + + expect(ret.timestamp).to.be.eq(hardcodedTimestamp); + const verificationData = `store${SnodeNamespaces.ClosedGroupKeys}${hardcodedTimestamp}`; + await verifySig(ret, verificationData); + }); + + it('store namespace ClosedGroupMessages', async () => { + const ret = await SnodeGroupSignature.getSnodeGroupSignature({ + method: 'store', + namespace: SnodeNamespaces.ClosedGroupMessages, + group: { + authData: null, + pubkeyHex: validGroupPk, + secretKey: privKeyUint, + }, + }); + 
expect(ret.pubkey).to.be.eq(validGroupPk); + expect(ret.timestamp).to.be.eq(hardcodedTimestamp); + const verificationData = `store${SnodeNamespaces.ClosedGroupMessages}${hardcodedTimestamp}`; + await verifySig(ret, verificationData); + }); + }); + }); + + describe('getGroupSignatureByHashesParams', () => { + beforeEach(() => { + Sinon.stub(NetworkTime, 'now').returns(hardcodedTimestamp); + }); + + describe('delete', () => { + it('can sign a delete with admin secretkey', async () => { + const hashes = ['hash4321', 'hash4221']; + const group = getEmptyUserGroup(); + + const ret = await SnodeGroupSignature.getGroupSignatureByHashesParams({ + method: 'delete', + groupPk: validGroupPk, + messagesHashes: hashes, + group: { ...group, secretKey: privKeyUint }, + }); + expect(ret.pubkey).to.be.eq(validGroupPk); + expect(ret.messages).to.be.deep.eq(hashes); + + const verificationData = `delete${hashes.join('')}`; + await verifySig(ret, verificationData); + }); + + it.skip('can sign a delete with authData if adminSecretKey is empty', async () => { + // we can't really test this atm. We'd need the full env of wrapper setup as we need need for the subaccountSign itself, part of the wrapper + // const hashes = ['hash4321', 'hash4221']; + // const group = getEmptyUserGroup(); + // const ret = await SnodeGroupSignature.getGroupSignatureByHashesParams({ + // method: 'delete', + // groupPk: validGroupPk, + // messagesHashes: hashes, + // group: { ...group, authData: currentUserSubAccountAuthData }, + // }); + // expect(ret.pubkey).to.be.eq(validGroupPk); + // expect(ret.messages).to.be.deep.eq(hashes); + // const verificationData = `delete${hashes.join('')}`; + // await verifySig(ret, verificationData); + }); + + it('throws if none are set', async () => { + const hashes = ['hash4321', 'hash4221']; + + const group = getEmptyUserGroup(); + const fn = async () => + SnodeGroupSignature.getGroupSignatureByHashesParams({ + method: 'delete', + groupPk: validGroupPk, + messagesHashes: hashes, + group, + }); + expect(fn).to.throw; + }); + }); + }); + + describe('generateUpdateExpiryGroupSignature', () => { + it('throws if groupPk not given', async () => { + const func = async () => { + return SnodeGroupSignature.generateUpdateExpiryGroupSignature({ + group: { pubkeyHex: null as any, secretKey: privKeyUint, authData: null }, + messagesHashes: ['[;p['], + shortenOrExtend: '', + expiryMs: hardcodedTimestamp, + }); + }; + await expect(func()).to.be.rejectedWith( + 'generateUpdateExpiryGroupSignature groupPk is empty' + ); + }); + + it('throws if groupPrivKey is empty', async () => { + const func = async () => { + return SnodeGroupSignature.generateUpdateExpiryGroupSignature({ + group: { + pubkeyHex: validGroupPk as any, + secretKey: new Uint8Array() as any, + authData: null, + }, + + messagesHashes: ['[;p['], + shortenOrExtend: '', + expiryMs: hardcodedTimestamp, + }); + }; + await expect(func()).to.be.rejectedWith( + 'retrieveRequestForGroup: needs either groupSecretKey or authData' + ); + }); + + it('works with valid pubkey and priv key', async () => { + const hashes = ['hash4321', 'hash4221']; + const expiryMs = hardcodedTimestamp; + const shortenOrExtend = ''; + const ret = await SnodeGroupSignature.generateUpdateExpiryGroupSignature({ + group: { pubkeyHex: validGroupPk, secretKey: privKeyUint, authData: null }, + messagesHashes: hashes, + shortenOrExtend: '', + expiryMs, + }); + + expect(ret.pubkey).to.be.eq(validGroupPk); + + const verificationData = `expire${shortenOrExtend}${expiryMs}${hashes.join('')}`; + await 
verifySig(ret, verificationData); + }); + + it('fails with invalid timestamp', async () => { + const hashes = ['hash4321', 'hash4221']; + const expiryMs = hardcodedTimestamp; + const shortenOrExtend = ''; + const ret = await SnodeGroupSignature.generateUpdateExpiryGroupSignature({ + group: { pubkeyHex: validGroupPk, secretKey: privKeyUint, authData: null }, + messagesHashes: hashes, + shortenOrExtend: '', + expiryMs, + }); + + expect(ret.pubkey).to.be.eq(validGroupPk); + + const verificationData = `expire${shortenOrExtend}${expiryMs}1${hashes.join('')}`; + const func = async () => verifySig(ret, verificationData); + await expect(func()).rejectedWith('sig failed to be verified'); + }); + + it('fails with invalid hashes', async () => { + const hashes = ['hash4321', 'hash4221']; + const expiryMs = hardcodedTimestamp; + const shortenOrExtend = ''; + const ret = await SnodeGroupSignature.generateUpdateExpiryGroupSignature({ + group: { pubkeyHex: validGroupPk, secretKey: privKeyUint, authData: null }, + messagesHashes: hashes, + shortenOrExtend: '', + expiryMs, + }); + + expect(ret.pubkey).to.be.eq(validGroupPk); + + const overriddenHash = hashes.slice(); + overriddenHash[0] = '1111'; + const verificationData = `expire${shortenOrExtend}${expiryMs}${overriddenHash.join('')}`; + const func = async () => verifySig(ret, verificationData); + await expect(func()).rejectedWith('sig failed to be verified'); + }); + + it('fails with invalid number of hashes', async () => { + const hashes = ['hash4321', 'hash4221']; + const expiryMs = hardcodedTimestamp; + const shortenOrExtend = ''; + const ret = await SnodeGroupSignature.generateUpdateExpiryGroupSignature({ + group: { pubkeyHex: validGroupPk, secretKey: privKeyUint, authData: null }, + messagesHashes: hashes, + shortenOrExtend: '', + expiryMs, + }); + + expect(ret.pubkey).to.be.eq(validGroupPk); + + const overriddenHash = [hashes[0]]; + const verificationData = `expire${shortenOrExtend}${expiryMs}${overriddenHash.join('')}`; + const func = async () => verifySig(ret, verificationData); + await expect(func()).rejectedWith('sig failed to be verified'); + }); + }); + + describe('generateUpdateExpiryOurSignature', () => { + it('throws if our ed keypair is not set', async () => { + Sinon.stub(UserUtils, 'getUserED25519KeyPair').resolves(null as any); + + const func = async () => { + const hashes = ['hash4321', 'hash4221']; + const shortenOrExtend = ''; + return SnodeSignature.generateUpdateExpiryOurSignature({ + messagesHashes: hashes, + shortenOrExtend, + timestamp: hardcodedTimestamp, + }); + }; + + await expect(func()).to.be.rejectedWith( + 'getSnodeSignatureParams "expiry": User has no getUserED25519KeyPair()' + ); + }); + + it('throws if invalid hashes', async () => { + Sinon.stub(UserUtils, 'getUserED25519KeyPair').resolves(userEd25519Keypair); + + const hashes = ['hash4321', 'hash4221']; + const shortenOrExtend = ''; + const ret = await SnodeSignature.generateUpdateExpiryOurSignature({ + messagesHashes: hashes, + shortenOrExtend, + timestamp: hardcodedTimestamp, + }); + const overriddenHash = [hashes[0]]; + const verificationData = `expire${shortenOrExtend}${hardcodedTimestamp}${overriddenHash.join( + '' + )}`; + + const func = async () => { + return verifySig(ret, verificationData); + }; + await expect(func()).to.be.rejectedWith('sig failed to be verified'); + }); + + it('throws if invalid timestamp', async () => { + Sinon.stub(UserUtils, 'getUserED25519KeyPair').resolves(userEd25519Keypair); + + const hashes = ['hash4321', 'hash4221']; + const 
shortenOrExtend = ''; + const ret = await SnodeSignature.generateUpdateExpiryOurSignature({ + messagesHashes: hashes, + shortenOrExtend, + timestamp: hardcodedTimestamp, + }); + const verificationData = `expire${shortenOrExtend}${hardcodedTimestamp}123${hashes.join('')}`; + + const func = async () => { + return verifySig(ret, verificationData); + }; + await expect(func()).to.be.rejectedWith('sig failed to be verified'); + }); + + it('works with valid pubkey and priv key', async () => { + Sinon.stub(UserUtils, 'getUserED25519KeyPair').resolves(userEd25519Keypair); + + const hashes = ['hash4321', 'hash4221']; + const timestamp = hardcodedTimestamp; + const shortenOrExtend = ''; + const ret = await SnodeSignature.generateUpdateExpiryOurSignature({ + messagesHashes: hashes, + shortenOrExtend: '', + timestamp, + }); + + expect(ret.pubkey).to.be.eq(userEd25519Keypair.pubKey); + + const verificationData = `expire${shortenOrExtend}${timestamp}${hashes.join('')}`; + await verifySig(ret, verificationData); + }); + }); +}); diff --git a/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts b/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts index 983da4ca60..a421517374 100644 --- a/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts +++ b/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts @@ -1,8 +1,8 @@ import { expect } from 'chai'; import { beforeEach } from 'mocha'; import Sinon from 'sinon'; -import * as DecryptedAttachmentsManager from '../../../../session/crypto/DecryptedAttachmentsManager'; import { TestUtils } from '../../../test-utils'; +import { DecryptedAttachmentsManager } from '../../../../session/crypto/DecryptedAttachmentsManager'; describe('DecryptedAttachmentsManager', () => { beforeEach(() => { @@ -64,12 +64,6 @@ describe('DecryptedAttachmentsManager', () => { TestUtils.stubCreateObjectUrl(); }); - it('url starts with attachment path but is not already decrypted', () => { - expect( - DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl('/local/attachment/attachment1') - ).to.be.eq(null); - }); - it('url starts with attachment path but is not already decrypted', async () => { expect( DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl('/local/attachment/attachment1') @@ -134,7 +128,4 @@ describe('DecryptedAttachmentsManager', () => { }); }); }); - - it.skip('cleanUpOldDecryptedMedias', () => {}); - it.skip('getDecryptedBlob', () => {}); }); diff --git a/ts/test/session/unit/disappearing_messages/DisappearingMessage_test.ts b/ts/test/session/unit/disappearing_messages/DisappearingMessage_test.ts index 3ba76599f3..993f0e29c0 100644 --- a/ts/test/session/unit/disappearing_messages/DisappearingMessage_test.ts +++ b/ts/test/session/unit/disappearing_messages/DisappearingMessage_test.ts @@ -3,7 +3,6 @@ import chaiAsPromised from 'chai-as-promised'; import Sinon from 'sinon'; import { Conversation, ConversationModel } from '../../../../models/conversation'; import { ConversationAttributes } from '../../../../models/conversationAttributes'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; import { DisappearingMessages } from '../../../../session/disappearing_messages'; import { DisappearingMessageConversationModeType, @@ -21,6 +20,7 @@ import { generateVisibleMessage, } from '../../../test-utils/utils'; import { ConversationTypeEnum } from '../../../../models/types'; +import { NetworkTime } from '../../../../util/NetworkTime'; 
chai.use(chaiAsPromised as any); @@ -38,7 +38,7 @@ describe('DisappearingMessage', () => { } as ConversationAttributes; beforeEach(() => { - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); }); @@ -174,9 +174,8 @@ describe('DisappearingMessage', () => { it("if it's a Group Conversation and expireTimer > 0 then the message's expirationType is always deleteAfterSend", async () => { const ourConversation = new ConversationModel({ ...conversationArgs, - type: ConversationTypeEnum.GROUP, - // TODO update to 03 prefix when we release new groups - id: '05123456564', + type: ConversationTypeEnum.GROUPV2, + id: TestUtils.generateFakeClosedGroupV2PkStr(), }); const expireTimer = 60; // seconds const expirationMode = 'deleteAfterRead'; // not correct @@ -236,9 +235,8 @@ describe('DisappearingMessage', () => { it("if it's a Group Conversation and expireTimer > 0 then the conversation mode is always deleteAfterSend", async () => { const ourConversation = new ConversationModel({ ...conversationArgs, - type: ConversationTypeEnum.GROUP, - // TODO update to 03 prefix when we release new groups - id: '05123456564', + type: ConversationTypeEnum.GROUPV2, + id: TestUtils.generateFakeClosedGroupV2PkStr(), }); const expirationType = 'deleteAfterRead'; // not correct const expireTimer = 60; // seconds @@ -467,7 +465,7 @@ describe('DisappearingMessage', () => { message.set({ expirationType: 'deleteAfterRead', expireTimer: 300, - sent_at: GetNetworkTime.getNowWithNetworkOffset(), + sent_at: NetworkTime.now(), }); Sinon.stub(message, 'getConversation').returns(conversation); @@ -490,7 +488,7 @@ describe('DisappearingMessage', () => { const message = generateFakeOutgoingPrivateMessage(conversation.get('id')); message.set({ expirationType: 'deleteAfterRead', - sent_at: GetNetworkTime.getNowWithNetworkOffset(), + sent_at: NetworkTime.now(), }); Sinon.stub(message, 'getConversation').returns(conversation); @@ -506,7 +504,7 @@ describe('DisappearingMessage', () => { const message = generateFakeOutgoingPrivateMessage(conversation.get('id')); message.set({ expireTimer: 300, - sent_at: GetNetworkTime.getNowWithNetworkOffset(), + sent_at: NetworkTime.now(), }); Sinon.stub(message, 'getConversation').returns(conversation); @@ -515,7 +513,7 @@ describe('DisappearingMessage', () => { expect(message.getExpirationStartTimestamp(), 'it should be undefined').to.be.undefined; }); it('if expirationStartTimestamp is already defined then it should not have changed', async () => { - const now = GetNetworkTime.getNowWithNetworkOffset(); + const now = NetworkTime.now(); const conversation = new ConversationModel({ ...conversationArgs, id: ourNumber, @@ -547,10 +545,10 @@ describe('DisappearingMessage', () => { it('if the conversation is public it should throw', async () => { const conversation = new ConversationModel({ ...conversationArgs, + id: 'https://example.org', + type: ConversationTypeEnum.GROUP, }); - Sinon.stub(conversation, 'isPublic').returns(true); - const promise = conversation.updateExpireTimer({ providedDisappearingMode: 'deleteAfterSend', providedExpireTimer: 600, @@ -561,7 +559,7 @@ describe('DisappearingMessage', () => { fromConfigMessage: false, }); await expect(promise).is.rejectedWith( - "updateExpireTimer() Disappearing messages aren't supported in communities" + 'updateExpireTimer() Disappearing messages are only 
supported int groups and private chats' ); }); @@ -607,7 +605,7 @@ describe('DisappearingMessage', () => { providedDisappearingMode: 'deleteAfterSend', providedExpireTimer: 600, providedSource: testPubkey, - receivedAt: GetNetworkTime.getNowWithNetworkOffset(), + sentAt: NetworkTime.now(), fromSync: true, shouldCommitConvo: false, existingMessage: undefined, diff --git a/ts/test/session/unit/disappearing_messages/ExpireRequest_test.ts b/ts/test/session/unit/disappearing_messages/ExpireRequest_test.ts index 1ed906c37d..56ad2f2931 100644 --- a/ts/test/session/unit/disappearing_messages/ExpireRequest_test.ts +++ b/ts/test/session/unit/disappearing_messages/ExpireRequest_test.ts @@ -1,7 +1,8 @@ import chai, { expect } from 'chai'; import chaiAsPromised from 'chai-as-promised'; +import { PubkeyType } from 'libsession_util_nodejs'; import Sinon from 'sinon'; -import { UpdateExpiryOnNodeSubRequest } from '../../../../session/apis/snode_api/SnodeRequestTypes'; +import { UpdateExpiryOnNodeUserSubRequest } from '../../../../session/apis/snode_api/SnodeRequestTypes'; import { ExpireMessageWithExpiryOnSnodeProps, ExpireRequestResponseResults, @@ -10,16 +11,17 @@ import { verifyExpireMsgsResponseSignature, verifyExpireMsgsResponseSignatureProps, } from '../../../../session/apis/snode_api/expireRequest'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; import { UserUtils } from '../../../../session/utils'; import { isValidUnixTimestamp } from '../../../../session/utils/Timestamps'; import { generateFakeSnode } from '../../../test-utils/utils'; +import { NetworkTime } from '../../../../util/NetworkTime'; chai.use(chaiAsPromised as any); describe('ExpireRequest', () => { const getLatestTimestampOffset = 200000; - const ourNumber = '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309'; + const ourNumber = + '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309' as PubkeyType; const ourUserEd25516Keypair = { pubKey: '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309', privKey: @@ -27,7 +29,7 @@ describe('ExpireRequest', () => { }; beforeEach(() => { - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); Sinon.stub(UserUtils, 'getUserED25519KeyPair').resolves(ourUserEd25516Keypair); }); @@ -44,7 +46,7 @@ describe('ExpireRequest', () => { }; it('builds a request with just the messageHash and expireTimer of 1 minute', async () => { - const request: UpdateExpiryOnNodeSubRequest | null = + const request: UpdateExpiryOnNodeUserSubRequest | null = await buildExpireRequestSingleExpiry(props); expect(request, 'should not return null').to.not.be.null; @@ -53,68 +55,76 @@ describe('ExpireRequest', () => { throw Error('nothing was returned when building the request'); } - expect(request, "method should be 'expire'").to.have.property('method', 'expire'); - expect(request.params.pubkey, 'should have a matching pubkey').to.equal(ourNumber); - expect(request.params.messages, 'messageHash should be in messages array').to.deep.equal( + const signedReq = await request.build(); + + expect(signedReq, "method should be 'expire'").to.have.property('method', 'expire'); + expect(signedReq.params.pubkey, 'should have a matching pubkey').to.equal(ourNumber); + expect(signedReq.params.messages, 'messageHash should be in messages array').to.deep.equal( 
props.messageHashes ); expect( - request.params.expiry && isValidUnixTimestamp(request?.params.expiry), + signedReq.params.expiry && isValidUnixTimestamp(signedReq.params.expiry), 'expiry should be a valid unix timestamp' ).to.be.true; - expect(request.params.extend, 'extend should be undefined').to.be.undefined; - expect(request.params.shorten, 'shorten should be undefined').to.be.undefined; - expect(request.params.signature, 'signature should not be empty').to.not.be.empty; + expect(signedReq.params.extend, 'extend should be undefined').to.be.undefined; + expect(signedReq.params.shorten, 'shorten should be undefined').to.be.undefined; + expect(signedReq.params.signature, 'signature should not be empty').to.not.be.empty; }); it('builds a request with extend enabled', async () => { - const request: UpdateExpiryOnNodeSubRequest | null = await buildExpireRequestSingleExpiry({ - ...props, - shortenOrExtend: 'extend', - }); + const request: UpdateExpiryOnNodeUserSubRequest | null = await buildExpireRequestSingleExpiry( + { + ...props, + shortenOrExtend: 'extend', + } + ); expect(request, 'should not return null').to.not.be.null; expect(request, 'should not return undefined').to.not.be.undefined; if (!request) { throw Error('nothing was returned when building the request'); } + const signedReq = await request.build(); - expect(request, "method should be 'expire'").to.have.property('method', 'expire'); - expect(request.params.pubkey, 'should have a matching pubkey').to.equal(ourNumber); - expect(request.params.messages, 'messageHash should be in messages array').to.equal( + expect(signedReq, "method should be 'expire'").to.have.property('method', 'expire'); + expect(signedReq.params.pubkey, 'should have a matching pubkey').to.equal(ourNumber); + expect(signedReq.params.messages, 'messageHash should be in messages array').to.equal( props.messageHashes ); expect( - request.params.expiry && isValidUnixTimestamp(request?.params.expiry), + signedReq.params.expiry && isValidUnixTimestamp(signedReq?.params.expiry), 'expiry should be a valid unix timestamp' ).to.be.true; - expect(request.params.extend, 'extend should be true').to.be.true; - expect(request.params.shorten, 'shorten should be undefined').to.be.undefined; - expect(request.params.signature, 'signature should not be empty').to.not.be.empty; + expect(signedReq.params.extend, 'extend should be true').to.be.true; + expect(signedReq.params.shorten, 'shorten should be undefined').to.be.undefined; + expect(signedReq.params.signature, 'signature should not be empty').to.not.be.empty; }); it('builds a request with shorten enabled', async () => { - const request: UpdateExpiryOnNodeSubRequest | null = await buildExpireRequestSingleExpiry({ - ...props, - shortenOrExtend: 'shorten', - }); + const request: UpdateExpiryOnNodeUserSubRequest | null = await buildExpireRequestSingleExpiry( + { + ...props, + shortenOrExtend: 'shorten', + } + ); expect(request, 'should not return null').to.not.be.null; expect(request, 'should not return undefined').to.not.be.undefined; if (!request) { throw Error('nothing was returned when building the request'); } + const signedReq = await request.build(); - expect(request, "method should be 'expire'").to.have.property('method', 'expire'); - expect(request.params.pubkey, 'should have a matching pubkey').to.equal(ourNumber); - expect(request.params.messages, 'messageHash should be in messages array').to.equal( + expect(signedReq, "method should be 'expire'").to.have.property('method', 'expire'); + expect(signedReq.params.pubkey, 
'should have a matching pubkey').to.equal(ourNumber); + expect(signedReq.params.messages, 'messageHash should be in messages array').to.equal( props.messageHashes ); expect( - request.params.expiry && isValidUnixTimestamp(request?.params.expiry), + signedReq.params.expiry && isValidUnixTimestamp(signedReq?.params.expiry), 'expiry should be a valid unix timestamp' ).to.be.true; - expect(request.params.extend, 'extend should be undefined').to.be.undefined; - expect(request.params.shorten, 'shorten should be true').to.be.true; - expect(request.params.signature, 'signature should not be empty').to.not.be.empty; + expect(signedReq.params.extend, 'extend should be undefined').to.be.undefined; + expect(signedReq.params.shorten, 'shorten should be true').to.be.true; + expect(signedReq.params.signature, 'signature should not be empty').to.not.be.empty; }); }); diff --git a/ts/test/session/unit/disappearing_messages/GetExpiriesRequest_test.ts b/ts/test/session/unit/disappearing_messages/GetExpiriesRequest_test.ts index 97ea8e3972..f4865b1647 100644 --- a/ts/test/session/unit/disappearing_messages/GetExpiriesRequest_test.ts +++ b/ts/test/session/unit/disappearing_messages/GetExpiriesRequest_test.ts @@ -1,21 +1,21 @@ import chai, { expect } from 'chai'; import chaiAsPromised from 'chai-as-promised'; +import { PubkeyType } from 'libsession_util_nodejs'; import Sinon from 'sinon'; import { GetExpiriesFromNodeSubRequest, fakeHash, } from '../../../../session/apis/snode_api/SnodeRequestTypes'; import { - GetExpiriesFromSnodeProps, GetExpiriesRequestResponseResults, - buildGetExpiriesRequest, processGetExpiriesRequestResponse, } from '../../../../session/apis/snode_api/getExpiriesRequest'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; -import { SnodeSignature } from '../../../../session/apis/snode_api/snodeSignatures'; +import { SnodeSignature } from '../../../../session/apis/snode_api/signature/snodeSignatures'; +import { WithMessagesHashes } from '../../../../session/apis/snode_api/types'; import { UserUtils } from '../../../../session/utils'; import { isValidUnixTimestamp } from '../../../../session/utils/Timestamps'; import { TypedStub, generateFakeSnode, stubWindowLog } from '../../../test-utils/utils'; +import { NetworkTime } from '../../../../util/NetworkTime'; chai.use(chaiAsPromised as any); @@ -23,7 +23,8 @@ describe('GetExpiriesRequest', () => { stubWindowLog(); const getLatestTimestampOffset = 200000; - const ourNumber = '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309'; + const ourNumber = + '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309' as PubkeyType; const ourUserEd25516Keypair = { pubKey: '37e1631b002de498caf7c5c1712718bde7f257c6dadeed0c21abf5e939e6c309', privKey: @@ -33,7 +34,7 @@ describe('GetExpiriesRequest', () => { let getOurPubKeyStrFromCacheStub: TypedStub; beforeEach(() => { - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); getOurPubKeyStrFromCacheStub = Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns( ourNumber ); @@ -45,12 +46,13 @@ describe('GetExpiriesRequest', () => { }); describe('buildGetExpiriesRequest', () => { - const props: GetExpiriesFromSnodeProps = { - messageHashes: ['messageHash'], + const props: WithMessagesHashes = { + messagesHashes: ['messageHash'], }; it('builds a valid request given the messageHashes and valid timestamp for now', async () => { - 
const request: GetExpiriesFromNodeSubRequest | null = await buildGetExpiriesRequest(props); + const unsigned = new GetExpiriesFromNodeSubRequest(props); + const request = await unsigned.build(); expect(request, 'should not return null').to.not.be.null; expect(request, 'should not return undefined').to.not.be.undefined; @@ -61,7 +63,7 @@ describe('GetExpiriesRequest', () => { expect(request, "method should be 'get_expiries'").to.have.property('method', 'get_expiries'); expect(request.params.pubkey, 'should have a matching pubkey').to.equal(ourNumber); expect(request.params.messages, 'messageHashes should match our input').to.deep.equal( - props.messageHashes + props.messagesHashes ); expect( request.params.timestamp && isValidUnixTimestamp(request?.params.timestamp), @@ -69,21 +71,38 @@ describe('GetExpiriesRequest', () => { ).to.be.true; expect(request.params.signature, 'signature should not be empty').to.not.be.empty; }); - it('fails to build a request if our pubkey is missing', async () => { + it('fails to build a request if our pubkey is missing, and throws', async () => { // Modify the stub behavior for this test only we need to return an unsupported type to simulate a missing pubkey (getOurPubKeyStrFromCacheStub as any).returns(undefined); + let errorStr = 'fakeerror'; + try { + const unsigned = new GetExpiriesFromNodeSubRequest(props); + const request = await unsigned.build(); + if (request) { + throw new Error('we should not have been able to build a request'); + } + } catch (e) { + errorStr = e.message; + } - const request: GetExpiriesFromNodeSubRequest | null = await buildGetExpiriesRequest(props); - - expect(request, 'should return null').to.be.null; + expect(errorStr).to.be.eq('[GetExpiriesFromNodeSubRequest] No pubkey found'); }); it('fails to build a request if our signature is missing', async () => { // Modify the stub behavior for this test only we need to return an unsupported type to simulate a missing pubkey - Sinon.stub(SnodeSignature, 'generateGetExpiriesSignature').resolves(null); + Sinon.stub(SnodeSignature, 'generateGetExpiriesOurSignature').resolves(null); - const request: GetExpiriesFromNodeSubRequest | null = await buildGetExpiriesRequest(props); - - expect(request, 'should return null').to.be.null; + const unsigned = new GetExpiriesFromNodeSubRequest(props); + try { + const request = await unsigned.build(); + if (request) { + throw new Error('should not be able to build the request'); + } + throw new Error('fake error'); + } catch (e) { + expect(e.message).to.be.eq( + '[GetExpiriesFromNodeSubRequest] SnodeSignature.generateUpdateExpirySignature returned an empty result messageHash' + ); + } }); }); diff --git a/ts/test/session/unit/libsession_util/libsession_utils_test.ts b/ts/test/session/unit/libsession_util/libsession_utils_test.ts new file mode 100644 index 0000000000..3c65e9bf6b --- /dev/null +++ b/ts/test/session/unit/libsession_util/libsession_utils_test.ts @@ -0,0 +1,352 @@ +import { expect } from 'chai'; +import { GroupPubkeyType, PubkeyType } from 'libsession_util_nodejs'; +import { randombytes_buf } from 'libsodium-wrappers-sumo'; +import Long from 'long'; +import Sinon from 'sinon'; +import { ConfigDumpData } from '../../../../data/configDump/configDump'; +import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; +import { UserUtils } from '../../../../session/utils'; +import { LibSessionUtil } from '../../../../session/utils/libsession/libsession_utils'; +import { + GenericWrapperActions, + MetaGroupWrapperActions, +} from 
'../../../../webworker/workers/browser/libsession_worker_interface'; +import { TestUtils } from '../../../test-utils'; +import { NetworkTime } from '../../../../util/NetworkTime'; + +describe('LibSessionUtil saveDumpsToDb', () => { + describe('for group', () => { + let groupPk: GroupPubkeyType; + + beforeEach(() => { + groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + }); + + afterEach(() => { + Sinon.restore(); + }); + + it('does not save to DB if needsDump reports false', async () => { + Sinon.stub(MetaGroupWrapperActions, 'needsDump').resolves(false); + const metaDump = Sinon.stub(MetaGroupWrapperActions, 'metaDump').resolves(new Uint8Array()); + const saveConfigDump = Sinon.stub(ConfigDumpData, 'saveConfigDump').resolves(); + await LibSessionUtil.saveDumpsToDb(groupPk); + expect(saveConfigDump.callCount).to.be.equal(0); + expect(metaDump.callCount).to.be.equal(0); + }); + + it('does save to DB if needsDump reports true', async () => { + Sinon.stub(MetaGroupWrapperActions, 'needsDump').resolves(true); + const dump = [1, 2, 3, 4, 5]; + const metaDump = Sinon.stub(MetaGroupWrapperActions, 'metaDump').resolves( + new Uint8Array(dump) + ); + const saveConfigDump = Sinon.stub(ConfigDumpData, 'saveConfigDump').resolves(); + await LibSessionUtil.saveDumpsToDb(groupPk); + expect(saveConfigDump.callCount).to.be.equal(1); + expect(metaDump.callCount).to.be.equal(1); + expect(metaDump.firstCall.args).to.be.deep.eq([groupPk]); + expect(saveConfigDump.firstCall.args).to.be.deep.eq([ + { + publicKey: groupPk, + variant: `MetaGroupConfig-${groupPk}`, + data: new Uint8Array(dump), + }, + ]); + }); + }); + + describe('for user', () => { + let userDetails: TestUtils.TestUserKeyPairs; + let sessionId: PubkeyType; + + beforeEach(async () => { + userDetails = await TestUtils.generateUserKeyPairs(); + sessionId = userDetails.x25519KeyPair.pubkeyHex; + }); + + afterEach(() => { + Sinon.restore(); + }); + + it('does not save to DB if all needsDump reports false', async () => { + Sinon.stub(GenericWrapperActions, 'needsDump').resolves(false); + const dump = Sinon.stub(GenericWrapperActions, 'dump').resolves(new Uint8Array()); + const saveConfigDump = Sinon.stub(ConfigDumpData, 'saveConfigDump').resolves(); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(sessionId); + + await LibSessionUtil.saveDumpsToDb(sessionId); + expect(saveConfigDump.callCount).to.be.equal(0); + expect(dump.callCount).to.be.equal(0); + }); + + it('does save to DB if any needsDump reports true', async () => { + Sinon.stub(GenericWrapperActions, 'needsDump') + .resolves(false) + .withArgs('ConvoInfoVolatileConfig') + .resolves(true); + const dump = Sinon.stub(GenericWrapperActions, 'dump').resolves(new Uint8Array()); + const saveConfigDump = Sinon.stub(ConfigDumpData, 'saveConfigDump').resolves(); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(sessionId); + + await LibSessionUtil.saveDumpsToDb(sessionId); + expect(saveConfigDump.callCount).to.be.equal(1); + expect(dump.callCount).to.be.equal(1); + }); + + it('does save to DB if all needsDump reports true', async () => { + const needsDump = Sinon.stub(GenericWrapperActions, 'needsDump').resolves(true); + const dumped = new Uint8Array([1, 2, 3]); + const dump = Sinon.stub(GenericWrapperActions, 'dump').resolves(dumped); + const saveConfigDump = Sinon.stub(ConfigDumpData, 'saveConfigDump').resolves(); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(sessionId); + + await LibSessionUtil.saveDumpsToDb(userDetails.x25519KeyPair.pubkeyHex); + 
expect(needsDump.callCount).to.be.equal(4); + expect(dump.callCount).to.be.equal(4); + expect(needsDump.getCalls().map(call => call.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + expect(saveConfigDump.callCount).to.be.equal(4); + + expect(saveConfigDump.getCalls().map(call => call.args)).to.be.deep.eq([ + [{ variant: 'UserConfig', publicKey: sessionId, data: dumped }], + [{ variant: 'ContactsConfig', publicKey: sessionId, data: dumped }], + [{ variant: 'UserGroupsConfig', publicKey: sessionId, data: dumped }], + [{ variant: 'ConvoInfoVolatileConfig', publicKey: sessionId, data: dumped }], + ]); + + expect(dump.getCalls().map(call => call.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + }); + }); +}); + +describe('LibSessionUtil pendingChangesForGroup', () => { + let groupPk: GroupPubkeyType; + beforeEach(() => { + groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + }); + + afterEach(() => { + Sinon.restore(); + }); + + it('empty results if needsPush is false', async () => { + Sinon.stub(MetaGroupWrapperActions, 'needsPush').resolves(false); + const result = await LibSessionUtil.pendingChangesForGroup(groupPk); + expect(result.allOldHashes.size).to.be.equal(0); + expect(result.messages.length).to.be.equal(0); + }); + + it('valid results if needsPush is true', async () => { + const pushResults = { + groupKeys: { data: new Uint8Array([3, 2, 1]), namespace: 13 }, + groupInfo: { + seqno: 1, + data: new Uint8Array([1, 2, 3]), + hashes: ['123', '333'], + namespace: 12, + }, + groupMember: { + seqno: 2, + data: new Uint8Array([1, 2]), + hashes: ['321', '111'], + namespace: 14, + }, + }; + Sinon.stub(MetaGroupWrapperActions, 'needsPush').resolves(true); + Sinon.stub(MetaGroupWrapperActions, 'push').resolves(pushResults); + Sinon.stub(NetworkTime, 'now').returns(1234); + const result = await LibSessionUtil.pendingChangesForGroup(groupPk); + expect(result.allOldHashes.size).to.be.equal(4); + // check that all of the hashes are there + expect([...result.allOldHashes]).to.have.members([ + ...pushResults.groupInfo.hashes, + ...pushResults.groupMember.hashes, + ]); + + expect(result.messages.length).to.be.equal(3); + // check for the keys push content + expect(result.messages[0]).to.be.deep.eq({ + type: 'GroupKeys', + ciphertext: new Uint8Array([3, 2, 1]), + namespace: 13, + }); + // check for the info push content + expect(result.messages[1]).to.be.deep.eq({ + type: 'GroupInfo', + ciphertext: new Uint8Array([1, 2, 3]), + namespace: 12, + seqno: Long.fromInt(pushResults.groupInfo.seqno), + }); + // check for the members push content + expect(result.messages[2]).to.be.deep.eq({ + type: 'GroupMember', + ciphertext: new Uint8Array([1, 2]), + namespace: 14, + seqno: Long.fromInt(pushResults.groupMember.seqno), + }); + }); + + it('skips entry results if one of the wrappers has no changes', async () => { + const pushResults = { + groupInfo: { + seqno: 1, + data: new Uint8Array([1, 2, 3]), + hashes: ['123', '333'], + namespace: 12, + }, + groupMember: null, + groupKeys: { data: new Uint8Array([3, 2, 1]), namespace: 13 }, + }; + Sinon.stub(MetaGroupWrapperActions, 'needsPush').resolves(true); + Sinon.stub(MetaGroupWrapperActions, 'push').resolves(pushResults); + const result = await LibSessionUtil.pendingChangesForGroup(groupPk); + expect(result.allOldHashes.size).to.be.equal(2); + expect(result.messages.length).to.be.equal(2); + }); +}); + 
+describe('LibSessionUtil pendingChangesForUs', () => {
+  beforeEach(async () => {});
+
+  afterEach(() => {
+    Sinon.restore();
+  });
+
+  it('empty results if all needsPush is false', async () => {
+    Sinon.stub(GenericWrapperActions, 'needsPush').resolves(false);
+    const result = await LibSessionUtil.pendingChangesForUs();
+    expect(result.allOldHashes.size).to.be.equal(0);
+    expect(result.messages.length).to.be.equal(0);
+  });
+
+  it('valid results if only ConvoInfoVolatile needsPush is true', async () => {
+    // this is what libsession would supposedly return
+    const pushResultsConvo = {
+      data: randombytes_buf(300),
+      seqno: 123,
+      hashes: ['123'],
+      namespace: SnodeNamespaces.ConvoInfoVolatile,
+    };
+    const needsPush = Sinon.stub(GenericWrapperActions, 'needsPush');
+    needsPush.resolves(false).withArgs('ConvoInfoVolatileConfig').resolves(true);
+
+    const push = Sinon.stub(GenericWrapperActions, 'push')
+      .throws()
+      .withArgs('ConvoInfoVolatileConfig')
+      .resolves(pushResultsConvo);
+
+    Sinon.stub(NetworkTime, 'now').returns(1234);
+    const result = await LibSessionUtil.pendingChangesForUs();
+    expect(needsPush.callCount).to.be.eq(4);
+    expect(needsPush.getCalls().map(m => m.args)).to.be.deep.eq([
+      ['UserConfig'],
+      ['ContactsConfig'],
+      ['UserGroupsConfig'],
+      ['ConvoInfoVolatileConfig'],
+    ]);
+
+    expect(push.callCount).to.be.eq(1);
+    expect(push.getCalls().map(m => m.args)).to.be.deep.eq([['ConvoInfoVolatileConfig']]);
+
+    // check that all of the hashes are there
+    expect(result.allOldHashes.size).to.be.equal(1);
+    expect([...result.allOldHashes]).to.have.members([...pushResultsConvo.hashes]);
+
+    // check that the messages to push are what we expect
+    expect(result.messages).to.be.deep.eq([
+      {
+        ciphertext: pushResultsConvo.data,
+        namespace: pushResultsConvo.namespace,
+        seqno: Long.fromNumber(pushResultsConvo.seqno),
+      },
+    ]);
+  });
+
+  it('valid results if all wrappers needsPush are true', async () => {
+    // this is what libsession would supposedly return
+    const pushConvo = {
+      data: randombytes_buf(300),
+      seqno: 123,
+      hashes: ['123'],
+      namespace: SnodeNamespaces.ConvoInfoVolatile,
+    };
+    const pushContacts = {
+      data: randombytes_buf(300),
+      seqno: 321,
+      hashes: ['321', '4444'],
+      namespace: SnodeNamespaces.UserContacts,
+    };
+    const pushGroups = {
+      data: randombytes_buf(300),
+      seqno: 222,
+      hashes: ['222', '5555'],
+      namespace: SnodeNamespaces.UserGroups,
+    };
+    const pushUser = {
+      data: randombytes_buf(300),
+      seqno: 111,
+      hashes: ['111'],
+      namespace: SnodeNamespaces.UserProfile,
+    };
+    const needsPush = Sinon.stub(GenericWrapperActions, 'needsPush');
+    needsPush.resolves(true);
+
+    const push = Sinon.stub(GenericWrapperActions, 'push');
+    push
+      .throws()
+      .withArgs('ContactsConfig')
+      .resolves(pushContacts)
+      .withArgs('UserConfig')
+      .resolves(pushUser)
+      .withArgs('UserGroupsConfig')
+      .resolves(pushGroups)
+      .withArgs('ConvoInfoVolatileConfig')
+      .resolves(pushConvo);
+
+    Sinon.stub(NetworkTime, 'now').returns(1234);
+    const result = await LibSessionUtil.pendingChangesForUs();
+    expect(needsPush.callCount).to.be.eq(4);
+    expect(needsPush.getCalls().map(m => m.args)).to.be.deep.eq([
+      ['UserConfig'],
+      ['ContactsConfig'],
+      ['UserGroupsConfig'],
+      ['ConvoInfoVolatileConfig'],
+    ]);
+
+    expect(push.callCount).to.be.eq(4);
+    expect(push.getCalls().map(m => m.args)).to.be.deep.eq([
+      ['UserConfig'],
+      ['ContactsConfig'],
+      ['UserGroupsConfig'],
+      ['ConvoInfoVolatileConfig'],
+    ]);
+
+    // check that all of the hashes are there
+    
expect(result.allOldHashes.size).to.be.equal(6); + expect([...result.allOldHashes]).to.have.members([ + ...pushContacts.hashes, + ...pushConvo.hashes, + ...pushGroups.hashes, + ...pushUser.hashes, + ]); + + // check for the messages to push are what we expect + expect(result.messages).to.be.deep.eq( + [pushUser, pushContacts, pushGroups, pushConvo].map(m => ({ + ciphertext: m.data, + namespace: m.namespace, + seqno: Long.fromNumber(m.seqno), + })) + ); + }); +}); diff --git a/ts/test/session/unit/libsession_wrapper/libsession_multi_encrypt_test.ts b/ts/test/session/unit/libsession_wrapper/libsession_multi_encrypt_test.ts new file mode 100644 index 0000000000..64c5b50c76 --- /dev/null +++ b/ts/test/session/unit/libsession_wrapper/libsession_multi_encrypt_test.ts @@ -0,0 +1,144 @@ +import { expect } from 'chai'; +import { MultiEncryptWrapperNode, UserGroupsWrapperNode } from 'libsession_util_nodejs'; +import Sinon from 'sinon'; +import { fromHexToArray } from '../../../../session/utils/String'; +import { TestUtils } from '../../../test-utils'; + +describe('libsession_multi_encrypt', () => { + // let us: TestUserKeyPairs; + // let groupX25519SecretKey: Uint8Array; + + beforeEach(async () => { + // us = await TestUtils.generateUserKeyPairs(); + // const group = await TestUtils.generateGroupV2(us.ed25519KeyPair.privKeyBytes); + // if (!group.secretKey) { + // throw new Error('failed to create grou[p'); + // } + // groupX25519SecretKey = group.secretKey; + }); + afterEach(() => { + Sinon.restore(); + }); + + describe('encrypt/decrypt multi encrypt/decrypt message', () => { + it('can encrypt/decrypt message one message to one recipient', async () => { + const toEncrypt = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + const plaintext = new Uint8Array(toEncrypt); + const domain = 'SessionGroupKickedMessage'; + + const us = await TestUtils.generateUserKeyPairs(); + const userXPk = us.x25519KeyPair.pubKey.slice(1); // remove 05 prefix + const userSk = us.ed25519KeyPair.privKeyBytes; + + const groupWrapper = new UserGroupsWrapperNode(us.ed25519KeyPair.privKeyBytes, null); + const group = groupWrapper.createGroup(); + if (!group.secretKey) { + throw new Error('failed to create group'); + } + const groupEd25519SecretKey = group.secretKey; + const groupEd25519Pubkey = fromHexToArray(group.pubkeyHex).slice(1); // remove 03 prefix + + const encrypted = MultiEncryptWrapperNode.multiEncrypt({ + messages: [plaintext], + recipients: [userXPk], + ed25519SecretKey: groupEd25519SecretKey, + domain, + }); + const decrypted = MultiEncryptWrapperNode.multiDecryptEd25519({ + domain, + encoded: encrypted, + userEd25519SecretKey: userSk, + senderEd25519Pubkey: groupEd25519Pubkey, + }); + expect(decrypted).to.be.deep.eq(Buffer.from(toEncrypt)); + }); + + it('can encrypt/decrypt message multiple messages to multiple recipients', async () => { + const toEncrypt1 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + const toEncrypt2 = [1, 2, 2, 3, 4, 5, 6, 7, 8, 9]; + const plaintext1 = new Uint8Array(toEncrypt1); + const plaintext2 = new Uint8Array(toEncrypt2); + const domain = 'SessionGroupKickedMessage'; + + const user1 = await TestUtils.generateUserKeyPairs(); + const user1XPk = user1.x25519KeyPair.pubKey.slice(1); // remove 05 prefix + const user1Sk = user1.ed25519KeyPair.privKeyBytes; + const user2 = await TestUtils.generateUserKeyPairs(); + const user2XPk = user2.x25519KeyPair.pubKey.slice(1); // remove 05 prefix + const user2Sk = user2.ed25519KeyPair.privKeyBytes; + + const groupWrapper = new 
UserGroupsWrapperNode(user1.ed25519KeyPair.privKeyBytes, null); + const group = groupWrapper.createGroup(); + if (!group.secretKey) { + throw new Error('failed to create group'); + } + const groupEd25519SecretKey = group.secretKey; + const groupEd25519Pubkey = fromHexToArray(group.pubkeyHex).slice(1); // remove 03 prefix + + const encrypted = MultiEncryptWrapperNode.multiEncrypt({ + messages: [plaintext1, plaintext2], + recipients: [user1XPk, user2XPk], + ed25519SecretKey: groupEd25519SecretKey, + domain, + }); + const decrypted1 = MultiEncryptWrapperNode.multiDecryptEd25519({ + domain, + encoded: encrypted, + userEd25519SecretKey: user1Sk, + senderEd25519Pubkey: groupEd25519Pubkey, + }); + + const decrypted2 = MultiEncryptWrapperNode.multiDecryptEd25519({ + domain, + encoded: encrypted, + userEd25519SecretKey: user2Sk, + senderEd25519Pubkey: groupEd25519Pubkey, + }); + expect(decrypted1).to.be.deep.eq(Buffer.from(toEncrypt1)); + expect(decrypted2).to.be.deep.eq(Buffer.from(toEncrypt2)); + }); + + it('can encrypt/decrypt one message to multiple recipients', async () => { + const toEncrypt1 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + const plaintext1 = new Uint8Array(toEncrypt1); + const domain = 'SessionGroupKickedMessage'; + + const user1 = await TestUtils.generateUserKeyPairs(); + const user1XPk = user1.x25519KeyPair.pubKey.slice(1); // remove 05 prefix + const user1Sk = user1.ed25519KeyPair.privKeyBytes; + const user2 = await TestUtils.generateUserKeyPairs(); + const user2XPk = user2.x25519KeyPair.pubKey.slice(1); // remove 05 prefix + const user2Sk = user2.ed25519KeyPair.privKeyBytes; + + const groupWrapper = new UserGroupsWrapperNode(user1.ed25519KeyPair.privKeyBytes, null); + const group = groupWrapper.createGroup(); + if (!group.secretKey) { + throw new Error('failed to create group'); + } + const groupEd25519SecretKey = group.secretKey; + const groupEd25519Pubkey = fromHexToArray(group.pubkeyHex).slice(1); // remove 03 prefix + + const encrypted = MultiEncryptWrapperNode.multiEncrypt({ + messages: [plaintext1], + recipients: [user1XPk, user2XPk], + ed25519SecretKey: groupEd25519SecretKey, + domain, + }); + const decrypted1 = MultiEncryptWrapperNode.multiDecryptEd25519({ + domain, + encoded: encrypted, + userEd25519SecretKey: user1Sk, + senderEd25519Pubkey: groupEd25519Pubkey, + }); + + const decrypted2 = MultiEncryptWrapperNode.multiDecryptEd25519({ + domain, + encoded: encrypted, + userEd25519SecretKey: user2Sk, + senderEd25519Pubkey: groupEd25519Pubkey, + }); + expect(decrypted1).to.be.deep.eq(Buffer.from(toEncrypt1)); + expect(decrypted2).to.be.deep.eq(Buffer.from(toEncrypt1)); + }); + }); +}); diff --git a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_contacts_test.ts b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_contacts_test.ts index b86dcfbf11..763bbd8198 100644 --- a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_contacts_test.ts +++ b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_contacts_test.ts @@ -3,13 +3,13 @@ import { expect } from 'chai'; import Sinon from 'sinon'; import { ConversationModel } from '../../../../models/conversation'; import { ConversationAttributes } from '../../../../models/conversationAttributes'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { UserUtils } from '../../../../session/utils'; import { SessionUtilContact } 
from '../../../../session/utils/libsession/libsession_utils_contacts'; import { TestUtils } from '../../../test-utils'; import { stubWindowLog } from '../../../test-utils/utils/stubbing'; import { ConversationTypeEnum, CONVERSATION_PRIORITIES } from '../../../../models/types'; +import { NetworkTime } from '../../../../util/NetworkTime'; describe('libsession_contacts', () => { stubWindowLog(); @@ -26,7 +26,7 @@ describe('libsession_contacts', () => { } as ConversationAttributes; beforeEach(() => { - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); TestUtils.stubLibSessionWorker(undefined); }); @@ -228,7 +228,7 @@ describe('libsession_contacts', () => { ...validArgs, ...contactArgs, } as ConversationAttributes); - Sinon.stub(getConversationController(), 'get').returns(contact); + Sinon.stub(ConvoHub.use(), 'get').returns(contact); Sinon.stub(SessionUtilContact, 'isContactToStoreInWrapper').returns(true); const wrapperContact = await SessionUtilContact.insertContactFromDBIntoWrapperAndRefresh( @@ -282,7 +282,7 @@ describe('libsession_contacts', () => { expirationMode: 'deleteAfterSend', expireTimer: 300, }); - Sinon.stub(getConversationController(), 'get').returns(contact); + Sinon.stub(ConvoHub.use(), 'get').returns(contact); Sinon.stub(SessionUtilContact, 'isContactToStoreInWrapper').returns(true); const wrapperContact = await SessionUtilContact.insertContactFromDBIntoWrapperAndRefresh( diff --git a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_metagroup_test.ts b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_metagroup_test.ts new file mode 100644 index 0000000000..a05c2bfe3d --- /dev/null +++ b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_metagroup_test.ts @@ -0,0 +1,376 @@ +import { expect } from 'chai'; +import { + GroupMemberGet, + MetaGroupWrapperNode, + PubkeyType, + UserGroupsWrapperNode, +} from 'libsession_util_nodejs'; +import { range } from 'lodash'; +import Sinon from 'sinon'; +import { HexString } from '../../../../node/hexStrings'; +import { toFixedUint8ArrayOfLength } from '../../../../types/sqlSharedTypes'; +import { TestUtils } from '../../../test-utils'; +import { TestUserKeyPairs } from '../../../test-utils/utils'; + +function profilePicture() { + return { key: new Uint8Array(range(0, 32)), url: `${Math.random()}` }; +} + +function emptyMember(pubkeyHex: PubkeyType): GroupMemberGet { + return { + memberStatus: 'INVITE_NOT_SENT', + name: '', + profilePicture: { + key: null, + url: null, + }, + nominatedAdmin: false, + removedStatus: 'NOT_REMOVED', + pubkeyHex, + }; +} + +describe('libsession_metagroup', () => { + let us: TestUserKeyPairs; + let groupCreated: ReturnType; + let metaGroupWrapper: MetaGroupWrapperNode; + let member: PubkeyType; + let member2: PubkeyType; + + beforeEach(async () => { + us = await TestUtils.generateUserKeyPairs(); + const groupWrapper = new UserGroupsWrapperNode(us.ed25519KeyPair.privateKey, null); + groupCreated = groupWrapper.createGroup(); + + metaGroupWrapper = new MetaGroupWrapperNode({ + groupEd25519Pubkey: toFixedUint8ArrayOfLength( + HexString.fromHexString(groupCreated.pubkeyHex.slice(2)), + 32 + ).buffer, + groupEd25519Secretkey: groupCreated.secretKey, + metaDumped: null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(us.ed25519KeyPair.privateKey, 64).buffer, + }); + metaGroupWrapper.keyRekey(); 
+ member = TestUtils.generateFakePubKeyStr(); + member2 = TestUtils.generateFakePubKeyStr(); + }); + afterEach(() => { + Sinon.restore(); + }); + + describe("encrypt/decrypt group's message", () => { + it('can encrypt/decrypt message for group with us as author', async () => { + const plaintext = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + const toEncrypt = new Uint8Array(plaintext); + const [encrypted] = metaGroupWrapper.encryptMessages([toEncrypt]); + const decrypted = metaGroupWrapper.decryptMessage(encrypted); + + expect(decrypted.plaintext).to.be.deep.eq(toEncrypt); + expect(decrypted.pubkeyHex).to.be.deep.eq(us.x25519KeyPair.pubkeyHex); + }); + + it('throws when encrypt/decrypt message when content is messed up', async () => { + const plaintext = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + const toEncrypt = new Uint8Array(plaintext); + const [encrypted] = metaGroupWrapper.encryptMessages([toEncrypt]); + + encrypted[1] -= 1; + const func = () => metaGroupWrapper.decryptMessage(encrypted); + expect(func).to.throw('unable to decrypt ciphertext with any current group keys'); + }); + }); + + describe('info', () => { + it('all fields are accounted for', () => { + const info = metaGroupWrapper.infoGet(); + expect(Object.keys(info).length).to.be.eq( + 7, // if you change this value, also make sure you add a test, testing that field, below + 'this test is designed to fail if you need to add tests to test a new field of libsession' + ); + }); + + it('can set and recover group name', () => { + expect(metaGroupWrapper.infoGet().name).to.be.deep.eq(null); + const info = metaGroupWrapper.infoGet(); + info.name = 'fake name'; + metaGroupWrapper.infoSet(info); + expect(metaGroupWrapper.infoGet().name).to.be.deep.eq('fake name'); + }); + + it('can set and recover group createdAt', () => { + const expected = 1234; + expect(metaGroupWrapper.infoGet().createdAtSeconds).to.be.deep.eq(null); + const info = metaGroupWrapper.infoGet(); + info.createdAtSeconds = expected; + metaGroupWrapper.infoSet(info); + expect(metaGroupWrapper.infoGet().createdAtSeconds).to.be.deep.eq(expected); + }); + + it('can set and recover group deleteAttachBeforeSeconds', () => { + const expected = 1234; + expect(metaGroupWrapper.infoGet().deleteAttachBeforeSeconds).to.be.deep.eq(null); + const info = metaGroupWrapper.infoGet(); + info.deleteAttachBeforeSeconds = expected; + metaGroupWrapper.infoSet(info); + expect(metaGroupWrapper.infoGet().deleteAttachBeforeSeconds).to.be.deep.eq(expected); + }); + + it('can set and recover group deleteBeforeSeconds', () => { + const expected = 1234; + expect(metaGroupWrapper.infoGet().deleteBeforeSeconds).to.be.deep.eq(null); + const info = metaGroupWrapper.infoGet(); + info.deleteBeforeSeconds = expected; + metaGroupWrapper.infoSet(info); + expect(metaGroupWrapper.infoGet().deleteBeforeSeconds).to.be.deep.eq(expected); + }); + + it('can set and recover group expirySeconds', () => { + const expected = 1234; + expect(metaGroupWrapper.infoGet().expirySeconds).to.be.deep.eq(null); + const info = metaGroupWrapper.infoGet(); + info.expirySeconds = expected; + metaGroupWrapper.infoSet(info); + expect(metaGroupWrapper.infoGet().expirySeconds).to.be.deep.eq(expected); + }); + + it('can set and recover group isDestroyed', () => { + expect(metaGroupWrapper.infoGet().isDestroyed).to.be.deep.eq(false); + metaGroupWrapper.infoDestroy(); + expect(metaGroupWrapper.infoGet().isDestroyed).to.be.deep.eq(true); + }); + + it('can set and recover group profilePicture', () => { + const expected = { key: new Uint8Array(range(0, 32)), 
url: '1234' }; + expect(metaGroupWrapper.infoGet().profilePicture).to.be.deep.eq({ url: null, key: null }); + const info = metaGroupWrapper.infoGet(); + + info.profilePicture = expected; + metaGroupWrapper.infoSet(info); + expect(metaGroupWrapper.infoGet().profilePicture).to.be.deep.eq(expected); + }); + }); + + describe('members', () => { + it('all fields are accounted for', () => { + const memberCreated = metaGroupWrapper.memberGetOrConstruct(member); + console.info('Object.keys(memberCreated) ', JSON.stringify(Object.keys(memberCreated))); + expect(Object.keys(memberCreated).length).to.be.eq( + 6, // if you change this value, also make sure you add a test, testing that new field, below + 'this test is designed to fail if you need to add tests to test a new field of libsession' + ); + }); + + it('can add member by setting its promoted state, both ok and nok', () => { + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.memberSetPromotionSent(member); + expect(metaGroupWrapper.memberGetAll()).to.be.deep.eq([ + { + ...emptyMember(member), + nominatedAdmin: true, + memberStatus: 'PROMOTION_SENT', + }, + ]); + + metaGroupWrapper.memberConstructAndSet(member2); + metaGroupWrapper.memberSetPromotionFailed(member2); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(2); + // the list is sorted by member pk, which means that index based test do not work + expect(metaGroupWrapper.memberGet(member2)).to.be.deep.eq({ + ...emptyMember(member2), + nominatedAdmin: true, + memberStatus: 'PROMOTION_FAILED', + }); + + // we test the admin: true case below + }); + + it('can add member by setting its invited state, both ok and nok', () => { + metaGroupWrapper.memberConstructAndSet(member); + + metaGroupWrapper.memberSetInvited(member, false); // with invite success + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + expect(metaGroupWrapper.memberGetAll()).to.be.deep.eq([ + { + ...emptyMember(member), + memberStatus: 'INVITE_SENT', + }, + ]); + + metaGroupWrapper.memberConstructAndSet(member2); + + metaGroupWrapper.memberSetInvited(member2, true); // with invite failed + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(2); + expect(metaGroupWrapper.memberGet(member2)).to.be.deep.eq({ + ...emptyMember(member2), + memberStatus: 'INVITE_FAILED', + }); + }); + + it('can add member by setting its accepted state', () => { + metaGroupWrapper.memberConstructAndSet(member); + + metaGroupWrapper.memberSetAccepted(member); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + expect(metaGroupWrapper.memberGetAll()[0]).to.be.deep.eq({ + ...emptyMember(member), + memberStatus: 'INVITE_ACCEPTED', + }); + + metaGroupWrapper.memberConstructAndSet(member2); + + metaGroupWrapper.memberSetAccepted(member2); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(2); + expect(metaGroupWrapper.memberGet(member2)).to.be.deep.eq({ + ...emptyMember(member2), + memberStatus: 'INVITE_ACCEPTED', + }); + }); + + it('can erase member', () => { + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.memberConstructAndSet(member2); + + metaGroupWrapper.memberSetAccepted(member); + metaGroupWrapper.memberSetPromoted(member2); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(2); + + expect(metaGroupWrapper.memberGet(member)).to.be.deep.eq({ + ...emptyMember(member), + memberStatus: 'INVITE_ACCEPTED', + }); + expect(metaGroupWrapper.memberGet(member2)).to.be.deep.eq({ + ...emptyMember(member2), + memberStatus: 'PROMOTION_SENT', + 
nominatedAdmin: true, + }); + + const rekeyed = metaGroupWrapper.memberEraseAndRekey([member2]); + expect(rekeyed).to.be.eq(true); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + expect(metaGroupWrapper.memberGetAll()[0]).to.be.deep.eq({ + ...emptyMember(member), + memberStatus: 'INVITE_ACCEPTED', + }); + }); + + it('can add via name set', () => { + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.memberSetNameTruncated(member, 'member name'); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + expect(metaGroupWrapper.memberGetAll()[0]).to.be.deep.eq({ + ...emptyMember(member), + name: 'member name', + }); + }); + + it('can add via profile picture set', () => { + const pic = profilePicture(); + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.memberSetProfilePicture(member, pic); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + const expected = { ...emptyMember(member), profilePicture: pic }; + + expect(metaGroupWrapper.memberGetAll()[0]).to.be.deep.eq(expected); + }); + + it('can add via admin set', () => { + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.memberSetPromotionAccepted(member); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + const expected: GroupMemberGet = { + ...emptyMember(member), + nominatedAdmin: true, + memberStatus: 'PROMOTION_ACCEPTED', + }; + + expect(metaGroupWrapper.memberGetAll()[0]).to.be.deep.eq(expected); + }); + + it('can simply add, and has the correct default', () => { + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(0); + metaGroupWrapper.memberConstructAndSet(member); + expect(metaGroupWrapper.memberGetAll()).to.be.deep.eq([emptyMember(member)]); + }); + + it('can mark as removed with messages', () => { + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.membersMarkPendingRemoval([member], true); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + const expected: GroupMemberGet = { + ...emptyMember(member), + removedStatus: 'REMOVED_MEMBER_AND_MESSAGES', + memberStatus: 'INVITE_ACCEPTED', // marking a member as pending removal auto-marks him as accepted (so we don't retry sending an invite) + }; + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + expect(metaGroupWrapper.memberGetAll()[0]).to.be.deep.eq(expected); + }); + + it('can mark as removed without messages', () => { + metaGroupWrapper.memberConstructAndSet(member); + metaGroupWrapper.membersMarkPendingRemoval([member], false); + expect(metaGroupWrapper.memberGetAll().length).to.be.deep.eq(1); + const expected: GroupMemberGet = { + ...emptyMember(member), + removedStatus: 'REMOVED_MEMBER', + memberStatus: 'INVITE_ACCEPTED', // marking a member as pending removal auto-marks him as accepted (so we don't retry sending an invite) + }; + expect(metaGroupWrapper.memberGetAll()).to.be.deep.eq([expected]); + }); + }); + + describe('keys', () => { + it('fresh group does not need rekey', () => { + expect(metaGroupWrapper.keysNeedsRekey()).to.be.eq( + false, + 'rekey should be false on fresh group' + ); + }); + + it.skip('merging a key conflict marks needsRekey to true', () => { + const metaGroupWrapper2 = new MetaGroupWrapperNode({ + groupEd25519Pubkey: toFixedUint8ArrayOfLength( + HexString.fromHexString(groupCreated.pubkeyHex.slice(2)), + 32 + ).buffer, + groupEd25519Secretkey: groupCreated.secretKey, + metaDumped: null, + userEd25519Secretkey: toFixedUint8ArrayOfLength(us.ed25519KeyPair.privateKey, 64).buffer, 
+ }); + + // mark current user as admin + metaGroupWrapper.memberSetPromotionAccepted(us.x25519KeyPair.pubkeyHex); + metaGroupWrapper2.memberSetPromotionAccepted(us.x25519KeyPair.pubkeyHex); + + // add 2 normal members to each of those wrappers + const m1 = TestUtils.generateFakePubKeyStr(); + const m2 = TestUtils.generateFakePubKeyStr(); + metaGroupWrapper.memberSetAccepted(m1); + metaGroupWrapper.memberSetAccepted(m2); + metaGroupWrapper2.memberSetAccepted(m1); + metaGroupWrapper2.memberSetAccepted(m2); + + expect(metaGroupWrapper.keysNeedsRekey()).to.be.eq(false); + expect(metaGroupWrapper2.keysNeedsRekey()).to.be.eq(false); + + // remove m2 from wrapper2, and m1 from wrapper1 + const rekeyed1 = metaGroupWrapper2.memberEraseAndRekey([m2]); + const rekeyed2 = metaGroupWrapper.memberEraseAndRekey([m1]); + expect(rekeyed1).to.be.eq(true); + expect(rekeyed2).to.be.eq(true); + + // const push1 = metaGroupWrapper.push(); + // metaGroupWrapper2.metaMerge([push1]); + + // const wrapper2Rekeyed = metaGroupWrapper2.keyRekey(); + // metaGroupWrapper.keyRekey(); + + // const loadedKey = metaGroupWrapper.loadKeyMessage('fakehash1', wrapper2Rekeyed, Date.now()); + // expect(loadedKey).to.be.eq(true, 'key should have been loaded'); + expect(metaGroupWrapper.keysNeedsRekey()).to.be.eq( + true, + 'rekey should be true for after add' + ); + }); + }); +}); diff --git a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_groups_test.ts b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_groups_test.ts index 58c1e0e55a..b917742fdb 100644 --- a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_groups_test.ts +++ b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_groups_test.ts @@ -5,13 +5,14 @@ import { describe } from 'mocha'; import Sinon from 'sinon'; import { ConversationModel } from '../../../../models/conversation'; import { ConversationAttributes } from '../../../../models/conversationAttributes'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { UserUtils } from '../../../../session/utils'; +import { toHex } from '../../../../session/utils/String'; import { SessionUtilUserGroups } from '../../../../session/utils/libsession/libsession_utils_user_groups'; import { TestUtils } from '../../../test-utils'; import { generateFakeECKeyPair, stubWindowLog } from '../../../test-utils/utils'; import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../../../../models/types'; +import { NetworkTime } from '../../../../util/NetworkTime'; describe('libsession_user_groups', () => { stubWindowLog(); @@ -26,7 +27,7 @@ describe('libsession_user_groups', () => { } as ConversationAttributes; beforeEach(() => { - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); TestUtils.stubLibSessionWorker(undefined); }); @@ -61,7 +62,7 @@ describe('libsession_user_groups', () => { const validLegacyGroupArgs = { ...validArgs, type: ConversationTypeEnum.GROUP, - id: '05123456564', + id: TestUtils.generateFakePubKeyStr(), } as ConversationAttributes; it('includes legacy group', () => { @@ -75,14 +76,7 @@ describe('libsession_user_groups', () => { }); it('exclude legacy group left', () => { - expect( 
- SessionUtilUserGroups.isUserGroupToStoreInWrapper( - new ConversationModel({ - ...validLegacyGroupArgs, - left: true, - }) - ) - ).to.be.eq(false); + // we cannot have a left group anymore. It's removed entirely when we leave it }); it('exclude legacy group kicked', () => { expect( @@ -123,7 +117,7 @@ describe('libsession_user_groups', () => { SessionUtilUserGroups.isUserGroupToStoreInWrapper( new ConversationModel({ ...validArgs, - type: ConversationTypeEnum.GROUPV3, + type: ConversationTypeEnum.GROUPV2, id: '03123456564', }) ) @@ -165,12 +159,13 @@ describe('libsession_user_groups', () => { describe('LegacyGroups', () => { describe('insertGroupsFromDBIntoWrapperAndRefresh', () => { + const asHex = toHex(groupECKeyPair.publicKeyData); const groupArgs = { - id: groupECKeyPair.publicKeyData.toString(), + id: asHex, displayNameInProfile: 'Test Group', expirationMode: 'off', expireTimer: 0, - members: [groupECKeyPair.publicKeyData.toString()], + members: [asHex], } as ConversationAttributes; it('returns wrapper values that match with the inputted group', async () => { @@ -178,7 +173,7 @@ describe('libsession_user_groups', () => { ...validArgs, ...groupArgs, }); - Sinon.stub(getConversationController(), 'get').returns(group); + Sinon.stub(ConvoHub.use(), 'get').returns(group); Sinon.stub(SessionUtilUserGroups, 'isUserGroupToStoreInWrapper').returns(true); TestUtils.stubData('getLatestClosedGroupEncryptionKeyPair').resolves( groupECKeyPair.toHexKeyPair() @@ -235,7 +230,7 @@ describe('libsession_user_groups', () => { expirationMode: 'deleteAfterSend', expireTimer: 300, }); - Sinon.stub(getConversationController(), 'get').returns(group); + Sinon.stub(ConvoHub.use(), 'get').returns(group); Sinon.stub(SessionUtilUserGroups, 'isUserGroupToStoreInWrapper').returns(true); TestUtils.stubData('getLatestClosedGroupEncryptionKeyPair').resolves( groupECKeyPair.toHexKeyPair() diff --git a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile.ts b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile.ts new file mode 100644 index 0000000000..1af72e7fcd --- /dev/null +++ b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile.ts @@ -0,0 +1,26 @@ +/* eslint-disable no-unused-expressions */ +import { expect } from 'chai'; +import Sinon from 'sinon'; + +import { SessionUtilUserProfile } from '../../../../session/utils/libsession/libsession_utils_user_profile'; +import { UserUtils } from '../../../../session/utils'; +import { TestUtils } from '../../../test-utils'; + +describe('libsession_wrapper', () => { + afterEach(() => { + Sinon.restore(); + }); + + it('isUserProfileToStoreInWrapper returns true if thats our convo', () => { + const us = TestUtils.generateFakePubKeyStr(); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(us); + expect(SessionUtilUserProfile.isUserProfileToStoreInWrapper(us)).to.be.true; + }); + + it('isUserProfileToStoreInWrapper returns false if thats NOT our convo', () => { + const us = TestUtils.generateFakePubKeyStr(); + const notUs = TestUtils.generateFakePubKeyStr(); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(us); + expect(SessionUtilUserProfile.isUserProfileToStoreInWrapper(notUs)).to.be.false; + }); +}); diff --git a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile_test.ts b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile_test.ts index 29d14fd398..c0cd21453f 100644 --- a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile_test.ts +++ 
b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_user_profile_test.ts @@ -4,13 +4,13 @@ import Sinon from 'sinon'; import { ConversationModel } from '../../../../models/conversation'; import { ConversationAttributes } from '../../../../models/conversationAttributes'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { UserUtils } from '../../../../session/utils'; import { SessionUtilUserProfile } from '../../../../session/utils/libsession/libsession_utils_user_profile'; import { TestUtils } from '../../../test-utils'; import { stubWindowLog } from '../../../test-utils/utils'; import { ConversationTypeEnum } from '../../../../models/types'; +import { NetworkTime } from '../../../../util/NetworkTime'; describe('libsession_user_profile', () => { stubWindowLog(); @@ -26,7 +26,7 @@ describe('libsession_user_profile', () => { } as ConversationAttributes; beforeEach(() => { - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(getLatestTimestampOffset); Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); TestUtils.stubLibSessionWorker(undefined); }); @@ -62,7 +62,7 @@ describe('libsession_user_profile', () => { ...validArgs, ...contactArgs, } as ConversationAttributes); - Sinon.stub(getConversationController(), 'get').returns(contact); + Sinon.stub(ConvoHub.use(), 'get').returns(contact); Sinon.stub(SessionUtilUserProfile, 'isUserProfileToStoreInWrapper').returns(true); const wrapperUserProfile = @@ -100,7 +100,7 @@ describe('libsession_user_profile', () => { ...contactArgs, id: TestUtils.generateFakePubKeyStr(), } as ConversationAttributes); - Sinon.stub(getConversationController(), 'get').returns(contact); + Sinon.stub(ConvoHub.use(), 'get').returns(contact); Sinon.stub(SessionUtilUserProfile, 'isUserProfileToStoreInWrapper').returns(true); try { @@ -117,7 +117,7 @@ describe('libsession_user_profile', () => { expireTimer: 300, id: ourNumber, }); - Sinon.stub(getConversationController(), 'get').returns(contact); + Sinon.stub(ConvoHub.use(), 'get').returns(contact); Sinon.stub(SessionUtilUserProfile, 'isUserProfileToStoreInWrapper').returns(true); const wrapperUserProfile = diff --git a/ts/test/session/unit/libsession_wrapper/libsession_wrapper_usergroups_test.ts b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_usergroups_test.ts new file mode 100644 index 0000000000..c2ec8a0092 --- /dev/null +++ b/ts/test/session/unit/libsession_wrapper/libsession_wrapper_usergroups_test.ts @@ -0,0 +1,141 @@ +import { expect } from 'chai'; + +import Sinon from 'sinon'; +import { ConversationModel } from '../../../../models/conversation'; +import { CONVERSATION_PRIORITIES, ConversationTypeEnum } from '../../../../models/types'; +import { UserUtils } from '../../../../session/utils'; +import { SessionUtilUserGroups } from '../../../../session/utils/libsession/libsession_utils_user_groups'; +import { TestUtils } from '../../../test-utils'; + +describe('libsession_groups', () => { + describe('filter user groups for wrapper', () => { + const ourNumber = '051234567890acbdef'; + const validArgs = { + id: 'http://example.org/roomId1234', + type: ConversationTypeEnum.GROUP, + active_at: 1234, + }; + beforeEach(() => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); + }); + 
afterEach(() => { + Sinon.restore(); + }); + + describe('communities', () => { + it('includes public group/community', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ ...validArgs } as any) + ) + ).to.be.eq(true); + }); + + it('excludes public group/community inactive', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ ...validArgs, active_at: undefined } as any) + ) + ).to.be.eq(false); + }); + }); + + describe('legacy closed groups', () => { + const validLegacyGroupArgs = { + ...validArgs, + type: ConversationTypeEnum.GROUP, + id: TestUtils.generateFakePubKeyStr(), + } as any; + + it('includes legacy group', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validLegacyGroupArgs, + }) + ) + ).to.be.eq(true); + }); + + it('exclude legacy group left', () => { + // we cannot have a left group anymore. It's removed entirely when we leave it + }); + it('exclude legacy group kicked', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validLegacyGroupArgs, + isKickedFromGroup: true, + }) + ) + ).to.be.eq(false); + }); + + it('exclude legacy group not active', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validLegacyGroupArgs, + active_at: undefined, + }) + ) + ).to.be.eq(false); + }); + + it('include hidden legacy group', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validLegacyGroupArgs, + priority: CONVERSATION_PRIORITIES.hidden, + }) + ) + ).to.be.eq(true); + }); + }); + + it('excludes closed group v3 (for now)', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validArgs, + type: ConversationTypeEnum.GROUPV2, + id: '03123456564', + } as any) + ) + ).to.be.eq(false); + }); + + it('excludes empty id', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validArgs, + id: '', + } as any) + ) + ).to.be.eq(false); + + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validArgs, + id: '9871', + } as any) + ) + ).to.be.eq(false); + }); + + it('excludes private', () => { + expect( + SessionUtilUserGroups.isUserGroupToStoreInWrapper( + new ConversationModel({ + ...validArgs, + id: '0511111', + type: ConversationTypeEnum.PRIVATE, + } as any) + ) + ).to.be.eq(false); + }); + }); +}); diff --git a/ts/test/session/unit/messages/ChatMessage_test.ts b/ts/test/session/unit/messages/ChatMessage_test.ts index d2f525b1a0..b1efc9425e 100644 --- a/ts/test/session/unit/messages/ChatMessage_test.ts +++ b/ts/test/session/unit/messages/ChatMessage_test.ts @@ -1,4 +1,5 @@ import { expect } from 'chai'; +// eslint-disable-next-line import/order import { TextEncoder } from 'util'; import { toNumber } from 'lodash'; @@ -19,7 +20,7 @@ const sharedNoExpire = { describe('VisibleMessage', () => { it('can create empty message with just a timestamp', () => { const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), ...sharedNoExpire, }); const plainText = message.plainTextBuffer(); @@ -30,7 +31,7 @@ describe('VisibleMessage', () => { it('can create message with a body', () => { const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), body: 'body', ...sharedNoExpire, }); @@ -41,7 +42,7 @@ 
describe('VisibleMessage', () => { it('can create a disappear after read message', () => { const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), ...sharedNoExpire, expirationType: 'deleteAfterRead', expireTimer: 300, @@ -60,7 +61,7 @@ describe('VisibleMessage', () => { it('can create a disappear after send message', () => { const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), ...sharedNoExpire, expirationType: 'deleteAfterSend', expireTimer: 60, @@ -86,7 +87,7 @@ describe('VisibleMessage', () => { profileKey, }; const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), lokiProfile, ...sharedNoExpire, }); @@ -106,7 +107,7 @@ describe('VisibleMessage', () => { it('can create message with a quote without attachments', () => { const quote: Quote = { id: 1234, author: 'author', text: 'text' }; const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), quote, ...sharedNoExpire, }); @@ -124,7 +125,7 @@ describe('VisibleMessage', () => { previews.push(preview); const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), preview: previews, ...sharedNoExpire, }); @@ -147,7 +148,7 @@ describe('VisibleMessage', () => { attachments.push(attachment); const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), attachments, ...sharedNoExpire, }); @@ -163,7 +164,7 @@ describe('VisibleMessage', () => { it('correct ttl', () => { const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), ...sharedNoExpire, }); expect(message.ttl()).to.equal(Constants.TTL_DEFAULT.CONTENT_MESSAGE); @@ -171,10 +172,9 @@ describe('VisibleMessage', () => { it('has an identifier', () => { const message = new VisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), ...sharedNoExpire, }); - expect(message.identifier).to.not.equal(null, 'identifier cannot be null'); expect(message.identifier).to.not.equal(undefined, 'identifier cannot be undefined'); }); diff --git a/ts/test/session/unit/messages/ConfigurationMessage_test.ts b/ts/test/session/unit/messages/ConfigurationMessage_test.ts deleted file mode 100644 index e0d86073ea..0000000000 --- a/ts/test/session/unit/messages/ConfigurationMessage_test.ts +++ /dev/null @@ -1,251 +0,0 @@ -import { expect } from 'chai'; -import { ECKeyPair } from '../../../../receiver/keypairs'; -import { TTL_DEFAULT } from '../../../../session/constants'; - -import { - ConfigurationMessage, - ConfigurationMessageClosedGroup, - ConfigurationMessageContact, -} from '../../../../session/messages/outgoing/controlMessage/ConfigurationMessage'; -import { TestUtils } from '../../../test-utils'; - -describe('ConfigurationMessage', () => { - it('throw if closed group is not set', () => { - const activeClosedGroups = null as any; - const params = { - activeClosedGroups, - activeOpenGroups: [], - timestamp: Date.now(), - displayName: 'displayName', - contacts: [], - }; - expect(() => new ConfigurationMessage(params)).to.throw('closed group must be set'); - }); - - it('throw if open group is not set', () => { - const activeOpenGroups = null as any; - const params = { - activeClosedGroups: [], - activeOpenGroups, - timestamp: Date.now(), - displayName: 'displayName', - contacts: [], - }; - expect(() => new ConfigurationMessage(params)).to.throw('open group must be set'); - }); - - 
it('throw if display name is not set', () => { - const params = { - activeClosedGroups: [], - activeOpenGroups: [], - timestamp: Date.now(), - displayName: undefined as any, - contacts: [], - }; - expect(() => new ConfigurationMessage(params)).to.throw('displayName must be set'); - }); - - it('throw if display name is set but empty', () => { - const params = { - activeClosedGroups: [], - activeOpenGroups: [], - timestamp: Date.now(), - displayName: undefined as any, - contacts: [], - }; - expect(() => new ConfigurationMessage(params)).to.throw('displayName must be set'); - }); - - it('ttl is 4 days', () => { - const params = { - activeClosedGroups: [], - activeOpenGroups: [], - timestamp: Date.now(), - displayName: 'displayName', - contacts: [], - }; - const configMessage = new ConfigurationMessage(params); - expect(configMessage.ttl()).to.be.equal(TTL_DEFAULT.CONTENT_MESSAGE); - }); - - describe('ConfigurationMessageClosedGroup', () => { - it('throw if closed group has no encryptionkeypair', () => { - const member = TestUtils.generateFakePubKey().key; - const params = { - publicKey: TestUtils.generateFakePubKey().key, - name: 'groupname', - members: [member], - admins: [member], - encryptionKeyPair: undefined as any, - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw( - 'Encryption key pair looks invalid' - ); - }); - - it('throw if closed group has invalid encryptionkeypair', () => { - const member = TestUtils.generateFakePubKey().key; - const params = { - publicKey: TestUtils.generateFakePubKey().key, - name: 'groupname', - members: [member], - admins: [member], - encryptionKeyPair: new ECKeyPair(new Uint8Array(), new Uint8Array()), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw( - 'Encryption key pair looks invalid' - ); - }); - - it('throw if closed group has invalid pubkey', () => { - const member = TestUtils.generateFakePubKey().key; - const params = { - publicKey: 'invalidpubkey', - name: 'groupname', - members: [member], - admins: [member], - encryptionKeyPair: TestUtils.generateFakeECKeyPair(), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw(); - }); - - it('throw if closed group has invalid name', () => { - const member = TestUtils.generateFakePubKey().key; - const params = { - publicKey: TestUtils.generateFakePubKey().key, - name: '', - members: [member], - admins: [member], - encryptionKeyPair: TestUtils.generateFakeECKeyPair(), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw('name must be set'); - }); - - it('throw if members is empty', () => { - const member = TestUtils.generateFakePubKey().key; - const params = { - publicKey: TestUtils.generateFakePubKey().key, - name: 'groupname', - members: [], - admins: [member], - encryptionKeyPair: TestUtils.generateFakeECKeyPair(), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw('members must be set'); - }); - - it('throw if admins is empty', () => { - const member = TestUtils.generateFakePubKey().key; - const params = { - publicKey: TestUtils.generateFakePubKey().key, - name: 'groupname', - members: [member], - admins: [], - encryptionKeyPair: TestUtils.generateFakeECKeyPair(), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw('admins must be set'); - }); - - it('throw if some admins are not members', () => { - const member = TestUtils.generateFakePubKey().key; - const admin = TestUtils.generateFakePubKey().key; - const params = { - publicKey: 
TestUtils.generateFakePubKey().key, - name: 'groupname', - members: [member], - admins: [admin], - encryptionKeyPair: TestUtils.generateFakeECKeyPair(), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw( - 'some admins are not members' - ); - }); - }); - - describe('ConfigurationMessageContact', () => { - it('throws if contacts is not set', () => { - const params = { - activeClosedGroups: [], - activeOpenGroups: [], - timestamp: Date.now(), - displayName: 'displayName', - contacts: undefined as any, - }; - expect(() => new ConfigurationMessage(params)).to.throw('contacts must be set'); - }); - it('throw if some admins are not members', () => { - const member = TestUtils.generateFakePubKey().key; - const admin = TestUtils.generateFakePubKey().key; - const params = { - publicKey: TestUtils.generateFakePubKey().key, - name: 'groupname', - members: [member], - admins: [admin], - encryptionKeyPair: TestUtils.generateFakeECKeyPair(), - }; - - expect(() => new ConfigurationMessageClosedGroup(params)).to.throw( - 'some admins are not members' - ); - }); - - it('throw if the contact has not a valid pubkey', () => { - const params = { - publicKey: '05', - displayName: 'contactDisplayName', - }; - - expect(() => new ConfigurationMessageContact(params)).to.throw(); - - const params2 = { - publicKey: undefined as any, - displayName: 'contactDisplayName', - }; - - expect(() => new ConfigurationMessageContact(params2)).to.throw(); - }); - - it('throw if the contact has an empty display name', () => { - // a display name cannot be empty nor undefined - - expect(() => new ConfigurationMessageContact(params2)).to.throw(); - - const params2 = { - publicKey: TestUtils.generateFakePubKey().key, - displayName: '', - }; - - expect(() => new ConfigurationMessageContact(params2)).to.throw(); - }); - - it('throw if the contact has a profilePictureURL set but empty', () => { - const params = { - publicKey: TestUtils.generateFakePubKey().key, - displayName: 'contactDisplayName', - profilePictureURL: '', - }; - - expect(() => new ConfigurationMessageContact(params)).to.throw( - 'profilePictureURL must either undefined or not empty' - ); - }); - - it('throw if the contact has a profileKey set but empty', () => { - const params = { - publicKey: TestUtils.generateFakePubKey().key, - displayName: 'contactDisplayName', - profileKey: new Uint8Array(), - }; - - expect(() => new ConfigurationMessageContact(params)).to.throw( - 'profileKey must either undefined or not empty' - ); - }); - }); -}); diff --git a/ts/test/session/unit/messages/GroupInvitationMessage_test.ts b/ts/test/session/unit/messages/GroupInvitationMessage_test.ts index 0210baf63e..3e7770ab67 100644 --- a/ts/test/session/unit/messages/GroupInvitationMessage_test.ts +++ b/ts/test/session/unit/messages/GroupInvitationMessage_test.ts @@ -7,13 +7,13 @@ import { GroupInvitationMessage } from '../../../../session/messages/outgoing/vi describe('GroupInvitationMessage', () => { let message: GroupInvitationMessage; - const timestamp = Date.now(); + const createAtNetworkTimestamp = Date.now(); const url = 'http://localhost'; const name = 'test'; beforeEach(() => { message = new GroupInvitationMessage({ - timestamp, + createAtNetworkTimestamp, url, name, expirationType: null, diff --git a/ts/test/session/unit/messages/MessageRequestResponse_test.ts b/ts/test/session/unit/messages/MessageRequestResponse_test.ts index 744ac32add..386365648f 100644 --- a/ts/test/session/unit/messages/MessageRequestResponse_test.ts +++ 
b/ts/test/session/unit/messages/MessageRequestResponse_test.ts @@ -9,7 +9,7 @@ describe('MessageRequestResponse', () => { let message: MessageRequestResponse | undefined; it('correct ttl', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), }); expect(message.ttl()).to.equal(Constants.TTL_DEFAULT.CONTENT_MESSAGE); @@ -17,7 +17,7 @@ describe('MessageRequestResponse', () => { it('has an identifier', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), }); expect(message.identifier).to.not.equal(null, 'identifier cannot be null'); @@ -27,7 +27,7 @@ describe('MessageRequestResponse', () => { it('has an identifier matching if given', () => { const identifier = v4(); message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), identifier, }); @@ -36,7 +36,7 @@ describe('MessageRequestResponse', () => { it('isApproved is always true', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), }); const plainText = message.plainTextBuffer(); const decoded = SignalService.Content.decode(plainText); @@ -47,7 +47,7 @@ describe('MessageRequestResponse', () => { it('can create response without lokiProfile', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), }); const plainText = message.plainTextBuffer(); const decoded = SignalService.Content.decode(plainText); @@ -58,7 +58,7 @@ describe('MessageRequestResponse', () => { it('can create response with display name only', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), lokiProfile: { displayName: 'Jane', profileKey: null }, }); const plainText = message.plainTextBuffer(); @@ -71,7 +71,7 @@ describe('MessageRequestResponse', () => { it('empty profileKey does not get included', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), lokiProfile: { displayName: 'Jane', profileKey: new Uint8Array(0) }, }); const plainText = message.plainTextBuffer(); @@ -85,7 +85,7 @@ describe('MessageRequestResponse', () => { it('can create response with display name and profileKey and profileImage', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), lokiProfile: { displayName: 'Jane', profileKey: new Uint8Array([1, 2, 3, 4, 5, 6]), @@ -117,7 +117,7 @@ describe('MessageRequestResponse', () => { it('profileKey not included if profileUrl not set', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), lokiProfile: { displayName: 'Jane', profileKey: new Uint8Array([1, 2, 3, 4, 5, 6]) }, }); const plainText = message.plainTextBuffer(); @@ -135,7 +135,7 @@ describe('MessageRequestResponse', () => { it('url not included if profileKey not set', () => { message = new MessageRequestResponse({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), lokiProfile: { displayName: 'Jane', profileKey: null, diff --git a/ts/test/session/unit/messages/ReceiptMessage_test.ts b/ts/test/session/unit/messages/ReceiptMessage_test.ts index 425d6cc78c..027cc69cb0 100644 --- a/ts/test/session/unit/messages/ReceiptMessage_test.ts +++ b/ts/test/session/unit/messages/ReceiptMessage_test.ts @@ -12,8 +12,8 @@ describe('ReceiptMessage', () => { beforeEach(() => { timestamps = [987654321, 
123456789]; - const timestamp = Date.now(); - readMessage = new ReadReceiptMessage({ timestamp, timestamps }); + const createAtNetworkTimestamp = Date.now(); + readMessage = new ReadReceiptMessage({ createAtNetworkTimestamp, timestamps }); }); it('content of a read receipt is correct', () => { diff --git a/ts/test/session/unit/messages/TypingMessage_test.ts b/ts/test/session/unit/messages/TypingMessage_test.ts index 621d5f8779..10828398f0 100644 --- a/ts/test/session/unit/messages/TypingMessage_test.ts +++ b/ts/test/session/unit/messages/TypingMessage_test.ts @@ -1,7 +1,6 @@ import { expect } from 'chai'; import Long from 'long'; -import { toNumber } from 'lodash'; import { SignalService } from '../../../../protobuf'; import { Constants } from '../../../../session'; import { TypingMessage } from '../../../../session/messages/outgoing/controlMessage/TypingMessage'; @@ -9,7 +8,7 @@ import { TypingMessage } from '../../../../session/messages/outgoing/controlMess describe('TypingMessage', () => { it('has Action.STARTED if isTyping = true', () => { const message = new TypingMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), isTyping: true, }); const plainText = message.plainTextBuffer(); @@ -22,7 +21,7 @@ describe('TypingMessage', () => { it('has Action.STOPPED if isTyping = false', () => { const message = new TypingMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), isTyping: false, }); const plainText = message.plainTextBuffer(); @@ -33,21 +32,9 @@ describe('TypingMessage', () => { ); }); - it('has typingTimestamp set if value passed', () => { - const message = new TypingMessage({ - timestamp: Date.now(), - isTyping: true, - typingTimestamp: 111111111, - }); - const plainText = message.plainTextBuffer(); - const decoded = SignalService.Content.decode(plainText); - const decodedtimestamp = toNumber(decoded.typingMessage?.timestamp); - expect(decodedtimestamp).to.be.equal(111111111); - }); - it('has typingTimestamp set with Date.now() if value not passed', () => { const message = new TypingMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), isTyping: true, }); const plainText = message.plainTextBuffer(); @@ -61,7 +48,7 @@ describe('TypingMessage', () => { it('correct ttl', () => { const message = new TypingMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), isTyping: true, }); expect(message.ttl()).to.equal(Constants.TTL_DEFAULT.TYPING_MESSAGE); @@ -69,7 +56,7 @@ describe('TypingMessage', () => { it('has an identifier', () => { const message = new TypingMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), isTyping: true, }); expect(message.identifier).to.not.equal(null, 'identifier cannot be null'); diff --git a/ts/test/session/unit/messages/closed_groups/ClosedGroupChatMessage_test.ts b/ts/test/session/unit/messages/closed_groups/ClosedGroupChatMessage_test.ts index 3cd04aae2d..309cf4b212 100644 --- a/ts/test/session/unit/messages/closed_groups/ClosedGroupChatMessage_test.ts +++ b/ts/test/session/unit/messages/closed_groups/ClosedGroupChatMessage_test.ts @@ -9,21 +9,20 @@ import { StringUtils } from '../../../../../session/utils'; import { TestUtils } from '../../../../test-utils'; describe('ClosedGroupVisibleMessage', () => { - let groupId: PubKey; + let groupId: string; beforeEach(() => { - groupId = TestUtils.generateFakePubKey(); + groupId = TestUtils.generateFakePubKeyStr(); }); it('can create empty message with timestamp, groupId and chatMessage', () => { - const timestamp = 
Date.now(); + const createAtNetworkTimestamp = Date.now(); const chatMessage = new VisibleMessage({ - timestamp, + createAtNetworkTimestamp, body: 'body', expirationType: null, expireTimer: null, }); const message = new ClosedGroupVisibleMessage({ groupId, - timestamp, chatMessage, }); const plainText = message.plainTextBuffer(); @@ -32,7 +31,7 @@ describe('ClosedGroupVisibleMessage', () => { .to.have.property('group') .to.have.deep.property( 'id', - new Uint8Array(StringUtils.encode(PubKey.PREFIX_GROUP_TEXTSECURE + groupId.key, 'utf8')) + new Uint8Array(StringUtils.encode(PubKey.PREFIX_GROUP_TEXTSECURE + groupId, 'utf8')) ); expect(decoded.dataMessage) .to.have.property('group') @@ -41,34 +40,32 @@ describe('ClosedGroupVisibleMessage', () => { expect(decoded.dataMessage).to.have.deep.property('body', 'body'); // we use the timestamp of the chatMessage as parent timestamp - expect(message).to.have.property('timestamp').to.be.equal(chatMessage.timestamp); + expect(message) + .to.have.property('createAtNetworkTimestamp') + .to.be.equal(chatMessage.createAtNetworkTimestamp); }); it('correct ttl', () => { - const timestamp = Date.now(); const chatMessage = new VisibleMessage({ - timestamp, + createAtNetworkTimestamp: Date.now(), expirationType: null, expireTimer: null, }); const message = new ClosedGroupVisibleMessage({ groupId, - timestamp, chatMessage, }); expect(message.ttl()).to.equal(Constants.TTL_DEFAULT.CONTENT_MESSAGE); }); it('has an identifier', () => { - const timestamp = Date.now(); const chatMessage = new VisibleMessage({ - timestamp, + createAtNetworkTimestamp: Date.now(), expirationType: null, expireTimer: null, }); const message = new ClosedGroupVisibleMessage({ groupId, - timestamp, chatMessage, }); expect(message.identifier).to.not.equal(null, 'identifier cannot be null'); @@ -76,27 +73,25 @@ describe('ClosedGroupVisibleMessage', () => { }); it('should use the identifier passed into it over the one set in chatMessage', () => { - const timestamp = Date.now(); + const createAtNetworkTimestamp = Date.now(); const chatMessage = new VisibleMessage({ - timestamp, + createAtNetworkTimestamp, body: 'body', - identifier: 'chatMessage', + identifier: 'closedGroupMessage', expirationType: null, expireTimer: null, }); const message = new ClosedGroupVisibleMessage({ groupId, - timestamp, chatMessage, - identifier: 'closedGroupMessage', }); expect(message.identifier).to.be.equal('closedGroupMessage'); }); it('should use the identifier of the chatMessage if one is not specified on the closed group message', () => { - const timestamp = Date.now(); + const createAtNetworkTimestamp = Date.now(); const chatMessage = new VisibleMessage({ - timestamp, + createAtNetworkTimestamp, body: 'body', identifier: 'chatMessage', expirationType: null, @@ -104,7 +99,6 @@ describe('ClosedGroupVisibleMessage', () => { }); const message = new ClosedGroupVisibleMessage({ groupId, - timestamp, chatMessage, }); expect(message.identifier).to.be.equal('chatMessage'); diff --git a/ts/test/session/unit/models/formatRowOfConversation_test.ts b/ts/test/session/unit/models/formatRowOfConversation_test.ts index cd54cf575b..504e528107 100644 --- a/ts/test/session/unit/models/formatRowOfConversation_test.ts +++ b/ts/test/session/unit/models/formatRowOfConversation_test.ts @@ -272,7 +272,7 @@ describe('formatRowOfConversation', () => { formatRowOfConversation( fillConvoAttributesWithDefaults({ id: '1234565', - type: ConversationTypeEnum.GROUPV3, + type: ConversationTypeEnum.GROUPV2, nickname: 'nickname', 
displayNameInProfile: 'displayNameInProfile', profileKey: '', diff --git a/ts/test/session/unit/onion/GuardNodes_test.ts b/ts/test/session/unit/onion/GuardNodes_test.ts index dee95c7726..caeae97d60 100644 --- a/ts/test/session/unit/onion/GuardNodes_test.ts +++ b/ts/test/session/unit/onion/GuardNodes_test.ts @@ -1,20 +1,21 @@ import chai from 'chai'; -import Sinon, * as sinon from 'sinon'; -import { describe } from 'mocha'; import chaiAsPromised from 'chai-as-promised'; +import { describe } from 'mocha'; +import Sinon, * as sinon from 'sinon'; import { TestUtils } from '../../../test-utils'; -import { Onions, SnodePool } from '../../../../session/apis/snode_api'; +import { Onions } from '../../../../session/apis/snode_api'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; +import { SeedNodeAPI } from '../../../../session/apis/seed_node_api'; import * as OnionPaths from '../../../../session/onions/onionPath'; import { generateFakeSnodes, generateFakeSnodeWithEdKey, stubData, } from '../../../test-utils/utils'; -import { SeedNodeAPI } from '../../../../session/apis/seed_node_api'; import { Snode } from '../../../../data/types'; -import { minSnodePoolCount } from '../../../../session/apis/snode_api/snodePool'; +import { SnodePoolConstants } from '../../../../session/apis/snode_api/snodePoolConstants'; chai.use(chaiAsPromised as any); chai.should(); @@ -100,11 +101,11 @@ describe('GuardNodes', () => { stubData('updateGuardNodes').resolves(); // run the command - let throwedError: string | undefined; + let error: string | undefined; try { await OnionPaths.selectGuardNodes(); } catch (e) { - throwedError = e.message; + error = e.message; } expect( @@ -118,12 +119,12 @@ describe('GuardNodes', () => { expect(testGuardNode.callCount, 'testGuardNode should have been called 12 times').to.be.eq( 12 ); - expect(throwedError).to.be.equal('selectGuardNodes stopping after attempts: 6'); + expect(error).to.be.equal('selectGuardNodes stopping after attempts: 6'); }); it('throws an error if we have to fetch from seed, fetch from seed enough snode but we still fail', async () => { - const invalidSndodePool = fakeSnodePool.slice(0, 11); - stubData('getSnodePoolFromDb').resolves(invalidSndodePool); + const invalidSnodePool = fakeSnodePool.slice(0, 11); + stubData('getSnodePoolFromDb').resolves(invalidSnodePool); TestUtils.stubWindow('getSeedNodeList', () => [{ url: 'whatever' }]); getSnodePoolFromDBOrFetchFromSeed = Sinon.stub( @@ -137,19 +138,19 @@ describe('GuardNodes', () => { stubData('updateGuardNodes').resolves(); // run the command - let throwedError: string | undefined; + let error: string | undefined; try { await OnionPaths.selectGuardNodes(); } catch (e) { - throwedError = e.message; + error = e.message; } - expect(throwedError).to.be.equal('selectGuardNodes stopping after attempts: 6'); + expect(error).to.be.equal('selectGuardNodes stopping after attempts: 6'); }); - it('returns valid guardnode if we have to fetch from seed, fetch from seed enough snodes but guard node tests passes', async () => { - const invalidSndodePool = fakeSnodePool.slice(0, 11); - stubData('getSnodePoolFromDb').resolves(invalidSndodePool); + it('returns valid guard node if we have to fetch from seed, fetch from seed enough snodes but guard node tests passes', async () => { + const invalidSnodePool = fakeSnodePool.slice(0, 11); + stubData('getSnodePoolFromDb').resolves(invalidSnodePool); TestUtils.stubWindow('getSeedNodeList', () => [{ url: 'whatever' }]); const testGuardNode = Sinon.stub(OnionPaths, 
'testGuardNode').resolves(true); @@ -172,7 +173,7 @@ describe('GuardNodes', () => { }); it('throws if we have to fetch from seed, fetch from seed but not have enough fetched snodes', async () => { - const invalidLength = minSnodePoolCount - 1; + const invalidLength = SnodePoolConstants.minSnodePoolCount - 1; const invalidSnodePool = fakeSnodePool.slice(0, invalidLength); stubData('getSnodePoolFromDb').resolves(invalidSnodePool); TestUtils.stubWindow('getSeedNodeList', () => [{ url: 'whatever' }]); @@ -188,13 +189,13 @@ describe('GuardNodes', () => { stubData('updateGuardNodes').resolves(); // run the command - let throwedError: string | undefined; + let error: string | undefined; try { await OnionPaths.selectGuardNodes(); } catch (e) { - throwedError = e.message; + error = e.message; } - expect(throwedError).to.be.equal( + expect(error).to.be.equal( 'Could not select guard nodes. Not enough nodes in the pool: 7' // this is invalidLength but we want this test to fail if we change minSnodePoolCount ); }); diff --git a/ts/test/session/unit/onion/OnionErrors_test.ts b/ts/test/session/unit/onion/OnionErrors_test.ts index 59c70ed04a..a77b2123f5 100644 --- a/ts/test/session/unit/onion/OnionErrors_test.ts +++ b/ts/test/session/unit/onion/OnionErrors_test.ts @@ -16,6 +16,7 @@ import { Onions, OXEN_SERVER_ERROR, } from '../../../../session/apis/snode_api/onions'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; import { OnionPaths } from '../../../../session/onions'; import { pathFailureCount } from '../../../../session/onions/onionPath'; import { generateFakeSnodeWithEdKey, stubData } from '../../../test-utils/utils'; @@ -34,7 +35,7 @@ const getFakeResponseOnPath = (statusCode?: number, body?: string) => { const getFakeResponseOnDestination = (statusCode?: number, body?: string) => { return { - status: 200 || 0, + status: 200, text: async () => { return JSON.stringify({ status: statusCode, body: body || '' }); }, @@ -88,7 +89,7 @@ describe('OnionPathsErrors', () => { guardPubkeys[1], guardPubkeys[2], ]); - TestUtils.stubWindow('getSeedNodeList', () => ['seednode1']); + TestUtils.stubWindow('getSeedNodeList', () => ['whatever']); Sinon.stub(SeedNodeAPI, 'fetchSnodePoolFromSeedNodeWithRetries').resolves(fakeSnodePool); stubData('getSwarmNodesForPubkey').resolves(fakeSwarmForAssociatedWith); updateGuardNodesStub = stubData('updateGuardNodes').resolves(); @@ -97,8 +98,8 @@ describe('OnionPathsErrors', () => { updateSwarmSpy = stubData('updateSwarmNodesForPubkey').resolves(); stubData('getItemById').resolves({ id: SNODE_POOL_ITEM_ID, value: '' }); stubData('createOrUpdateItem').resolves(); - dropSnodeFromSnodePool = Sinon.spy(SnodeAPI.SnodePool, 'dropSnodeFromSnodePool'); - dropSnodeFromSwarmIfNeededSpy = Sinon.spy(SnodeAPI.SnodePool, 'dropSnodeFromSwarmIfNeeded'); + dropSnodeFromSnodePool = Sinon.spy(SnodePool, 'dropSnodeFromSnodePool'); + dropSnodeFromSwarmIfNeededSpy = Sinon.spy(SnodePool, 'dropSnodeFromSwarmIfNeeded'); dropSnodeFromPathSpy = Sinon.spy(OnionPaths, 'dropSnodeFromPath'); incrementBadPathCountOrDropSpy = Sinon.spy(OnionPaths, 'incrementBadPathCountOrDrop'); incrementBadSnodeCountOrDropSpy = Sinon.spy(Onions, 'incrementBadSnodeCountOrDrop'); @@ -110,12 +111,13 @@ describe('OnionPathsErrors', () => { await OnionPaths.getOnionPath({}); oldOnionPaths = OnionPaths.TEST_getTestOnionPath(); - Sinon.stub(Onions, 'decodeOnionResult').callsFake((_symkey: ArrayBuffer, plaintext: string) => - Promise.resolve({ - plaintext, - ciphertextBuffer: new Uint8Array(), - 
plaintextBuffer: Buffer.alloc(0), - }) + Sinon.stub(Onions, 'decodeOnionResult').callsFake( + (_symmetricKey: ArrayBuffer, plaintext: string) => + Promise.resolve({ + plaintext, + ciphertextBuffer: new Uint8Array(), + plaintextBuffer: Buffer.alloc(0), + }) ); }); @@ -133,6 +135,7 @@ describe('OnionPathsErrors', () => { symmetricKey: new Uint8Array(), guardNode: guardSnode1, abortSignal: abortController.signal, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -148,6 +151,7 @@ describe('OnionPathsErrors', () => { response: getFakeResponseOnDestination(200), symmetricKey: new Uint8Array(), guardNode: guardSnode1, + allow401s: false, }); throw new Error('Did not throw'); } catch (e) { @@ -166,6 +170,7 @@ describe('OnionPathsErrors', () => { response: getFakeResponseOnDestination(), symmetricKey: new Uint8Array(), guardNode: guardSnode1, + allow401s: false, }); throw new Error('Did not throw'); } catch (e) { @@ -185,6 +190,7 @@ describe('OnionPathsErrors', () => { response: getFakeResponseOnPath(406), symmetricKey: new Uint8Array(), guardNode: guardSnode1, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -206,6 +212,7 @@ describe('OnionPathsErrors', () => { response: getFakeResponseOnDestination(406), symmetricKey: new Uint8Array(), guardNode: guardSnode1, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -230,6 +237,7 @@ describe('OnionPathsErrors', () => { response: getFakeResponseOnPath(425), symmetricKey: new Uint8Array(), guardNode: guardSnode1, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -251,6 +259,7 @@ describe('OnionPathsErrors', () => { response: getFakeResponseOnDestination(425), symmetricKey: new Uint8Array(), guardNode: guardSnode1, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -281,6 +290,7 @@ describe('OnionPathsErrors', () => { destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -298,7 +308,7 @@ describe('OnionPathsErrors', () => { expect(dropSnodeFromSwarmIfNeededSpy.firstCall.args[0]).to.eq(associatedWith); expect(dropSnodeFromSwarmIfNeededSpy.firstCall.args[1]).to.eq(targetNode); - // this node failed only once. it should not be dropped yet from the snodepool + // this node failed only once. it should not be dropped yet from the snode pool expect(dropSnodeFromSnodePool.callCount).to.eq(0); expect(dropSnodeFromPathSpy.callCount).to.eq(0); expect(incrementBadPathCountOrDropSpy.callCount).to.eq(0); @@ -329,6 +339,7 @@ describe('OnionPathsErrors', () => { guardNode: guardSnode1, destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -341,8 +352,8 @@ describe('OnionPathsErrors', () => { // we got a new swarm for this pubkey. so it's OK that dropSnodeFromSwarm was not called for this pubkey - // this node failed only once. it should not be dropped yet from the snodepool - // this node failed only once. it should not be dropped yet from the snodepool + // this node failed only once. it should not be dropped yet from the snode pool + // this node failed only once. 
it should not be dropped yet from the snode pool expect(dropSnodeFromSnodePool.callCount).to.eq(0); expect(dropSnodeFromPathSpy.callCount).to.eq(0); expect(incrementBadPathCountOrDropSpy.callCount).to.eq(0); @@ -364,6 +375,7 @@ describe('OnionPathsErrors', () => { destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -379,7 +391,7 @@ describe('OnionPathsErrors', () => { expect(dropSnodeFromSwarmIfNeededSpy.callCount).to.eq(1); expect(dropSnodeFromSwarmIfNeededSpy.firstCall.args[0]).to.eq(associatedWith); expect(dropSnodeFromSwarmIfNeededSpy.firstCall.args[1]).to.eq(targetNode); - // this node failed only once. it should not be dropped yet from the snodepool + // this node failed only once. it should not be dropped yet from the snode pool expect(dropSnodeFromSnodePool.callCount).to.eq(0); expect(dropSnodeFromPathSpy.callCount).to.eq(0); expect(incrementBadPathCountOrDropSpy.callCount).to.eq(0); @@ -401,6 +413,7 @@ describe('OnionPathsErrors', () => { guardNode: guardSnode1, destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -418,7 +431,7 @@ describe('OnionPathsErrors', () => { expect(dropSnodeFromSwarmIfNeededSpy.firstCall.args[0]).to.eq(associatedWith); expect(dropSnodeFromSwarmIfNeededSpy.firstCall.args[1]).to.eq(targetNode); - // this node failed only once. it should not be dropped yet from the snodepool + // this node failed only once. it should not be dropped yet from the snode pool expect(dropSnodeFromSnodePool.callCount).to.eq(0); expect(dropSnodeFromPathSpy.callCount).to.eq(0); expect(incrementBadPathCountOrDropSpy.callCount).to.eq(0); @@ -436,7 +449,7 @@ describe('OnionPathsErrors', () => { * processOnionResponse OXEN SERVER ERROR */ describe('processOnionResponse - OXEN_SERVER_ERROR', () => { - // open group server v2 only talkes onion routing request. So errors can only happen at destination + // open group server v2 only talks onion routing request. So errors can only happen at destination it('throws a non-retryable error on oxen server errors on destination', async () => { const targetNode = otherNodesPubkeys[0]; @@ -448,6 +461,7 @@ describe('OnionPathsErrors', () => { destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -470,7 +484,7 @@ describe('OnionPathsErrors', () => { * processOnionResponse OXEN SERVER ERROR */ describe('processOnionResponse - 502 - node not found', () => { - // open group server v2 only talkes onion routing request. So errors can only happen at destination + // open group server v2 only talks onion routing request. 
So errors can only happen at destination it('throws a retryable error on 502 on intermediate snode', async () => { const targetNode = otherNodesPubkeys[0]; const failingSnode = oldOnionPaths[0][1]; @@ -484,6 +498,7 @@ describe('OnionPathsErrors', () => { guardNode: guardSnode1, destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -525,6 +540,7 @@ describe('OnionPathsErrors', () => { guardNode: guardSnode1, destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -565,6 +581,7 @@ describe('OnionPathsErrors', () => { guardNode: guardSnode1, destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -607,6 +624,7 @@ describe('OnionPathsErrors', () => { guardNode, destinationSnodeEd25519: targetNode, associatedWith, + allow401s: false, }); throw new Error('Error expected'); } catch (e) { @@ -615,7 +633,7 @@ describe('OnionPathsErrors', () => { if (index < 2) { expect(pathFailureCount[guardNode.pubkey_ed25519]).to.eq(index + 1); } else { - // pathFailureCount is reset once we hit 3 for this guardnode + // pathFailureCount is reset once we hit 3 for this guard node expect(pathFailureCount[guardNode.pubkey_ed25519]).to.eq(0); } } @@ -635,7 +653,7 @@ describe('OnionPathsErrors', () => { } expect(updateGuardNodesStub.callCount).to.eq(1); - // we dont know which snode failed so don't exclude any of those from swarms + // we don't know which snode failed so don't exclude any of those from swarms expect(dropSnodeFromSwarmIfNeededSpy.callCount).to.eq(0); expect(guardNode.pubkey_ed25519).to.eq(incrementBadPathCountOrDropSpy.args[0][0]); diff --git a/ts/test/session/unit/onion/OnionPaths_test.ts b/ts/test/session/unit/onion/OnionPaths_test.ts index 8bbec24d13..c525c4ea32 100644 --- a/ts/test/session/unit/onion/OnionPaths_test.ts +++ b/ts/test/session/unit/onion/OnionPaths_test.ts @@ -16,7 +16,7 @@ import { } from '../../../test-utils/utils'; import { SeedNodeAPI } from '../../../../session/apis/seed_node_api'; import { ServiceNodesList } from '../../../../session/apis/snode_api/getServiceNodesList'; -import { TEST_resetState } from '../../../../session/apis/snode_api/snodePool'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; chai.use(chaiAsPromised as any); chai.should(); @@ -134,7 +134,7 @@ describe('OnionPaths', () => { TestUtils.stubWindow('getSeedNodeList', () => ['seednode1']); TestUtils.stubWindowLog(); - TEST_resetState(); + SnodePool.TEST_resetState(); fetchSnodePoolFromSeedNodeWithRetries = Sinon.stub( SeedNodeAPI, diff --git a/ts/test/session/unit/onion/SeedNodeAPI_test.ts b/ts/test/session/unit/onion/SeedNodeAPI_test.ts index 108c47d481..4f2ef10be6 100644 --- a/ts/test/session/unit/onion/SeedNodeAPI_test.ts +++ b/ts/test/session/unit/onion/SeedNodeAPI_test.ts @@ -3,9 +3,11 @@ import chaiAsPromised from 'chai-as-promised'; import { describe } from 'mocha'; import Sinon from 'sinon'; -import { Onions, SnodePool } from '../../../../session/apis/snode_api'; +import { Onions } from '../../../../session/apis/snode_api'; import { TestUtils } from '../../../test-utils'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; + import { Snode } from '../../../../data/types'; import { SeedNodeAPI } from '../../../../session/apis/seed_node_api'; import { SnodeFromSeed } from '../../../../session/apis/seed_node_api/SeedNodeAPI'; diff --git 
a/ts/test/session/unit/onion/SnodeNamespace_test.ts b/ts/test/session/unit/onion/SnodeNamespace_test.ts index 11fd317801..6d491c5510 100644 --- a/ts/test/session/unit/onion/SnodeNamespace_test.ts +++ b/ts/test/session/unit/onion/SnodeNamespace_test.ts @@ -1,29 +1,45 @@ import { expect } from 'chai'; import Sinon from 'sinon'; -import { SnodeNamespace } from '../../../../session/apis/snode_api/namespaces'; +import { SnodeNamespace, SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; -describe('Snode namespaces', () => { - describe('maxSizeMap', () => { - afterEach(() => { - Sinon.restore(); - }); +describe('maxSizeMap', () => { + afterEach(() => { + Sinon.restore(); + }); - it('single namespace 0 returns -1', () => { - expect(SnodeNamespace.maxSizeMap([0])).to.be.deep.eq([{ namespace: 0, maxSize: -1 }]); - }); + it('single namespace 0 returns -1', () => { + expect(SnodeNamespace.maxSizeMap([0])).to.be.deep.eq([{ namespace: 0, maxSize: -1 }]); + }); - it('single namespace config 5 returns -1', () => { - expect(SnodeNamespace.maxSizeMap([5])).to.be.deep.eq([{ namespace: 5, maxSize: -1 }]); - }); + it('single namespace config 5 returns -1', () => { + expect(SnodeNamespace.maxSizeMap([5])).to.be.deep.eq([{ namespace: 5, maxSize: -1 }]); + }); + + it('multiple namespaces config 0,2,3,4,5 returns [-2,-8,-8,-8,-8]', () => { + expect(SnodeNamespace.maxSizeMap([0, 2, 3, 4, 5])).to.be.deep.eq([ + { namespace: 0, maxSize: -2 }, // 0 has a priority of 10 so takes its own bucket + { namespace: 2, maxSize: -8 }, // the 4 other ones are sharing the next bucket + { namespace: 3, maxSize: -8 }, + { namespace: 4, maxSize: -8 }, + { namespace: 5, maxSize: -8 }, + ]); + }); - it('multiple namespaces config 0,2,3,4,5 returns [-2,-8,-8,-8,-8]', () => { - expect(SnodeNamespace.maxSizeMap([0, 2, 3, 4, 5])).to.be.deep.eq([ - { namespace: 0, maxSize: -2 }, // 0 has a priority of 10 so takes its own bucket - { namespace: 2, maxSize: -8 }, // the 4 other ones are sharing the next bucket - { namespace: 3, maxSize: -8 }, - { namespace: 4, maxSize: -8 }, - { namespace: 5, maxSize: -8 }, - ]); - }); + it('multiple namespaces config for is correct', () => { + expect( + SnodeNamespace.maxSizeMap([ + SnodeNamespaces.ClosedGroupMessages, + SnodeNamespaces.ClosedGroupInfo, + SnodeNamespaces.ClosedGroupMembers, + SnodeNamespaces.ClosedGroupKeys, + SnodeNamespaces.ClosedGroupRevokedRetrievableMessages, + ]) + ).to.be.deep.eq([ + { namespace: SnodeNamespaces.ClosedGroupMessages, maxSize: -2 }, // message has a priority of 10 so takes its own bucket + { namespace: SnodeNamespaces.ClosedGroupInfo, maxSize: -8 }, // the other ones are sharing the next bucket + { namespace: SnodeNamespaces.ClosedGroupMembers, maxSize: -8 }, + { namespace: SnodeNamespaces.ClosedGroupKeys, maxSize: -8 }, + { namespace: SnodeNamespaces.ClosedGroupRevokedRetrievableMessages, maxSize: -8 }, + ]); }); }); diff --git a/ts/test/session/unit/onion/SnodePoolUpdate_test.ts b/ts/test/session/unit/onion/SnodePoolUpdate_test.ts index afce0a7935..fbca7711bd 100644 --- a/ts/test/session/unit/onion/SnodePoolUpdate_test.ts +++ b/ts/test/session/unit/onion/SnodePoolUpdate_test.ts @@ -3,11 +3,13 @@ import chaiAsPromised from 'chai-as-promised'; import { describe } from 'mocha'; import Sinon, * as sinon from 'sinon'; -import { Snode } from '../../../../data/types'; -import { Onions, SnodePool } from '../../../../session/apis/snode_api'; +import { Onions } from '../../../../session/apis/snode_api'; import { TestUtils } from '../../../test-utils'; 
import { SeedNodeAPI } from '../../../../session/apis/seed_node_api'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; +import { Snode } from '../../../../data/types'; + import * as OnionPaths from '../../../../session/onions/onionPath'; import { generateFakeSnodes, diff --git a/ts/test/session/unit/reactions/ReactionMessage_test.ts b/ts/test/session/unit/reactions/ReactionMessage_test.ts index 256a736586..01be5a7f36 100644 --- a/ts/test/session/unit/reactions/ReactionMessage_test.ts +++ b/ts/test/session/unit/reactions/ReactionMessage_test.ts @@ -1,19 +1,20 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable no-unused-expressions */ import chai, { expect } from 'chai'; -import Sinon, { useFakeTimers } from 'sinon'; -import { noop } from 'lodash'; import chaiAsPromised from 'chai-as-promised'; +import { noop } from 'lodash'; +import Sinon, { useFakeTimers } from 'sinon'; -import { Reactions } from '../../../../util/reactions'; import { Data } from '../../../../data/data'; +import { DEFAULT_RECENT_REACTS } from '../../../../session/constants'; +import { Reactions } from '../../../../util/reactions'; import * as Storage from '../../../../util/storage'; import { generateFakeIncomingPrivateMessage, stubWindowLog } from '../../../test-utils/utils'; -import { DEFAULT_RECENT_REACTS } from '../../../../session/constants'; -import { UserUtils } from '../../../../session/utils'; -import { SignalService } from '../../../../protobuf'; import { MessageCollection } from '../../../../models/message'; +import { SignalService } from '../../../../protobuf'; +import { UserUtils } from '../../../../session/utils'; +import { TestUtils } from '../../../test-utils'; chai.use(chaiAsPromised as any); @@ -21,7 +22,7 @@ describe('ReactionMessage', () => { stubWindowLog(); let clock: Sinon.SinonFakeTimers; - const ourNumber = '0123456789abcdef'; + const ourNumber = TestUtils.generateFakePubKeyStr(); const originalMessage = generateFakeIncomingPrivateMessage(); originalMessage.set('sent_at', Date.now()); diff --git a/ts/test/session/unit/receiving/ConfigurationMessage_test.ts b/ts/test/session/unit/receiving/ConfigurationMessage_test.ts deleted file mode 100644 index 9466c1d3b8..0000000000 --- a/ts/test/session/unit/receiving/ConfigurationMessage_test.ts +++ /dev/null @@ -1,97 +0,0 @@ -import chai from 'chai'; -import Sinon from 'sinon'; -import chaiAsPromised from 'chai-as-promised'; - -import { SignalService } from '../../../../protobuf'; - -import { ConfigurationMessage } from '../../../../session/messages/outgoing/controlMessage/ConfigurationMessage'; -import { UserUtils } from '../../../../session/utils'; -import { TestUtils } from '../../../test-utils'; - -import * as cache from '../../../../receiver/cache'; -import { EnvelopePlus } from '../../../../receiver/types'; - -import { ConfigMessageHandler } from '../../../../receiver/configMessage'; -import { ConfigurationSync } from '../../../../session/utils/job_runners/jobs/ConfigurationSyncJob'; -import { ReleasedFeatures } from '../../../../util/releaseFeature'; -import { stubData } from '../../../test-utils/utils'; - -chai.use(chaiAsPromised as any); -chai.should(); - -const { expect } = chai; - -describe('handleConfigurationMessageLegacy_receiving', () => { - let createOrUpdateStub: Sinon.SinonStub; - let getItemByIdStub: Sinon.SinonStub; - let sender: string; - - let envelope: EnvelopePlus; - let config: ConfigurationMessage; - - beforeEach(() => { - TestUtils.stubWindowFeatureFlags(); - Sinon.stub(cache, 
'removeFromCache').resolves(); - sender = TestUtils.generateFakePubKey().key; - config = new ConfigurationMessage({ - activeOpenGroups: [], - activeClosedGroups: [], - timestamp: Date.now(), - identifier: 'identifier', - displayName: 'displayName', - contacts: [], - }); - Sinon.stub(ConfigurationSync, 'queueNewJobIfNeeded').resolves(); - TestUtils.stubWindow('setSettingValue', () => undefined); - }); - - afterEach(() => { - Sinon.restore(); - }); - - it('should not be processed if we do not have a pubkey', async () => { - TestUtils.stubWindowLog(); - Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').resolves(undefined); - - envelope = TestUtils.generateEnvelopePlus(sender); - - const proto = config.contentProto(); - createOrUpdateStub = stubData('createOrUpdateItem').resolves(); - getItemByIdStub = stubData('getItemById').resolves(); - const checkIsUserConfigFeatureReleasedStub = Sinon.stub( - ReleasedFeatures, - 'checkIsUserConfigFeatureReleased' - ).resolves(false); - await ConfigMessageHandler.handleConfigurationMessageLegacy( - envelope, - proto.configurationMessage as SignalService.ConfigurationMessage - ); - - expect(createOrUpdateStub.callCount).to.equal(0); - expect(getItemByIdStub.callCount).to.equal(0); - expect(checkIsUserConfigFeatureReleasedStub.callCount).to.be.eq(1); // should only have the one as part of the global legacy check, but none for the smaller handlers - }); - - describe('with ourNumber set', () => { - const ourNumber = TestUtils.generateFakePubKey().key; - - beforeEach(() => { - Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').resolves(ourNumber); - }); - - it('should not be processed if the message is not coming from our number', async () => { - const proto = config.contentProto(); - // sender !== ourNumber - envelope = TestUtils.generateEnvelopePlus(sender); - Sinon.stub(ReleasedFeatures, 'checkIsUserConfigFeatureReleased').resolves(false); - createOrUpdateStub = stubData('createOrUpdateItem').resolves(); - getItemByIdStub = stubData('getItemById').resolves(); - await ConfigMessageHandler.handleConfigurationMessageLegacy( - envelope, - proto.configurationMessage as SignalService.ConfigurationMessage - ); - expect(createOrUpdateStub.callCount).to.equal(0); - expect(getItemByIdStub.callCount).to.equal(0); - }); - }); -}); diff --git a/ts/test/session/unit/selectors/conversations_test.ts b/ts/test/session/unit/selectors/conversations_test.ts index 80c43ada60..8bf08156b8 100644 --- a/ts/test/session/unit/selectors/conversations_test.ts +++ b/ts/test/session/unit/selectors/conversations_test.ts @@ -33,7 +33,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, isPublic: false, currentNotificationSetting: 'all', weAreAdmin: false, @@ -57,7 +56,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, isPublic: false, currentNotificationSetting: 'all', weAreAdmin: false, @@ -80,7 +78,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, isPublic: false, currentNotificationSetting: 'all', weAreAdmin: false, @@ -103,7 +100,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, isPublic: false, currentNotificationSetting: 'all', weAreAdmin: false, @@ -126,13 +122,11 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: 
false, - left: false, isPublic: false, expireTimer: 0, currentNotificationSetting: 'all', weAreAdmin: false, isPrivate: false, - avatarPath: '', groupAdmins: [], lastMessage: undefined, @@ -165,7 +159,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, expireTimer: 0, currentNotificationSetting: 'all', weAreAdmin: false, @@ -190,7 +183,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, expireTimer: 0, currentNotificationSetting: 'all', weAreAdmin: false, @@ -215,7 +207,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, expireTimer: 0, currentNotificationSetting: 'all', weAreAdmin: false, @@ -240,7 +231,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, expireTimer: 0, currentNotificationSetting: 'all', weAreAdmin: false, @@ -264,7 +254,6 @@ describe('state/selectors/conversations', () => { isTyping: false, isBlocked: false, isKickedFromGroup: false, - left: false, expireTimer: 0, currentNotificationSetting: 'all', diff --git a/ts/test/session/unit/sending/MessageQueue_test.ts b/ts/test/session/unit/sending/MessageQueue_test.ts index c299cbd767..8d129e020e 100644 --- a/ts/test/session/unit/sending/MessageQueue_test.ts +++ b/ts/test/session/unit/sending/MessageQueue_test.ts @@ -6,25 +6,29 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable no-unreachable-loop */ /* eslint-disable no-restricted-syntax */ -import { randomBytes } from 'crypto'; import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; import { describe } from 'mocha'; import Sinon, * as sinon from 'sinon'; +import { PubkeyType } from 'libsession_util_nodejs'; +import { randombytes_buf } from 'libsodium-wrappers-sumo'; import { ContentMessage } from '../../../../session/messages/outgoing'; import { ClosedGroupMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupMessage'; import { MessageSender } from '../../../../session/sending'; -import { MessageQueue } from '../../../../session/sending/MessageQueue'; +import { MessageQueueCl } from '../../../../session/sending/MessageQueue'; import { PubKey } from '../../../../session/types'; -import { GroupUtils, PromiseUtils, UserUtils } from '../../../../session/utils'; +import { PromiseUtils, UserUtils } from '../../../../session/utils'; import { TestUtils } from '../../../test-utils'; import { PendingMessageCacheStub } from '../../../test-utils/stubs'; import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; import { MessageSentHandler } from '../../../../session/sending/MessageSentHandler'; -import { stubData } from '../../../test-utils/utils'; +import { TypedStub, generateFakeSnode, stubData } from '../../../test-utils/utils'; +import { MessageWrapper } from '../../../../session/sending/MessageWrapper'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; +import { BatchRequests } from '../../../../session/apis/snode_api/batchRequest'; chai.use(chaiAsPromised as any); chai.should(); @@ -34,14 +38,28 @@ const { expect } = chai; describe('MessageQueue', () => { // Initialize new stubbed cache const ourDevice = TestUtils.generateFakePubKey(); - const ourNumber = ourDevice.key; + const ourNumber = ourDevice.key as PubkeyType; // Initialize new stubbed queue let 
pendingMessageCache: PendingMessageCacheStub; - let messageSentHandlerFailedStub: sinon.SinonStub; - let messageSentHandlerSuccessStub: sinon.SinonStub; - let messageSentPublicHandlerSuccessStub: sinon.SinonStub; - let messageQueueStub: MessageQueue; + let messageSentHandlerFailedStub: TypedStub< + typeof MessageSentHandler, + 'handleSwarmMessageSentFailure' + >; + let messageSentHandlerSuccessStub: TypedStub< + typeof MessageSentHandler, + 'handleSwarmMessageSentSuccess' + >; + let messageSentPublicHandlerSuccessStub: TypedStub< + typeof MessageSentHandler, + 'handlePublicMessageSentSuccess' + >; + let handlePublicMessageSentFailureStub: TypedStub< + typeof MessageSentHandler, + 'handlePublicMessageSentFailure' + >; + + let messageQueueStub: MessageQueueCl; // Message Sender Stubs let sendStub: sinon.SinonStub; @@ -51,23 +69,27 @@ describe('MessageQueue', () => { Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); // Message Sender Stubs - sendStub = Sinon.stub(MessageSender, 'send'); + sendStub = Sinon.stub(MessageSender, 'sendSingleMessage'); messageSentHandlerFailedStub = Sinon.stub( MessageSentHandler, - 'handleMessageSentFailure' + 'handleSwarmMessageSentFailure' ).resolves(); messageSentHandlerSuccessStub = Sinon.stub( MessageSentHandler, - 'handleMessageSentSuccess' + 'handleSwarmMessageSentSuccess' ).resolves(); messageSentPublicHandlerSuccessStub = Sinon.stub( MessageSentHandler, 'handlePublicMessageSentSuccess' ).resolves(); + handlePublicMessageSentFailureStub = Sinon.stub( + MessageSentHandler, + 'handlePublicMessageSentFailure' + ).resolves(); // Init Queue pendingMessageCache = new PendingMessageCacheStub(); - messageQueueStub = new MessageQueue(pendingMessageCache); + messageQueueStub = new MessageQueueCl(pendingMessageCache); TestUtils.stubWindowLog(); }); @@ -107,7 +129,7 @@ describe('MessageQueue', () => { await pendingMessageCache.add( device, TestUtils.generateVisibleMessage(), - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); const initialMessages = await pendingMessageCache.getForDevice(device); @@ -125,11 +147,32 @@ describe('MessageQueue', () => { describe('events', () => { it('should send a success event if message was sent', done => { stubData('getMessageById').resolves(); + TestUtils.stubWindowLog(); const message = TestUtils.generateVisibleMessage(); - sendStub.resolves({ effectiveTimestamp: Date.now(), wrappedEnvelope: randomBytes(10) }); + sendStub.restore(); const device = TestUtils.generateFakePubKey(); + stubData('saveSeenMessageHashes').resolves(); Sinon.stub(MessageSender, 'getMinRetryTimeout').returns(10); + Sinon.stub(MessageSender, 'destinationIsClosedGroup').returns(false); + Sinon.stub(SnodePool, 'getNodeFromSwarmOrThrow').resolves(generateFakeSnode()); + Sinon.stub(BatchRequests, 'doUnsignedSnodeBatchRequestNoRetries').resolves([ + { + body: { t: message.createAtNetworkTimestamp, hash: 'whatever', code: 200 }, + code: 200, + }, + ]); + Sinon.stub(MessageWrapper, 'encryptMessagesAndWrap').resolves([ + { + encryptedAndWrappedData: randombytes_buf(100), + identifier: message.identifier, + isSyncMessage: false, + namespace: SnodeNamespaces.Default, + networkTimestamp: message.createAtNetworkTimestamp, + plainTextBuffer: message.plainTextBuffer(), + ttl: message.ttl(), + }, + ]); const waitForMessageSentEvent = async () => new Promise(resolve => { resolve(); @@ -140,12 +183,13 @@ describe('MessageQueue', () => { ); done(); } catch (e) { + console.warn('messageSentHandlerSuccessStub was not called, but should have been'); 
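// forwarding the caught assertion error to done() below makes mocha report which expectation failed instead of timing out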
done(e); } }); void pendingMessageCache - .add(device, message, SnodeNamespaces.UserMessages, waitForMessageSentEvent) + .add(device, message, SnodeNamespaces.Default, waitForMessageSentEvent) .then(() => messageQueueStub.processPending(device)); }); @@ -155,7 +199,7 @@ const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); void pendingMessageCache - .add(device, message, SnodeNamespaces.UserMessages) + .add(device, message, SnodeNamespaces.Default) .then(() => messageQueueStub.processPending(device)); // The cb is only invoked if all retries fail. Here we poll until messageSentHandlerFailed has been invoked, as that is what we expect here @@ -183,7 +227,7 @@ const stub = Sinon.stub(messageQueueStub as any, 'process').resolves(); const message = TestUtils.generateVisibleMessage(); - await messageQueueStub.sendToPubKey(device, message, SnodeNamespaces.UserMessages); + await messageQueueStub.sendToPubKey(device, message, SnodeNamespaces.Default); const args = stub.lastCall.args as [Array, ContentMessage]; expect(args[0]).to.be.equal(device); @@ -197,22 +241,19 @@ return expect( messageQueueStub.sendToGroup({ message: chatMessage as any, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }) ).to.be.rejectedWith('Invalid group message passed in sendToGroup.'); }); describe('closed groups', () => { it('can send to closed group', async () => { - const members = TestUtils.generateFakePubKeys(4); - Sinon.stub(GroupUtils, 'getGroupMembers').returns(members); - const send = Sinon.stub(messageQueueStub, 'sendToPubKey').resolves(); const message = TestUtils.generateClosedGroupMessage(); await messageQueueStub.sendToGroup({ message, - namespace: SnodeNamespaces.ClosedGroupMessage, + namespace: SnodeNamespaces.LegacyClosedGroup, }); expect(send.callCount).to.equal(1); @@ -269,6 +310,7 @@ it('should emit a fail event if something went wrong', async () => { sendToOpenGroupV2Stub.resolves({ serverId: -1, serverTimestamp: -1 }); + stubData('getMessageById').resolves(); const message = TestUtils.generateOpenGroupVisibleMessage(); const roomInfos = TestUtils.generateOpenGroupV2RoomInfos(); @@ -278,8 +320,8 @@ blinded: false, filesToLink: [], }); - expect(messageSentHandlerFailedStub.callCount).to.equal(1); - expect(messageSentHandlerFailedStub.lastCall.args[0].identifier).to.equal( + expect(handlePublicMessageSentFailureStub.callCount).to.equal(1); + expect(handlePublicMessageSentFailureStub.lastCall.args[0].identifier).to.equal( message.identifier ); }); diff --git a/ts/test/session/unit/sending/MessageSender_test.ts b/ts/test/session/unit/sending/MessageSender_test.ts index efbac68899..a8d21944a0 100644 --- a/ts/test/session/unit/sending/MessageSender_test.ts +++ b/ts/test/session/unit/sending/MessageSender_test.ts @@ -1,4 +1,5 @@ import { expect } from 'chai'; +// eslint-disable-next-line import/order import * as crypto from 'crypto'; import _ from 'lodash'; import Sinon, * as sinon from 'sinon'; @@ -6,20 +7,28 @@ import { SignalService } from '../../../../protobuf'; import { OpenGroupMessageV2 } from '../../../../session/apis/open_group_api/opengroupV2/OpenGroupMessageV2'; import { OpenGroupPollingUtils } from '../../../../session/apis/open_group_api/opengroupV2/OpenGroupPollingUtils'; import { SogsBlinding } from
'../../../../session/apis/open_group_api/sogsv3/sogsBlinding'; -import { GetNetworkTime } from '../../../../session/apis/snode_api/getNetworkTime'; +import { BatchRequests } from '../../../../session/apis/snode_api/batchRequest'; import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; import { Onions } from '../../../../session/apis/snode_api/onions'; -import { getConversationController } from '../../../../session/conversations/ConversationController'; -import { MessageEncrypter } from '../../../../session/crypto'; +import { ConvoHub } from '../../../../session/conversations/ConversationController'; import { OnionSending } from '../../../../session/onions/onionSend'; import { OnionV4 } from '../../../../session/onions/onionv4'; import { MessageSender } from '../../../../session/sending'; -import { PubKey, RawMessage } from '../../../../session/types'; +import { OutgoingRawMessage, PubKey } from '../../../../session/types'; import { MessageUtils, UserUtils } from '../../../../session/utils'; import { fromBase64ToArrayBuffer } from '../../../../session/utils/String'; import { TestUtils } from '../../../test-utils'; -import { stubCreateObjectUrl, stubData, stubUtilWorker } from '../../../test-utils/utils'; +import { + TypedStub, + expectAsyncToThrow, + stubCreateObjectUrl, + stubData, + stubUtilWorker, + stubValidSnodeSwarm, +} from '../../../test-utils/utils'; import { TEST_identityKeyPair } from '../crypto/MessageEncrypter_test'; +import { MessageEncrypter } from '../../../../session/crypto/MessageEncrypter'; +import { NetworkTime } from '../../../../util/NetworkTime'; describe('MessageSender', () => { afterEach(() => { @@ -29,22 +38,26 @@ describe('MessageSender', () => { beforeEach(async () => { TestUtils.stubWindowLog(); TestUtils.stubWindowFeatureFlags(); - getConversationController().reset(); + ConvoHub.use().reset(); TestUtils.stubData('getItemById').resolves(); stubData('getAllConversations').resolves([]); stubData('saveConversation').resolves(); - await getConversationController().load(); + await ConvoHub.use().load(); }); describe('send', () => { - const ourNumber = '0123456789abcdef'; - let sessionMessageAPISendStub: sinon.SinonStub; + const ourNumber = TestUtils.generateFakePubKeyStr(); + let sessionMessageAPISendStub: TypedStub; + let doSnodeBatchRequestStub: TypedStub; let encryptStub: sinon.SinonStub<[PubKey, Uint8Array, SignalService.Envelope.Type]>; beforeEach(() => { sessionMessageAPISendStub = Sinon.stub(MessageSender, 'sendMessagesDataToSnode').resolves(); - + doSnodeBatchRequestStub = Sinon.stub( + BatchRequests, + 'doSnodeBatchRequestNoRetries' + ).resolves(); stubData('getMessageById').resolves(); encryptStub = Sinon.stub(MessageEncrypter, 'encrypt').resolves({ @@ -56,60 +69,66 @@ describe('MessageSender', () => { }); describe('retry', () => { - let rawMessage: RawMessage; + let rawMessage: OutgoingRawMessage; beforeEach(async () => { rawMessage = await MessageUtils.toRawMessage( TestUtils.generateFakePubKey(), TestUtils.generateVisibleMessage(), - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); }); it('should not retry if an error occurred during encryption', async () => { - encryptStub.throws(new Error('Failed to encrypt.')); - const promise = MessageSender.send({ - message: rawMessage, - attempts: 3, - retryMinTimeout: 10, - isSyncMessage: false, - }); - await expect(promise).is.rejectedWith('Failed to encrypt.'); + encryptStub.throws(new Error('Failed to encrypt')); + + const promise = () => + MessageSender.sendSingleMessage({ + 
message: rawMessage, + attempts: 3, + retryMinTimeout: 10, + isSyncMessage: false, + }); + await expectAsyncToThrow(promise, 'Failed to encrypt'); expect(sessionMessageAPISendStub.callCount).to.equal(0); }); it('should only call lokiMessageAPI once if no errors occured', async () => { - await MessageSender.send({ + stubValidSnodeSwarm(); + await MessageSender.sendSingleMessage({ message: rawMessage, attempts: 3, retryMinTimeout: 10, isSyncMessage: false, }); - expect(sessionMessageAPISendStub.callCount).to.equal(1); + expect(doSnodeBatchRequestStub.callCount).to.equal(1); }); it('should only retry the specified amount of times before throwing', async () => { - sessionMessageAPISendStub.throws(new Error('API error')); + stubValidSnodeSwarm(); + + doSnodeBatchRequestStub.throws(new Error('API error')); const attempts = 2; - const promise = MessageSender.send({ + const promise = MessageSender.sendSingleMessage({ message: rawMessage, attempts, retryMinTimeout: 10, isSyncMessage: false, }); await expect(promise).is.rejectedWith('API error'); - expect(sessionMessageAPISendStub.callCount).to.equal(attempts); + expect(doSnodeBatchRequestStub.callCount).to.equal(attempts); }); it('should not throw error if successful send occurs within the retry limit', async () => { - sessionMessageAPISendStub.onFirstCall().throws(new Error('API error')); - await MessageSender.send({ + stubValidSnodeSwarm(); + doSnodeBatchRequestStub.onFirstCall().throws(new Error('API error')); + await MessageSender.sendSingleMessage({ message: rawMessage, attempts: 3, retryMinTimeout: 10, isSyncMessage: false, }); - expect(sessionMessageAPISendStub.callCount).to.equal(2); + expect(doSnodeBatchRequestStub.callCount).to.equal(2); }); }); @@ -124,57 +143,72 @@ describe('MessageSender', () => { }); it('should pass the correct values to lokiMessageAPI', async () => { + TestUtils.setupTestWithSending(); + const device = TestUtils.generateFakePubKey(); const visibleMessage = TestUtils.generateVisibleMessage(); - Sinon.stub(getConversationController(), 'get').returns(undefined as any); + Sinon.stub(ConvoHub.use(), 'get').returns(undefined as any); const rawMessage = await MessageUtils.toRawMessage( device, visibleMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); - await MessageSender.send({ + await MessageSender.sendSingleMessage({ message: rawMessage, attempts: 3, retryMinTimeout: 10, isSyncMessage: false, }); - const args = sessionMessageAPISendStub.getCall(0).args; - expect(args[1]).to.equal(device.key); + const args = doSnodeBatchRequestStub.getCall(0).args; + + expect(args[3]).to.equal(device.key); const firstArg = args[0]; expect(firstArg.length).to.equal(1); + + if (firstArg[0].method !== 'store') { + throw new Error('expected a store request with data'); + } + // expect(args[3]).to.equal(visibleMessage.timestamp); the timestamp is overwritten on sending by the network clock offset - expect(firstArg[0].ttl).to.equal(visibleMessage.ttl()); - expect(firstArg[0].pubkey).to.equal(device.key); - expect(firstArg[0].namespace).to.equal(SnodeNamespaces.UserMessages); + expect(firstArg[0].params.ttl).to.equal(visibleMessage.ttl()); + expect(firstArg[0].params.pubkey).to.equal(device.key); + expect(firstArg[0].params.namespace).to.equal(SnodeNamespaces.Default); + // the request timestamp is always used fresh with the offset as the request will be denied with a 406 otherwise (clock out of sync) + expect(firstArg[0].params.timestamp).to.be.above(Date.now() - 10); + 
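// together with the matching upper bound asserted just below, this pins the request timestamp to roughly ±10ms around the local clock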
expect(firstArg[0].params.timestamp).to.be.below(Date.now() + 10); }); - it('should correctly build the envelope and override the timestamp', async () => { + it('should correctly build the envelope and override the request timestamp but not the msg one', async () => { + TestUtils.setupTestWithSending(); messageEncyrptReturnEnvelopeType = SignalService.Envelope.Type.SESSION_MESSAGE; // This test assumes the encryption stub returns the plainText passed into it. const device = TestUtils.generateFakePubKey(); - Sinon.stub(getConversationController(), 'get').returns(undefined as any); + Sinon.stub(ConvoHub.use(), 'get').returns(undefined as any); const visibleMessage = TestUtils.generateVisibleMessage(); const rawMessage = await MessageUtils.toRawMessage( device, visibleMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); const offset = 200000; - Sinon.stub(GetNetworkTime, 'getLatestTimestampOffset').returns(offset); - await MessageSender.send({ + Sinon.stub(NetworkTime, 'getLatestTimestampOffset').returns(offset); + await MessageSender.sendSingleMessage({ message: rawMessage, attempts: 3, retryMinTimeout: 10, isSyncMessage: false, }); - const firstArg = sessionMessageAPISendStub.getCall(0).args[0]; - const { data64 } = firstArg[0]; - const data = fromBase64ToArrayBuffer(data64); + const firstArg = doSnodeBatchRequestStub.getCall(0).args[0]; + + if (firstArg[0].method !== 'store') { + throw new Error('expected a store request with data'); + } + const data = fromBase64ToArrayBuffer(firstArg[0].params.data); const webSocketMessage = SignalService.WebSocketMessage.decode(new Uint8Array(data)); expect(webSocketMessage.request?.body).to.not.equal( undefined, @@ -191,49 +225,41 @@ describe('MessageSender', () => { expect(envelope.type).to.equal(SignalService.Envelope.Type.SESSION_MESSAGE); expect(envelope.source).to.equal(''); - // the timestamp is overridden on sending with the network offset - const expectedTimestamp = Date.now() - offset; + // the timestamp in the message is not overridden on sending as it should be set with the network offset when created. + // we need that timestamp to not be overriden as the signature of the message depends on it. const decodedTimestampFromSending = _.toNumber(envelope.timestamp); - expect(decodedTimestampFromSending).to.be.above(expectedTimestamp - 10); - expect(decodedTimestampFromSending).to.be.below(expectedTimestamp + 10); - - // then make sure the plaintextBuffer was overridden too - const visibleMessageExpected = TestUtils.generateVisibleMessage({ - timestamp: decodedTimestampFromSending, - }); - const rawMessageExpected = await MessageUtils.toRawMessage( - device, - visibleMessageExpected, - 0 - ); + expect(decodedTimestampFromSending).to.be.eq(visibleMessage.createAtNetworkTimestamp); - expect(envelope.content).to.deep.equal(rawMessageExpected.plainTextBuffer); + // then, make sure that }); describe('SESSION_MESSAGE', () => { it('should set the envelope source to be empty', async () => { + TestUtils.setupTestWithSending(); messageEncyrptReturnEnvelopeType = SignalService.Envelope.Type.SESSION_MESSAGE; - Sinon.stub(getConversationController(), 'get').returns(undefined as any); + Sinon.stub(ConvoHub.use(), 'get').returns(undefined as any); // This test assumes the encryption stub returns the plainText passed into it. 
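// the envelope source is expected to stay empty for SESSION_MESSAGE envelopes; the recipient derives the sender from the decrypted content rather than from the envelope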
const device = TestUtils.generateFakePubKey(); - const visibleMessage = TestUtils.generateVisibleMessage(); const rawMessage = await MessageUtils.toRawMessage( device, visibleMessage, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); - await MessageSender.send({ + await MessageSender.sendSingleMessage({ message: rawMessage, attempts: 3, retryMinTimeout: 10, isSyncMessage: false, }); - const firstArg = sessionMessageAPISendStub.getCall(0).args[0]; - const { data64 } = firstArg[0]; - const data = fromBase64ToArrayBuffer(data64); + const firstArg = doSnodeBatchRequestStub.getCall(0).args[0]; + + if (firstArg[0].method !== 'store') { + throw new Error('expected a store request with data'); + } + const data = fromBase64ToArrayBuffer(firstArg[0].params.data); const webSocketMessage = SignalService.WebSocketMessage.decode(new Uint8Array(data)); expect(webSocketMessage.request?.body).to.not.equal( undefined, @@ -294,7 +320,7 @@ describe('MessageSender', () => { it('should call sendOnionRequestHandlingSnodeEjectStub', async () => { const sendOnionRequestHandlingSnodeEjectStub = Sinon.stub( Onions, - 'sendOnionRequestHandlingSnodeEject' + 'sendOnionRequestHandlingSnodeEjectNoRetries' ).resolves({} as any); Sinon.stub(OnionV4, 'decodeV4Response').returns({ metadata: { code: 200 }, @@ -302,6 +328,7 @@ describe('MessageSender', () => { bodyBinary: new Uint8Array(), bodyContentType: 'a', }); + Sinon.stub(OnionSending, 'getMinTimeoutForSogs').returns(5); const message = TestUtils.generateOpenGroupVisibleMessage(); const roomInfos = TestUtils.generateOpenGroupV2RoomInfos(); @@ -312,7 +339,9 @@ describe('MessageSender', () => { it('should retry sendOnionRequestHandlingSnodeEjectStub ', async () => { const message = TestUtils.generateOpenGroupVisibleMessage(); const roomInfos = TestUtils.generateOpenGroupV2RoomInfos(); - Sinon.stub(Onions, 'sendOnionRequestHandlingSnodeEject').resolves({} as any); + Sinon.stub(Onions, 'sendOnionRequestHandlingSnodeEjectNoRetries').resolves({} as any); + + Sinon.stub(OnionSending, 'getMinTimeoutForSogs').returns(5); const decodev4responseStub = Sinon.stub(OnionV4, 'decodeV4Response'); decodev4responseStub.throws('whate'); @@ -330,7 +359,8 @@ describe('MessageSender', () => { it('should not retry more than 3 sendOnionRequestHandlingSnodeEjectStub ', async () => { const message = TestUtils.generateOpenGroupVisibleMessage(); const roomInfos = TestUtils.generateOpenGroupV2RoomInfos(); - Sinon.stub(Onions, 'sendOnionRequestHandlingSnodeEject').resolves({} as any); + Sinon.stub(Onions, 'sendOnionRequestHandlingSnodeEjectNoRetries').resolves({} as any); + Sinon.stub(OnionSending, 'getMinTimeoutForSogs').returns(5); const decodev4responseStub = Sinon.stub(OnionV4, 'decodeV4Response'); decodev4responseStub.throws('whate'); diff --git a/ts/test/session/unit/sending/PendingMessageCache_test.ts b/ts/test/session/unit/sending/PendingMessageCache_test.ts index caa0310208..4ade1eaa3b 100644 --- a/ts/test/session/unit/sending/PendingMessageCache_test.ts +++ b/ts/test/session/unit/sending/PendingMessageCache_test.ts @@ -2,12 +2,12 @@ /* eslint-disable no-await-in-loop */ /* eslint-disable no-restricted-syntax */ import { expect } from 'chai'; -import Sinon from 'sinon'; import * as _ from 'lodash'; +import Sinon from 'sinon'; +import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; +import { PendingMessageCache } from '../../../../session/sending/PendingMessageCache'; import { MessageUtils } from '../../../../session/utils'; import { TestUtils } from 
'../../../test-utils'; -import { PendingMessageCache } from '../../../../session/sending/PendingMessageCache'; -import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; // Equivalent to Data.StorageItem interface StorageItem { @@ -58,13 +58,9 @@ describe('PendingMessageCache', () => { it('can add to cache', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); - await pendingMessageCacheStub.add(device, message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(device, message, SnodeNamespaces.Default); // Verify that the message is in the cache const finalCache = await pendingMessageCacheStub.getAllPending(); @@ -81,20 +77,20 @@ describe('PendingMessageCache', () => { await pendingMessageCacheStub.add( device, TestUtils.generateVisibleMessage(), - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); // We have to timeout here otherwise it's processed too fast and messages start having the same timestamp await TestUtils.timeout(5); await pendingMessageCacheStub.add( device, TestUtils.generateVisibleMessage(), - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); await TestUtils.timeout(5); await pendingMessageCacheStub.add( device, TestUtils.generateVisibleMessage(), - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); // Verify that the message is in the cache @@ -106,13 +102,9 @@ describe('PendingMessageCache', () => { it('can remove from cache', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); - await pendingMessageCacheStub.add(device, message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(device, message, SnodeNamespaces.Default); const initialCache = await pendingMessageCacheStub.getAllPending(); expect(initialCache).to.have.length(1); @@ -129,23 +121,19 @@ describe('PendingMessageCache', () => { it('should only remove messages with different identifier and device', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); - await pendingMessageCacheStub.add(device, message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(device, message, SnodeNamespaces.Default); await TestUtils.timeout(5); const one = await pendingMessageCacheStub.add( device, TestUtils.generateVisibleMessage(), - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); const two = await pendingMessageCacheStub.add( TestUtils.generateFakePubKey(), message, - SnodeNamespaces.UserMessages + SnodeNamespaces.Default ); const initialCache = await pendingMessageCacheStub.getAllPending(); @@ -178,7 +166,7 @@ describe('PendingMessageCache', () => { ]; for (const item of cacheItems) { - await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(item.device, item.message, 
SnodeNamespaces.Default); } const cache = await pendingMessageCacheStub.getAllPending(); @@ -206,7 +194,7 @@ describe('PendingMessageCache', () => { ]; for (const item of cacheItems) { - await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.Default); } const initialCache = await pendingMessageCacheStub.getAllPending(); @@ -223,11 +211,7 @@ describe('PendingMessageCache', () => { it('can find nothing when empty', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); const foundMessage = pendingMessageCacheStub.find(rawMessage); expect(foundMessage, 'a message was found in empty cache').to.be.undefined; @@ -236,13 +220,9 @@ describe('PendingMessageCache', () => { it('can find message in cache', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); - await pendingMessageCacheStub.add(device, message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(device, message, SnodeNamespaces.Default); const finalCache = await pendingMessageCacheStub.getAllPending(); expect(finalCache).to.have.length(1); @@ -269,7 +249,7 @@ describe('PendingMessageCache', () => { ]; for (const item of cacheItems) { - await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.Default); } const initialCache = await pendingMessageCacheStub.getAllPending(); @@ -299,7 +279,7 @@ describe('PendingMessageCache', () => { ]; for (const item of cacheItems) { - await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.UserMessages); + await pendingMessageCacheStub.add(item.device, item.message, SnodeNamespaces.Default); } const addedMessages = await pendingMessageCacheStub.getAllPending(); @@ -320,8 +300,8 @@ describe('PendingMessageCache', () => { expect(buffersCompare).to.equal(true, 'buffers were not loaded properly from database'); // Compare all other valures - const trimmedAdded = _.omit(addedMessage, ['plainTextBuffer']); - const trimmedRebuilt = _.omit(message, ['plainTextBuffer']); + const trimmedAdded = _.omit(addedMessage, ['plainTextBuffer', 'plainTextBufferHex']); + const trimmedRebuilt = _.omit(message, ['plainTextBuffer', 'plainTextBufferHex']); expect(_.isEqual(trimmedAdded, trimmedRebuilt)).to.equal( true, diff --git a/ts/test/session/unit/snode_api/retrieveNextMessages_test.ts b/ts/test/session/unit/snode_api/retrieveNextMessages_test.ts new file mode 100644 index 0000000000..01f69feb21 --- /dev/null +++ b/ts/test/session/unit/snode_api/retrieveNextMessages_test.ts @@ -0,0 +1,447 @@ +import chai from 'chai'; +import { beforeEach, describe } from 'mocha'; +import Sinon from 'sinon'; + +import { GroupPubkeyType, PubkeyType, UserGroupsGet } from 'libsession_util_nodejs'; +import { + RetrieveGroupSubRequest, + RetrieveLegacyClosedGroupSubRequest, + RetrieveUserSubRequest, + UpdateExpiryOnNodeGroupSubRequest, + 
UpdateExpiryOnNodeUserSubRequest, +} from '../../../../session/apis/snode_api/SnodeRequestTypes'; +import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; +import { SnodeAPIRetrieve } from '../../../../session/apis/snode_api/retrieveRequest'; +import { WithShortenOrExtend } from '../../../../session/apis/snode_api/types'; +import { TestUtils } from '../../../test-utils'; +import { expectAsyncToThrow, stubLibSessionWorker } from '../../../test-utils/utils'; +import { NetworkTime } from '../../../../util/NetworkTime'; + +const { expect } = chai; + +function expectRetrieveWith({ + request, + namespace, + lastHash, + maxSize, +}: { + request: RetrieveLegacyClosedGroupSubRequest | RetrieveUserSubRequest | RetrieveGroupSubRequest; + namespace: SnodeNamespaces; + lastHash: string | null; + maxSize: number; +}) { + expect(request.namespace).to.be.eq(namespace); + expect(request.last_hash).to.be.eq(lastHash); + expect(request.max_size).to.be.eq(maxSize); +} + +function expectExpireWith({ + request, + hashes, + shortenOrExtend, +}: { + request: UpdateExpiryOnNodeUserSubRequest | UpdateExpiryOnNodeGroupSubRequest; + hashes: Array<string>; +} & WithShortenOrExtend) { + expect(request.messageHashes).to.be.deep.eq(hashes); + expect(request.shortenOrExtend).to.be.eq(shortenOrExtend); + expect(request.expiryMs).to.be.above(NetworkTime.now() + 14 * 24 * 3600 * 1000 - 100); + expect(request.expiryMs).to.be.below(NetworkTime.now() + 14 * 24 * 3600 * 1000 + 100); +} + +describe('SnodeAPI:buildRetrieveRequest', () => { + let us: PubkeyType; + beforeEach(async () => { + TestUtils.stubWindowLog(); + us = TestUtils.generateFakePubKeyStr(); + }); + + afterEach(() => { + Sinon.restore(); + }); + + describe('us', () => { + it('with single namespace and lasthash, no hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [{ lastHash: 'lasthash', namespace: SnodeNamespaces.Default }], + us, + us, + null + ); + + expect(requests.length).to.be.eq(1); + const req = requests[0]; + if (req.method !== 'retrieve') { + throw new Error('expected retrieve method'); + } + expectRetrieveWith({ + request: req, + lastHash: 'lasthash', + maxSize: -1, + namespace: SnodeNamespaces.Default, + }); + }); + + it('with two namespace and lasthashes, no hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.Default }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.UserContacts }, + ], + us, + us, + null + ); + + expect(requests.length).to.be.eq(2); + const req1 = requests[0]; + const req2 = requests[1]; + if (req1.method !== 'retrieve' || req2.method !== 'retrieve') { + throw new Error('expected retrieve method'); + } + + expectRetrieveWith({ + request: req1, + lastHash: 'lasthash1', + maxSize: -2, + namespace: SnodeNamespaces.Default, + }); + + expectRetrieveWith({ + request: req2, + lastHash: 'lasthash2', + maxSize: -2, + namespace: SnodeNamespaces.UserContacts, + }); + }); + + it('with two namespace and lasthashes, 2 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.Default }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.UserContacts }, + ], + us, + us, + ['hashbump1', 'hashbump2'] + ); + + expect(requests.length).to.be.eq(3); + const req1 = requests[0]; + const req2 = requests[1]; + const req3 = requests[2]; + if (req1.method !== 'retrieve' || req2.method !== 'retrieve') 
{ + throw new Error('expected retrieve method'); + } + if (req3.method !== 'expire') { + throw new Error('expected expire method'); + } + + expectRetrieveWith({ + request: req1, + lastHash: 'lasthash1', + maxSize: -2, + namespace: SnodeNamespaces.Default, + }); + + expectRetrieveWith({ + request: req2, + lastHash: 'lasthash2', + maxSize: -2, + namespace: SnodeNamespaces.UserContacts, + }); + + expectExpireWith({ + request: req3, + hashes: ['hashbump1', 'hashbump2'], + shortenOrExtend: '', + }); + }); + + it('with 0 namespaces, 2 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], us, us, [ + 'hashbump1', + 'hashbump2', + ]); + + expect(requests.length).to.be.eq(1); + const req1 = requests[0]; + if (req1.method !== 'expire') { + throw new Error('expected expire method'); + } + + expectExpireWith({ + request: req1, + hashes: ['hashbump1', 'hashbump2'], + shortenOrExtend: '', + }); + }); + + it('with 0 namespaces, 0 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], us, us, []); + expect(requests.length).to.be.eq(0); + }); + it('with 0 namespaces, null hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], us, us, null); + expect(requests.length).to.be.eq(0); + }); + + it('throws if given an invalid user namespace to retrieve from ', async () => { + const pr = async () => + SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.ClosedGroupKeys }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.UserContacts }, + ], + us, + us, + ['hashbump1', 'hashbump2'] + ); + + await expectAsyncToThrow( + pr, + `retrieveRequestForUs not a valid namespace to retrieve as us:${SnodeNamespaces.ClosedGroupKeys}` + ); + }); + }); + + describe('legacy group', () => { + let groupPk: PubkeyType; + beforeEach(() => { + groupPk = TestUtils.generateFakePubKeyStr(); + }); + it('with single namespace and lasthash, no hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [{ lastHash: 'lasthash', namespace: SnodeNamespaces.LegacyClosedGroup }], + groupPk, + us, + null + ); + + expect(requests.length).to.be.eq(1); + const req = requests[0]; + if (req.method !== 'retrieve') { + throw new Error('expected retrieve method'); + } + expectRetrieveWith({ + request: req, + lastHash: 'lasthash', + maxSize: -1, + namespace: SnodeNamespaces.LegacyClosedGroup, + }); + }); + + it('with 1 namespace and lasthashes, 2 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [{ lastHash: 'lasthash1', namespace: SnodeNamespaces.LegacyClosedGroup }], + groupPk, + us, + ['hashbump1', 'hashbump2'] // legacy groups have not the possibility to bump the expire of messages + ); + + expect(requests.length).to.be.eq(1); + const req1 = requests[0]; + if (req1.method !== 'retrieve') { + throw new Error('expected retrieve/expire method'); + } + + expectRetrieveWith({ + request: req1, + lastHash: 'lasthash1', + maxSize: -1, + namespace: SnodeNamespaces.LegacyClosedGroup, + }); + }); + + it('with 0 namespaces, 2 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], groupPk, us, [ + 'hashbump1', + 'hashbump2', + ]); + + expect(requests.length).to.be.eq(0); // legacy groups have not possibility to bump expire of messages + }); + + it('with 0 namespaces, 0 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], 
groupPk, us, []); + expect(requests.length).to.be.eq(0); + }); + it('with 0 namespaces, null hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], groupPk, us, null); + expect(requests.length).to.be.eq(0); + }); + + it('throws if given an invalid legacy group namespace to retrieve from ', async () => { + const pr = async () => + SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.ClosedGroupKeys }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.UserContacts }, + ], + groupPk, + us, + ['hashbump1', 'hashbump2'] + ); + + await expectAsyncToThrow( + pr, + `retrieveRequestForUs not a valid namespace to retrieve as us:${SnodeNamespaces.ClosedGroupKeys}` + ); + }); + }); + + describe('group v2', () => { + let groupPk: GroupPubkeyType; + beforeEach(() => { + groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + stubLibSessionWorker({}); + }); + it('with single namespace and lasthash, no hashesToBump ', async () => { + TestUtils.stubUserGroupWrapper('getGroup', { whatever: '' } as any as UserGroupsGet); + + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [{ lastHash: 'lasthash', namespace: SnodeNamespaces.ClosedGroupInfo }], + groupPk, + us, + null + ); + + expect(requests.length).to.be.eq(1); + const req = requests[0]; + if (req.method !== 'retrieve') { + throw new Error('expected retrieve method'); + } + expectRetrieveWith({ + request: req, + lastHash: 'lasthash', + maxSize: -1, + namespace: SnodeNamespaces.ClosedGroupInfo, + }); + }); + + it('with two namespace and lasthashes, no hashesToBump ', async () => { + TestUtils.stubUserGroupWrapper('getGroup', { whatever: '' } as any as UserGroupsGet); + + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.ClosedGroupInfo }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.ClosedGroupMessages }, + ], + groupPk, + us, + null + ); + + expect(requests.length).to.be.eq(2); + const req1 = requests[0]; + const req2 = requests[1]; + if (req1.method !== 'retrieve' || req2.method !== 'retrieve') { + throw new Error('expected retrieve method'); + } + + expectRetrieveWith({ + request: req1, + lastHash: 'lasthash1', + maxSize: -2, + namespace: SnodeNamespaces.ClosedGroupInfo, + }); + + expectRetrieveWith({ + request: req2, + lastHash: 'lasthash2', + maxSize: -2, + namespace: SnodeNamespaces.ClosedGroupMessages, + }); + }); + + it('with two namespace and lasthashes, 2 hashesToBump ', async () => { + TestUtils.stubUserGroupWrapper('getGroup', { whatever: '' } as any as UserGroupsGet); + const requests = await SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.ClosedGroupInfo }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.ClosedGroupKeys }, + ], + groupPk, + us, + ['hashbump1', 'hashbump2'] + ); + + expect(requests.length).to.be.eq(3); + const req1 = requests[0]; + const req2 = requests[1]; + const req3 = requests[2]; + if (req1.method !== 'retrieve' || req2.method !== 'retrieve') { + throw new Error('expected retrieve method'); + } + if (req3.method !== 'expire') { + throw new Error('expected expire method'); + } + + expectRetrieveWith({ + request: req1, + lastHash: 'lasthash1', + maxSize: -2, + namespace: SnodeNamespaces.ClosedGroupInfo, + }); + + expectRetrieveWith({ + request: req2, + lastHash: 'lasthash2', + maxSize: -2, + namespace: SnodeNamespaces.ClosedGroupKeys, + }); + + expectExpireWith({ + request: req3, + hashes: 
['hashbump1', 'hashbump2'], + shortenOrExtend: '', + }); + }); + + it('with 0 namespaces, 2 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], groupPk, us, [ + 'hashbump1', + 'hashbump2', + ]); + + expect(requests.length).to.be.eq(1); + const req1 = requests[0]; + if (req1.method !== 'expire') { + throw new Error('expected expire method'); + } + + expectExpireWith({ + request: req1, + hashes: ['hashbump1', 'hashbump2'], + shortenOrExtend: '', + }); + }); + + it('with 0 namespaces, 0 hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], groupPk, us, []); + expect(requests.length).to.be.eq(0); + }); + it('with 0 namespaces, null hashesToBump ', async () => { + const requests = await SnodeAPIRetrieve.buildRetrieveRequest([], groupPk, us, null); + expect(requests.length).to.be.eq(0); + }); + + it('throws if given an invalid group namespace to retrieve from ', async () => { + const pr = async () => + SnodeAPIRetrieve.buildRetrieveRequest( + [ + { lastHash: 'lasthash1', namespace: SnodeNamespaces.ClosedGroupKeys }, + { lastHash: 'lasthash2', namespace: SnodeNamespaces.UserContacts }, + ], + groupPk, + us, + ['hashbump1', 'hashbump2'] + ); + + await expectAsyncToThrow( + pr, + `tried to poll from a non 03 group namespace ${SnodeNamespaces.UserContacts}` + ); + }); + }); +}); diff --git a/ts/test/session/unit/sogsv3/ApiUtil_test.ts b/ts/test/session/unit/sogsv3/ApiUtil_test.ts index af8a35493d..ad7add2199 100644 --- a/ts/test/session/unit/sogsv3/ApiUtil_test.ts +++ b/ts/test/session/unit/sogsv3/ApiUtil_test.ts @@ -7,7 +7,7 @@ import { isSessionRunOpenGroup, } from '../../../../session/apis/open_group_api/opengroupV2/ApiUtil'; import { getOpenGroupV2ConversationId } from '../../../../session/apis/open_group_api/utils/OpenGroupUtils'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { stubData, stubOpenGroupData, stubWindowLog } from '../../../test-utils/utils'; import { UserUtils } from '../../../../session/utils'; import { TestUtils } from '../../../test-utils'; @@ -87,9 +87,9 @@ describe('APIUtils', () => { stubData('saveConversation').resolves(); stubData('getItemById').resolves(); stubOpenGroupData('getAllV2OpenGroupRooms').resolves(); - getConversationController().reset(); + ConvoHub.use().reset(); - await getConversationController().load(); + await ConvoHub.use().load(); await OpenGroupData.opengroupRoomsLoad(); }); afterEach(() => { @@ -135,25 +135,25 @@ describe('APIUtils', () => { stubData('getItemById').resolves(); stubOpenGroupData('getAllV2OpenGroupRooms').resolves(); getV2OpenGroupRoomsByServerUrl = stubOpenGroupData('getV2OpenGroupRoomsByServerUrl'); - getConversationController().reset(); + ConvoHub.use().reset(); Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns( TestUtils.generateFakePubKeyStr() ); - await getConversationController().load(); + await ConvoHub.use().load(); - const convoOurIp = await getConversationController().getOrCreateAndWait( + const convoOurIp = await ConvoHub.use().getOrCreateAndWait( convoIdOurIp, ConversationTypeEnum.GROUP ); convoOurIp.set({ active_at: Date.now() }); - const convoOurUrl = await getConversationController().getOrCreateAndWait( + const convoOurUrl = await ConvoHub.use().getOrCreateAndWait( convoIdOurUrl, ConversationTypeEnum.GROUP ); convoOurUrl.set({ active_at: Date.now() }); - const convoNotOur = await 
getConversationController().getOrCreateAndWait( + const convoNotOur = await ConvoHub.use().getOrCreateAndWait( convoIdNotOur, ConversationTypeEnum.GROUP ); diff --git a/ts/test/session/unit/sogsv3/knownBlindedKeys_test.ts b/ts/test/session/unit/sogsv3/knownBlindedKeys_test.ts index 9a7d0a09f4..ef35071e82 100644 --- a/ts/test/session/unit/sogsv3/knownBlindedKeys_test.ts +++ b/ts/test/session/unit/sogsv3/knownBlindedKeys_test.ts @@ -18,11 +18,11 @@ import { tryMatchBlindWithStandardKey, writeKnownBlindedKeys, } from '../../../../session/apis/open_group_api/sogsv3/knownBlindedkeys'; -import { getConversationController } from '../../../../session/conversations'; +import { ConvoHub } from '../../../../session/conversations'; import { LibSodiumWrappers } from '../../../../session/crypto'; import { UserUtils } from '../../../../session/utils'; -import { expectAsyncToThrow, stubData, stubWindowLog } from '../../../test-utils/utils'; import { TestUtils } from '../../../test-utils'; +import { expectAsyncToThrow, stubData, stubWindowLog } from '../../../test-utils/utils'; import { ConversationTypeEnum } from '../../../../models/types'; const serverPublicKey = 'serverPublicKey'; @@ -235,7 +235,7 @@ describe('knownBlindedKeys', () => { }, 'blindedId is not a blinded key'); }); - it('throws with realSessionId not unlinded', async () => { + it('throws with realSessionId not unblinded', async () => { getItemById.resolves(); await loadKnownBlindedKeys(); @@ -308,7 +308,7 @@ describe('knownBlindedKeys', () => { }); }); - it('adds a new one if not matching serverpubkey', async () => { + it('adds a new one if not matching server pubkey', async () => { getItemById.resolves(); await loadKnownBlindedKeys(); await addCachedBlindedKey({ @@ -479,7 +479,7 @@ describe('knownBlindedKeys', () => { describe('when not in cache', () => { beforeEach(async () => { - getConversationController().reset(); + ConvoHub.use().reset(); getItemById.resolves(); stubData('getAllConversations').resolves([]); @@ -487,7 +487,7 @@ describe('knownBlindedKeys', () => { Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns( TestUtils.generateFakePubKeyStr() ); - await getConversationController().load(); + await ConvoHub.use().load(); }); it('does iterate over all the conversations and find the first one matching (fails)', async () => { @@ -500,7 +500,7 @@ describe('knownBlindedKeys', () => { await addCachedBlindedKey(shouldBeWrittenToDb); - Sinon.stub(getConversationController(), 'getConversations').returns([]); + Sinon.stub(ConvoHub.use(), 'getConversations').returns([]); const real = await findCachedBlindedMatchOrLookItUp(realSessionId, serverPublicKey, sodium); // we should have 1 call here as the value was already added to the cache expect(createOrUpdateItem.callCount).to.eq(1); @@ -514,18 +514,12 @@ describe('knownBlindedKeys', () => { it('does iterate over all the conversations and find the first one matching (passes)', async () => { await loadKnownBlindedKeys(); // adding a private conversation with a known match of the blinded pubkey we have - await getConversationController().getOrCreateAndWait( - realSessionId, - ConversationTypeEnum.PRIVATE - ); - const convo = await getConversationController().getOrCreateAndWait( + ConvoHub.use().getOrCreate(realSessionId, ConversationTypeEnum.PRIVATE); + const convo = await ConvoHub.use().getOrCreateAndWait( knownBlindingMatch.realSessionId, ConversationTypeEnum.PRIVATE ); - await getConversationController().getOrCreateAndWait( - realSessionId2, - ConversationTypeEnum.PRIVATE - ); + 
ConvoHub.use().getOrCreate(realSessionId2, ConversationTypeEnum.PRIVATE); convo.set({ isApproved: true }); const real = await findCachedBlindedMatchOrLookItUp( knownBlindingMatch.blindedId, @@ -546,7 +540,7 @@ describe('knownBlindedKeys', () => { it('does iterate over all the conversations but is not approved so must fail', async () => { await loadKnownBlindedKeys(); // adding a private conversation with a known match of the blinded pubkey we have - const convo = await getConversationController().getOrCreateAndWait( + const convo = await ConvoHub.use().getOrCreateAndWait( knownBlindingMatch.realSessionId, ConversationTypeEnum.PRIVATE ); @@ -564,7 +558,7 @@ describe('knownBlindedKeys', () => { it('does iterate over all the conversations but is not private so must fail: group', async () => { await loadKnownBlindedKeys(); // adding a private conversation with a known match of the blinded pubkey we have - const convo = await getConversationController().getOrCreateAndWait( + const convo = await ConvoHub.use().getOrCreateAndWait( knownBlindingMatch.realSessionId, ConversationTypeEnum.GROUP ); @@ -578,8 +572,8 @@ describe('knownBlindedKeys', () => { expect(real).to.eq(undefined); }); - it('does iterate over all the conversations but is not private so must fail: groupv3', () => { - // we actually cannot test this one as we would need to create a conversation with groupv3 as type but 05 as prefix, and the conversation controller denies it, as expected + it('does iterate over all the conversations but is not private so must fail: groupv2', () => { + // we actually cannot test this one as we would need to create a conversation with groupv2 as type but 05 as prefix, and the conversation controller denies it, as expected }); }); }); diff --git a/ts/test/session/unit/swarm_polling/SwarmPolling_getNamespacesToPollFrom_test.ts b/ts/test/session/unit/swarm_polling/SwarmPolling_getNamespacesToPollFrom_test.ts new file mode 100644 index 0000000000..9d8021f83f --- /dev/null +++ b/ts/test/session/unit/swarm_polling/SwarmPolling_getNamespacesToPollFrom_test.ts @@ -0,0 +1,52 @@ +import { expect } from 'chai'; +import Sinon from 'sinon'; +import { getSwarmPollingInstance } from '../../../../session/apis/snode_api'; +import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; +import { SwarmPolling } from '../../../../session/apis/snode_api/swarmPolling'; +import { TestUtils } from '../../../test-utils'; +import { ConversationTypeEnum } from '../../../../models/types'; + +describe('SwarmPolling:getNamespacesToPollFrom', () => { + let swarmPolling: SwarmPolling; + + beforeEach(async () => { + TestUtils.stubLibSessionWorker(undefined); + TestUtils.stubWindowLog(); + swarmPolling = getSwarmPollingInstance(); + swarmPolling.resetSwarmPolling(); + }); + + afterEach(() => { + Sinon.restore(); + }); + + it('for us/private ', () => { + expect(swarmPolling.getNamespacesToPollFrom(ConversationTypeEnum.PRIVATE)).to.deep.equal([ + SnodeNamespaces.Default, + SnodeNamespaces.UserProfile, + SnodeNamespaces.UserContacts, + SnodeNamespaces.UserGroups, + SnodeNamespaces.ConvoInfoVolatile, + ]); + }); + + it('for group v2 (03 prefix) ', () => { + expect(swarmPolling.getNamespacesToPollFrom(ConversationTypeEnum.GROUPV2)).to.deep.equal([ + SnodeNamespaces.ClosedGroupRevokedRetrievableMessages, + SnodeNamespaces.ClosedGroupMessages, + SnodeNamespaces.ClosedGroupInfo, + SnodeNamespaces.ClosedGroupMembers, + SnodeNamespaces.ClosedGroupKeys, + ]); + }); + + it('for legacy group ', () => { + 
expect(swarmPolling.getNamespacesToPollFrom(ConversationTypeEnum.GROUP)).to.deep.equal([ + SnodeNamespaces.LegacyClosedGroup, + ]); + }); + + it('for unknown type ', () => { + expect(() => swarmPolling.getNamespacesToPollFrom('invalidtype' as any)).to.throw(''); // empty string just means that we want it to throw anything + }); +}); diff --git a/ts/test/session/unit/swarm_polling/SwarmPolling_getPollingTimeout_test.ts b/ts/test/session/unit/swarm_polling/SwarmPolling_getPollingTimeout_test.ts new file mode 100644 index 0000000000..4b2673db68 --- /dev/null +++ b/ts/test/session/unit/swarm_polling/SwarmPolling_getPollingTimeout_test.ts @@ -0,0 +1,139 @@ +import { expect } from 'chai'; +import Sinon from 'sinon'; +import { ConversationTypeEnum } from '../../../../models/types'; +import { + SwarmPolling, + getSwarmPollingInstance, +} from '../../../../session/apis/snode_api/swarmPolling'; +import { SWARM_POLLING_TIMEOUT } from '../../../../session/constants'; +import { ConvoHub } from '../../../../session/conversations/ConversationController'; +import { PubKey } from '../../../../session/types'; +import { TestUtils } from '../../../test-utils'; +import { stubData } from '../../../test-utils/utils'; + +describe('SwarmPolling:getPollingTimeout', () => { + let swarmPolling: SwarmPolling; + + beforeEach(async () => { + TestUtils.stubLibSessionWorker(undefined); + TestUtils.stubWindowLog(); + swarmPolling = getSwarmPollingInstance(); + swarmPolling.resetSwarmPolling(); + ConvoHub.use().reset(); + stubData('getAllConversations').resolves([]); + await ConvoHub.use().load(); + }); + + afterEach(() => { + Sinon.restore(); + ConvoHub.use().reset(); + }); + + it('returns INACTIVE for non existing convo', () => { + const fakeConvo = TestUtils.generateFakePubKey(); + + expect(swarmPolling.getPollingTimeout(fakeConvo)).to.eq(SWARM_POLLING_TIMEOUT.INACTIVE); + }); + + describe('legacy groups', () => { + it('returns ACTIVE for convo with less than two days old activeAt', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakePubKeyStr(), + ConversationTypeEnum.GROUP + ); + convo.set('active_at', Date.now() - 2 * 23 * 3600 * 1000); // 23 * 2 = 46 hours old + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.ACTIVE + ); + }); + + it('returns INACTIVE for convo with undefined activeAt', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakePubKeyStr(), + ConversationTypeEnum.GROUP + ); + convo.set('active_at', undefined); + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.INACTIVE + ); + }); + + it('returns MEDIUM_ACTIVE for convo with activeAt of more than 2 days but less than a week old', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakePubKeyStr(), + ConversationTypeEnum.GROUP + ); + convo.set('active_at', Date.now() - 1000 * 3600 * 25 * 2); // 25 hours x 2 = 50 hours old + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.MEDIUM_ACTIVE + ); + + convo.set('active_at', Date.now() - 1000 * 3600 * 24 * 7 + 3600); // a week minus an hour old + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.MEDIUM_ACTIVE + ); + }); + + it('returns INACTIVE for convo with activeAt of more than a week', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakePubKeyStr(), + ConversationTypeEnum.GROUP + ); + convo.set('active_at', 
Date.now() - 1000 * 3600 * 24 * 8); // 8 days + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.INACTIVE + ); + }); + }); + + describe('groupv2', () => { + it('returns ACTIVE for convo with less than two days old activeAt', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 + ); + convo.set('active_at', Date.now() - 2 * 23 * 3600 * 1000); // 23 * 2 = 46 hours old + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.ACTIVE + ); + }); + + it('returns INACTIVE for convo with undefined activeAt', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 + ); + convo.set('active_at', undefined); + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.INACTIVE + ); + }); + + it('returns MEDIUM_ACTIVE for convo with activeAt of more than 2 days but less than a week old', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 + ); + convo.set('active_at', Date.now() - 1000 * 3600 * 25 * 2); // 25 hours x 2 = 50 hours old + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.MEDIUM_ACTIVE + ); + + convo.set('active_at', Date.now() - 1000 * 3600 * 24 * 7 + 3600); // a week minus an hour old + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.MEDIUM_ACTIVE + ); + }); + + it('returns INACTIVE for convo with activeAt of more than a week', () => { + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 + ); + convo.set('active_at', Date.now() - 1000 * 3600 * 24 * 8); // 8 days + expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( + SWARM_POLLING_TIMEOUT.INACTIVE + ); + }); + }); +}); diff --git a/ts/test/session/unit/swarm_polling/SwarmPolling_pollForAllKeys_test.ts b/ts/test/session/unit/swarm_polling/SwarmPolling_pollForAllKeys_test.ts new file mode 100644 index 0000000000..11fa98c893 --- /dev/null +++ b/ts/test/session/unit/swarm_polling/SwarmPolling_pollForAllKeys_test.ts @@ -0,0 +1,570 @@ +import chai from 'chai'; +import { describe } from 'mocha'; +import Sinon, * as sinon from 'sinon'; + +import { + GroupPubkeyType, + LegacyGroupInfo, + PubkeyType, + UserGroupsGet, +} from 'libsession_util_nodejs'; +import { ConversationModel, Convo } from '../../../../models/conversation'; +import { ConversationTypeEnum } from '../../../../models/types'; +import { getSwarmPollingInstance } from '../../../../session/apis/snode_api'; +import { resetHardForkCachedValues } from '../../../../session/apis/snode_api/hfHandling'; +import { SnodeAPIRetrieve } from '../../../../session/apis/snode_api/retrieveRequest'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; +import { SwarmPolling } from '../../../../session/apis/snode_api/swarmPolling'; +import { ConvoHub } from '../../../../session/conversations'; +import { PubKey } from '../../../../session/types'; +import { UserUtils } from '../../../../session/utils'; +import { sleepFor } from '../../../../session/utils/Promise'; +import { UserSync } from '../../../../session/utils/job_runners/jobs/UserSyncJob'; +import { TestUtils } from '../../../test-utils'; +import { 
generateFakeSnodes, stubData } from '../../../test-utils/utils'; + +const { expect } = chai; + +const pollOnceForUsArgs = (us: string) => [[us, ConversationTypeEnum.PRIVATE]]; +const pollOnceForGroupLegacyArgs = (groupLegacy: string) => [ + [groupLegacy, ConversationTypeEnum.GROUP], +]; + +const pollOnceForGroupArgs = (group: GroupPubkeyType) => [[group, ConversationTypeEnum.GROUPV2]]; + +function stubWithLegacyGroups(pubkeys: Array<string>) { + const groups = pubkeys.map(m => ({ pubkeyHex: m }) as LegacyGroupInfo); + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', groups); +} + +function stubWithGroups(pubkeys: Array<GroupPubkeyType>) { + const groups = pubkeys.map(m => ({ pubkeyHex: m }) as UserGroupsGet); + TestUtils.stubUserGroupWrapper('getAllGroups', groups); +} + +describe('SwarmPolling:pollForAllKeys', () => { + const ourPubkey = TestUtils.generateFakePubKey(); + const ourNumber = ourPubkey.key as PubkeyType; + + let pollOnceForKeySpy: Sinon.SinonSpy< + Parameters<SwarmPolling['pollOnceForKey']>, + ReturnType<SwarmPolling['pollOnceForKey']> + >; + let swarmPolling: SwarmPolling; + let getItemByIdStub: Sinon.SinonStub; + let clock: Sinon.SinonFakeTimers; + + beforeEach(async () => { + ConvoHub.use().reset(); + TestUtils.stubWindowFeatureFlags(); + TestUtils.stubWindowLog(); + Sinon.stub(UserSync, 'queueNewJobIfNeeded').resolves(); + + // Utils Stubs + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); + + stubData('getAllConversations').resolves([]); + getItemByIdStub = TestUtils.stubData('getItemById'); + stubData('saveConversation').resolves(); + stubData('getSwarmNodesForPubkey').resolves(); + stubData('getLastHashBySnode').resolves(); + + Sinon.stub(Convo, 'commitConversationAndRefreshWrapper').resolves(); + + TestUtils.stubLibSessionWorker(undefined); + + Sinon.stub(SnodePool, 'getSwarmFor').resolves(generateFakeSnodes(5)); + Sinon.stub(SnodeAPIRetrieve, 'retrieveNextMessagesNoRetries').resolves([]); + + TestUtils.stubWindow('inboxStore', undefined); + TestUtils.stubWindow('getGlobalOnlineStatus', () => true); + TestUtils.stubWindowLog(); + + const convoController = ConvoHub.use(); + await convoController.load(); + ConvoHub.use().getOrCreate(ourPubkey.key, ConversationTypeEnum.PRIVATE); + + swarmPolling = getSwarmPollingInstance(); + swarmPolling.resetSwarmPolling(); + pollOnceForKeySpy = Sinon.spy(swarmPolling, 'pollOnceForKey'); + + clock = sinon.useFakeTimers({ now: Date.now(), shouldAdvanceTime: true }); + stubData('createOrUpdateItem').resolves(); + }); + + afterEach(() => { + Sinon.restore(); + ConvoHub.use().reset(); + clock.restore(); + resetHardForkCachedValues(); + }); + + it('does run for our pubkey even if activeAt is really old ', async () => { + stubWithGroups([]); + stubWithLegacyGroups([]); + const convo = ConvoHub.use().getOrCreate(ourNumber, ConversationTypeEnum.PRIVATE); + convo.set('active_at', Date.now() - 1000 * 3600 * 25); + await swarmPolling.start(true); + + expect(pollOnceForKeySpy.callCount).to.eq(1); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + }); + + it('does run for our pubkey even if activeAt is recent ', async () => { + stubWithGroups([]); + stubWithLegacyGroups([]); + const convo = ConvoHub.use().getOrCreate(ourNumber, ConversationTypeEnum.PRIVATE); + convo.set('active_at', Date.now()); + await swarmPolling.start(true); + + expect(pollOnceForKeySpy.callCount).to.eq(1); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + }); + + describe('legacy group', () => { + it('does run for group pubkey on start no matter the recent 
timestamp', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUP); + stubWithLegacyGroups([groupPk]); + stubWithGroups([]); + convo.set('active_at', Date.now()); + const groupConvoPubkey = PubKey.cast(groupPk); + swarmPolling.addGroupId(groupConvoPubkey); + await swarmPolling.start(true); + + // our pubkey will be polled for, hence the 2 + expect(pollOnceForKeySpy.callCount).to.eq(2); + + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + }); + + it('does only poll from -10 for closed groups', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUP); + + stubWithLegacyGroups([groupPk]); + stubWithGroups([]); + convo.set('active_at', 1); + swarmPolling.addGroupId(PubKey.cast(groupPk)); + + await swarmPolling.start(true); + + // our pubkey will be polled for, hence the 2 + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + getItemByIdStub.restore(); + getItemByIdStub = TestUtils.stubData('getItemById'); + + getItemByIdStub.resolves(); + }); + + it('does run for group pubkey on start but not another time if activeAt is old ', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + const groupConvo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUP); + + stubWithLegacyGroups([groupPk]); + stubWithGroups([]); + + groupConvo.set('active_at', 1); // really old, but active + swarmPolling.addGroupId(groupPk); + // this calls the stub 2 times, one for our direct pubkey and one for the group + await swarmPolling.start(true); + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + // this should only call the stub one more time: for our direct pubkey but not for the group pubkey + await swarmPolling.pollForAllKeys(); + expect(pollOnceForKeySpy.callCount).to.eq(3); + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + }); + + it('does run twice if activeAt less than one hour ', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUP); + // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event + stubWithLegacyGroups([groupPk]); + stubWithGroups([]); + + convo.set('active_at', Date.now()); + swarmPolling.addGroupId(groupPk); + await swarmPolling.start(true); + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + pollOnceForKeySpy.resetHistory(); + clock.tick(9000); + + // no need to do that as the tick will trigger a call in all cases after 5 secs await swarmPolling.pollForAllKeys(); + /** this is not easy to explain, but + * - during the swarmPolling.start, we get two calls to pollOnceForKeySpy (one for our id and one for group id) + * - the clock ticks 9sec, and another 
call of pollOnceForKeySpy gets started, but as we do not await them, this test fails. + * the only fix is to restore the clock and force a small sleep to let the thing run in bg + */ + + await sleepFor(10); + + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + }); + + it('does run twice if activeAt is inactive and we tick longer than 2 minutes', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUP); + // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event + + stubWithLegacyGroups([groupPk]); + stubWithGroups([]); + pollOnceForKeySpy.resetHistory(); + convo.set('active_at', Date.now()); + swarmPolling.addGroupId(groupPk); + // this calls the stub two times already, one for our direct pubkey and one for the group + await swarmPolling.start(true); + const timeToTick = 3 * 60 * 1000; + swarmPolling.forcePolledTimestamp(groupPk, Date.now() - timeToTick); + // more than a week old, so inactive group but we have to tick after more than 2 min + convo.set('active_at', Date.now() - 7 * 25 * 3600 * 1000); + clock.tick(timeToTick); + /** this is not easy to explain, but + * - during the swarmPolling.start, we get two calls to pollOnceForKeySpy (one for our id and one for group id) + * - the clock ticks, and another call of pollOnceForKeySpy gets started, but as we do not await them, this test fails. + * the only fix is to restore the clock and force a small sleep to let the thing run in bg + */ + await sleepFor(10); + // we should have two more calls here, so 4 total. 
+ expect(pollOnceForKeySpy.callCount).to.eq(4); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); + }); + + it('does run once only if group is inactive and we tick less than 2 minutes ', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUP); + pollOnceForKeySpy.resetHistory(); + + stubWithLegacyGroups([groupPk]); + stubWithGroups([]); + + convo.set('active_at', Date.now()); + swarmPolling.addGroupId(groupPk); + await swarmPolling.start(true); + + // more than a week old, we should not tick after just 5 seconds + convo.set('active_at', Date.now() - 7 * 24 * 3600 * 1000 - 3600 * 1000); + + clock.tick(1 * 60 * 1000); + await sleepFor(10); + + // we should have only one more call here, the one for our direct pubkey fetch + expect(pollOnceForKeySpy.callCount).to.eq(3); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupLegacyArgs(groupPk)); // this one comes from the swarmPolling.start + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + }); + + describe('multiple runs', () => { + let convo: ConversationModel; + let groupConvoPubkey: PubKey; + + beforeEach(async () => { + convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakePubKeyStr(), + ConversationTypeEnum.GROUP + ); + + stubWithLegacyGroups([convo.id]); + stubWithGroups([]); + + convo.set('active_at', Date.now()); + groupConvoPubkey = PubKey.cast(convo.id as string); + swarmPolling.addGroupId(groupConvoPubkey); + await swarmPolling.start(true); + }); + + afterEach(() => { + Sinon.restore(); + ConvoHub.use().reset(); + clock.restore(); + resetHardForkCachedValues(); + }); + + it('does run twice if activeAt is less than 2 days', async () => { + pollOnceForKeySpy.resetHistory(); + // less than 2 days old, this is an active group + convo.set('active_at', Date.now() - 2 * 24 * 3600 * 1000 - 3600 * 1000); + + const timeToTick = 6 * 1000; + + swarmPolling.forcePolledTimestamp(convo.id, timeToTick); + // we tick more than 5 sec + clock.tick(timeToTick); + + await swarmPolling.pollForAllKeys(); + // we have 4 calls total. 
2 for our direct promises run each 5 seconds, and 2 for the group pubkey active (so run every 5 sec too) + expect(pollOnceForKeySpy.callCount).to.eq(4); + // first two calls are our pubkey + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq( + pollOnceForGroupLegacyArgs(groupConvoPubkey.key) + ); + + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq( + pollOnceForGroupLegacyArgs(groupConvoPubkey.key) + ); + }); + + it('does run twice if activeAt is more than 2 days old and we tick more than one minute', async () => { + pollOnceForKeySpy.resetHistory(); + TestUtils.stubWindowLog(); + convo.set('active_at', Date.now() - 2 * 25 * 3600 * 1000); // medium active + // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event + + const timeToTick = 65 * 1000; // more than one minute + swarmPolling.forcePolledTimestamp(convo.id, timeToTick); + clock.tick(timeToTick); // should tick twice more (one more our direct pubkey and one for the group) + + // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event + + await swarmPolling.pollForAllKeys(); + + expect(pollOnceForKeySpy.callCount).to.eq(4); + + // first two calls are our pubkey + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq( + pollOnceForGroupLegacyArgs(groupConvoPubkey.key) + ); + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq( + pollOnceForGroupLegacyArgs(groupConvoPubkey.key) + ); + }); + }); + }); + + describe('03 group', () => { + it('does run for group pubkey on start no matter the recent timestamp', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUPV2); + stubWithLegacyGroups([]); + stubWithGroups([groupPk]); + convo.set('active_at', Date.now()); + const groupConvoPubkey = PubKey.cast(groupPk); + swarmPolling.addGroupId(groupConvoPubkey); + await swarmPolling.start(true); + + // our pubkey will be polled for, hence the 2 + expect(pollOnceForKeySpy.callCount).to.eq(2); + + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + }); + + it('does only poll from -10 for closed groups', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUPV2); + stubWithLegacyGroups([]); + stubWithGroups([groupPk]); + convo.set('active_at', 1); + swarmPolling.addGroupId(PubKey.cast(groupPk)); + + await swarmPolling.start(true); + + // our pubkey will be polled for, hence the 2 + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + getItemByIdStub.restore(); + getItemByIdStub = TestUtils.stubData('getItemById'); + + getItemByIdStub.resolves(); + }); + + it('does run for group pubkey on start but not another time if activeAt is old ', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); 
+ const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUPV2); + stubWithLegacyGroups([]); + stubWithGroups([groupPk]); + + convo.set('active_at', 1); // really old, but active + swarmPolling.addGroupId(groupPk); + // this calls the stub 2 times, one for our direct pubkey and one for the group + await swarmPolling.start(true); + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + // this should only call the stub one more time: for our direct pubkey but not for the group pubkey + await swarmPolling.pollForAllKeys(); + expect(pollOnceForKeySpy.callCount).to.eq(3); + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + }); + + it('does run twice if activeAt less than one hour ', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUPV2); + stubWithLegacyGroups([]); + stubWithGroups([groupPk]); + + convo.set('active_at', Date.now()); + swarmPolling.addGroupId(groupPk); + await swarmPolling.start(true); + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + pollOnceForKeySpy.resetHistory(); + clock.tick(9000); + + // no need to do that as the tick will trigger a call in all cases after 5 secs await swarmPolling.pollForAllKeys(); + /** this is not easy to explain, but + * - during the swarmPolling.start, we get two calls to pollOnceForKeySpy (one for our id and one for group id) + * - the clock ticks 9sec, and another call of pollOnceForKeySpy get started, but as we do not await them, this test fails. + * the only fix is to restore the clock and force the a small sleep to let the thing run in bg + */ + + await sleepFor(10); + + expect(pollOnceForKeySpy.callCount).to.eq(2); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + }); + + it('does run twice if activeAt is inactive and we tick longer than 2 minutes', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUPV2); + stubWithLegacyGroups([]); + stubWithGroups([groupPk]); + + pollOnceForKeySpy.resetHistory(); + convo.set('active_at', Date.now()); + swarmPolling.addGroupId(groupPk); + // this call the stub two times already, one for our direct pubkey and one for the group + await swarmPolling.start(true); + const timeToTick = 3 * 60 * 1000; + swarmPolling.forcePolledTimestamp(groupPk, Date.now() - timeToTick); + // more than week old, so inactive group but we have to tick after more than 2 min + convo.set('active_at', Date.now() - 7 * 25 * 3600 * 1000); + clock.tick(timeToTick); + /** this is not easy to explain, but + * - during the swarmPolling.start, we get two calls to pollOnceForKeySpy (one for our id and one for group od) + * - the clock ticks 9sec, and another call of pollOnceForKeySpy get started, but as we do not await them, this test fails. + * the only fix is to restore the clock and force the a small sleep to let the thing run in bg + */ + await sleepFor(10); + // we should have two more calls here, so 4 total. 
+ expect(pollOnceForKeySpy.callCount).to.eq(4); + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq(pollOnceForGroupArgs(groupPk)); + }); + + it('does run once only if group is inactive and we tick less than 2 minutes ', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + const convo = ConvoHub.use().getOrCreate(groupPk, ConversationTypeEnum.GROUPV2); + stubWithLegacyGroups([]); + stubWithGroups([groupPk]); + + convo.set('active_at', Date.now()); + swarmPolling.addGroupId(groupPk); + await swarmPolling.start(true); + + // more than a week old, we should not tick after just 5 seconds + convo.set('active_at', Date.now() - 7 * 24 * 3600 * 1000 - 3600 * 1000); + + clock.tick(1 * 60 * 1000); + await sleepFor(10); + + // we should have only one more call here, the one for our direct pubkey fetch + expect(pollOnceForKeySpy.callCount).to.eq(3); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq(pollOnceForGroupArgs(groupPk)); // this one comes from the swarmPolling.start + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + }); + + describe('multiple runs', () => { + let convo: ConversationModel; + let groupConvoPubkey: PubKey; + + beforeEach(async () => { + convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 + ); + + stubWithLegacyGroups([]); + stubWithGroups([convo.id as GroupPubkeyType]); + + convo.set('active_at', Date.now()); + groupConvoPubkey = PubKey.cast(convo.id as string); + swarmPolling.addGroupId(groupConvoPubkey); + await swarmPolling.start(true); + }); + + afterEach(() => { + Sinon.restore(); + ConvoHub.use().reset(); + clock.restore(); + resetHardForkCachedValues(); + }); + + it('does run twice if activeAt is less than 2 days', async () => { + pollOnceForKeySpy.resetHistory(); + // less than 2 days old, this is an active group + convo.set('active_at', Date.now() - 2 * 24 * 3600 * 1000 - 3600 * 1000); + + const timeToTick = 6 * 1000; + + swarmPolling.forcePolledTimestamp(convo.id, timeToTick); + // we tick more than 5 sec + clock.tick(timeToTick); + + await swarmPolling.pollForAllKeys(); + // we have 4 calls total. 
2 for our direct promises run each 5 seconds, and 2 for the group pubkey active (so run every 5 sec too) + expect(pollOnceForKeySpy.callCount).to.eq(4); + // first two calls are our pubkey + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq( + pollOnceForGroupArgs(groupConvoPubkey.key as GroupPubkeyType) + ); + + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq( + pollOnceForGroupArgs(groupConvoPubkey.key as GroupPubkeyType) + ); + }); + + it('does run twice if activeAt is more than 2 days old and we tick more than one minute', async () => { + pollOnceForKeySpy.resetHistory(); + TestUtils.stubWindowLog(); + convo.set('active_at', Date.now() - 2 * 25 * 3600 * 1000); // medium active + // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event + + const timeToTick = 65 * 1000; // more than one minute + swarmPolling.forcePolledTimestamp(convo.id, timeToTick); + clock.tick(timeToTick); // should tick twice more (one more our direct pubkey and one for the group) + + // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event + + await swarmPolling.pollForAllKeys(); + + expect(pollOnceForKeySpy.callCount).to.eq(4); + + // first two calls are our pubkey + expect(pollOnceForKeySpy.firstCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.secondCall.args).to.deep.eq( + pollOnceForGroupArgs(groupConvoPubkey.key as GroupPubkeyType) + ); + expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq(pollOnceForUsArgs(ourPubkey.key)); + expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq( + pollOnceForGroupArgs(groupConvoPubkey.key as GroupPubkeyType) + ); + }); + }); + }); +}); diff --git a/ts/test/session/unit/swarm_polling/SwarmPolling_pollingDetails_test.ts b/ts/test/session/unit/swarm_polling/SwarmPolling_pollingDetails_test.ts new file mode 100644 index 0000000000..fd5db020ba --- /dev/null +++ b/ts/test/session/unit/swarm_polling/SwarmPolling_pollingDetails_test.ts @@ -0,0 +1,277 @@ +import { expect } from 'chai'; +import { LegacyGroupInfo, UserGroupsGet } from 'libsession_util_nodejs'; +import Sinon from 'sinon'; +import { getSwarmPollingInstance } from '../../../../session/apis/snode_api'; +import { resetHardForkCachedValues } from '../../../../session/apis/snode_api/hfHandling'; +import { SwarmPolling } from '../../../../session/apis/snode_api/swarmPolling'; +import { SWARM_POLLING_TIMEOUT } from '../../../../session/constants'; +import { PubKey } from '../../../../session/types'; +import { UserUtils } from '../../../../session/utils'; +import { TestUtils } from '../../../test-utils'; +import { stubData } from '../../../test-utils/utils'; +import { ConversationTypeEnum } from '../../../../models/types'; + +describe('getPollingDetails', () => { + // Initialize new stubbed cache + const ourNumber = TestUtils.generateFakePubKeyStr(); + + let swarmPolling: SwarmPolling; + + let clock: Sinon.SinonFakeTimers; + beforeEach(async () => { + TestUtils.stubWindowFeatureFlags(); + TestUtils.stubWindowLog(); + stubData('createOrUpdateItem').resolves(); + + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); + + swarmPolling = getSwarmPollingInstance(); + TestUtils.stubLibSessionWorker(undefined); + + clock = Sinon.useFakeTimers({ now: Date.now(), shouldAdvanceTime: true }); + }); + + 
afterEach(() => { + Sinon.restore(); + clock.restore(); + resetHardForkCachedValues(); + }); + + it('without anything else, we should be part of it', async () => { + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + swarmPolling.resetSwarmPolling(); + + const details = await swarmPolling.getPollingDetails([]); + expect(details.toPollDetails.length).to.be.eq(1); + expect(details.toPollDetails[0][0]).to.be.eq(ourNumber); + }); + + it('throws if polling entries include our pk', async () => { + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + swarmPolling.resetSwarmPolling(); + + const fn = async () => + swarmPolling.getPollingDetails([{ pubkey: PubKey.cast(ourNumber), lastPolledTimestamp: 0 }]); + await expect(fn()).to.be.rejectedWith(''); + }); + + describe("groups not in wrapper should be included in 'to leave' only", () => { + it('legacy group', async () => { + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + const groupPk = TestUtils.generateFakePubKeyStr(); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: 0 }, + ]); + expect(toPollDetails.length).to.be.eq(1); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + + expect(legacyGroupsToLeave.length).to.be.eq(1); + expect(legacyGroupsToLeave[0]).to.be.eq(groupPk); + expect(groupsToLeave.length).to.be.eq(0); + }); + + it('new group NOT in wrapper should be requested for leaving', async () => { + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: 0 }, + ]); + expect(toPollDetails.length).to.be.eq(1); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + + expect(groupsToLeave.length).to.be.eq(1); + expect(groupsToLeave[0]).to.be.eq(groupPk); + expect(legacyGroupsToLeave.length).to.be.eq(0); + }); + }); + + describe('groups in wrapper but polled recently should not be polled and not to leave neither', () => { + it('legacy group', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', [ + { pubkeyHex: groupPk } as LegacyGroupInfo, + ]); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: Date.now() }, + ]); + expect(toPollDetails.length).to.be.eq(1); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + + expect(legacyGroupsToLeave.length).to.be.eq(0); + expect(groupsToLeave.length).to.be.eq(0); + }); + + it('new group', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + 
TestUtils.stubUserGroupWrapper('getAllGroups', [{ pubkeyHex: groupPk } as UserGroupsGet]); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: Date.now() }, + ]); + expect(toPollDetails.length).to.be.eq(1); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + expect(groupsToLeave.length).to.be.eq(0); + expect(legacyGroupsToLeave.length).to.be.eq(0); + }); + }); + + describe("groups in wrapper should be included in 'to poll' only", () => { + it('legacy group in wrapper should be polled', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', [ + { pubkeyHex: groupPk } as LegacyGroupInfo, + ]); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + swarmPolling.resetSwarmPolling(); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: 0 }, + ]); + expect(toPollDetails.length).to.be.eq(2, 'both our and closed group should be polled'); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + expect(toPollDetails[1]).to.be.deep.eq([groupPk, ConversationTypeEnum.GROUP]); + // no groups to leave nor legacy ones + expect(legacyGroupsToLeave.length).to.be.eq(0); + expect(groupsToLeave.length).to.be.eq(0); + }); + + it('new group in wrapper should be polled', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + TestUtils.stubUserGroupWrapper('getAllGroups', [{ pubkeyHex: groupPk } as UserGroupsGet]); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: 0 }, + ]); + + expect(toPollDetails.length).to.be.eq(2); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + expect(toPollDetails[1]).to.be.deep.eq([groupPk, ConversationTypeEnum.GROUPV2]); + // no groups to leave nor legacy ones + expect(legacyGroupsToLeave.length).to.be.eq(0); + expect(groupsToLeave.length).to.be.eq(0); + }); + }); + + describe('multiple groups', () => { + it('one legacy group with a few v2 group not in wrapper', async () => { + const groupPk = TestUtils.generateFakePubKeyStr(); + const groupV2Pk = TestUtils.generateFakeClosedGroupV2PkStr(); + const groupV2Pk2 = TestUtils.generateFakeClosedGroupV2PkStr(); + + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', [ + { pubkeyHex: groupPk } as LegacyGroupInfo, + ]); + TestUtils.stubUserGroupWrapper('getAllGroups', []); + swarmPolling.resetSwarmPolling(); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupV2Pk), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupV2Pk2), lastPolledTimestamp: 0 }, + ]); + expect(toPollDetails.length).to.be.eq(2, 'both our and closed group should be polled'); + 
expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + expect(toPollDetails[1]).to.be.deep.eq([groupPk, ConversationTypeEnum.GROUP]); + expect(legacyGroupsToLeave.length).to.be.eq(0); + expect(groupsToLeave.length).to.be.eq(2); + expect(groupsToLeave[0]).to.be.deep.eq(groupV2Pk); + expect(groupsToLeave[1]).to.be.deep.eq(groupV2Pk2); + }); + + it('new group in wrapper with a few legacy groups not in wrapper', async () => { + const groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + const groupPkLeg1 = TestUtils.generateFakePubKeyStr(); + const groupPkLeg2 = TestUtils.generateFakePubKeyStr(); + + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', []); + TestUtils.stubUserGroupWrapper('getAllGroups', [{ pubkeyHex: groupPk } as UserGroupsGet]); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupPkLeg1), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupPkLeg2), lastPolledTimestamp: 0 }, + ]); + + expect(toPollDetails.length).to.be.eq(2); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + expect(toPollDetails[1]).to.be.deep.eq([groupPk, ConversationTypeEnum.GROUPV2]); + expect(legacyGroupsToLeave.length).to.be.eq(2); + expect(legacyGroupsToLeave[0]).to.be.eq(groupPkLeg1); + expect(legacyGroupsToLeave[1]).to.be.eq(groupPkLeg2); + expect(groupsToLeave.length).to.be.eq(0); + }); + + it('two of each, all should be polled', async () => { + const groupPk1 = TestUtils.generateFakeClosedGroupV2PkStr(); + const groupPk2 = TestUtils.generateFakeClosedGroupV2PkStr(); + const groupPkLeg1 = TestUtils.generateFakePubKeyStr(); + const groupPkLeg2 = TestUtils.generateFakePubKeyStr(); + + TestUtils.stubUserGroupWrapper('getAllLegacyGroups', [ + { pubkeyHex: groupPkLeg1 } as LegacyGroupInfo, + { pubkeyHex: groupPkLeg2 } as LegacyGroupInfo, + ]); + TestUtils.stubUserGroupWrapper('getAllGroups', [ + { pubkeyHex: groupPk1 } as UserGroupsGet, + { pubkeyHex: groupPk2 } as UserGroupsGet, + ]); + + Sinon.stub(swarmPolling, 'getPollingTimeout').returns(SWARM_POLLING_TIMEOUT.ACTIVE); + + const { groupsToLeave, legacyGroupsToLeave, toPollDetails } = + await swarmPolling.getPollingDetails([ + { pubkey: PubKey.cast(groupPk1), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupPk2), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupPkLeg1), lastPolledTimestamp: 0 }, + { pubkey: PubKey.cast(groupPkLeg2), lastPolledTimestamp: 0 }, + ]); + + expect(toPollDetails.length).to.be.eq(5); + expect(toPollDetails[0]).to.be.deep.eq([ourNumber, ConversationTypeEnum.PRIVATE]); + expect(toPollDetails[1]).to.be.deep.eq([groupPkLeg1, ConversationTypeEnum.GROUP]); + expect(toPollDetails[2]).to.be.deep.eq([groupPkLeg2, ConversationTypeEnum.GROUP]); + expect(toPollDetails[3]).to.be.deep.eq([groupPk1, ConversationTypeEnum.GROUPV2]); + expect(toPollDetails[4]).to.be.deep.eq([groupPk2, ConversationTypeEnum.GROUPV2]); + + // no groups to leave nor legacy ones + expect(legacyGroupsToLeave.length).to.be.eq(0); + expect(groupsToLeave.length).to.be.eq(0); + }); + }); +}); diff --git a/ts/test/session/unit/swarm_polling/SwarmPolling_test.ts b/ts/test/session/unit/swarm_polling/SwarmPolling_test.ts index 3b1db55f62..da3062033d 100644 --- a/ts/test/session/unit/swarm_polling/SwarmPolling_test.ts +++ 
b/ts/test/session/unit/swarm_polling/SwarmPolling_test.ts @@ -3,21 +3,19 @@ import { describe } from 'mocha'; import Sinon, * as sinon from 'sinon'; import chaiAsPromised from 'chai-as-promised'; -import { ConversationModel } from '../../../../models/conversation'; -import { getSwarmPollingInstance, SnodePool } from '../../../../session/apis/snode_api'; +import { getSwarmPollingInstance } from '../../../../session/apis/snode_api'; import { resetHardForkCachedValues } from '../../../../session/apis/snode_api/hfHandling'; import { SnodeAPIRetrieve } from '../../../../session/apis/snode_api/retrieveRequest'; import { SwarmPolling } from '../../../../session/apis/snode_api/swarmPolling'; import { SWARM_POLLING_TIMEOUT } from '../../../../session/constants'; -import { getConversationController } from '../../../../session/conversations'; import { PubKey } from '../../../../session/types'; import { UserUtils } from '../../../../session/utils'; -import { ConfigurationSync } from '../../../../session/utils/job_runners/jobs/ConfigurationSyncJob'; -import { sleepFor } from '../../../../session/utils/Promise'; -import { UserGroupsWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { UserSync } from '../../../../session/utils/job_runners/jobs/UserSyncJob'; import { TestUtils } from '../../../test-utils'; import { generateFakeSnodes, stubData } from '../../../test-utils/utils'; import { ConversationTypeEnum } from '../../../../models/types'; +import { ConvoHub } from '../../../../session/conversations'; +import { SnodePool } from '../../../../session/apis/snode_api/snodePool'; chai.use(chaiAsPromised as any); chai.should(); @@ -26,49 +24,43 @@ const { expect } = chai; describe('SwarmPolling', () => { // Initialize new stubbed cache - const ourPubkey = TestUtils.generateFakePubKey(); - const ourNumber = ourPubkey.key; - - let pollOnceForKeySpy: Sinon.SinonSpy; + const ourNumber = TestUtils.generateFakePubKeyStr(); + const ourPubkey = PubKey.cast(ourNumber); let swarmPolling: SwarmPolling; - let getItemByIdStub: Sinon.SinonStub; - let clock: Sinon.SinonFakeTimers; beforeEach(async () => { - getConversationController().reset(); + ConvoHub.use().reset(); TestUtils.stubWindowFeatureFlags(); - Sinon.stub(ConfigurationSync, 'queueNewJobIfNeeded').resolves(); + Sinon.stub(UserSync, 'queueNewJobIfNeeded').resolves(); // Utils Stubs Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(ourNumber); stubData('getAllConversations').resolves([]); - getItemByIdStub = TestUtils.stubData('getItemById'); stubData('saveConversation').resolves(); stubData('getSwarmNodesForPubkey').resolves(); stubData('getLastHashBySnode').resolves(); Sinon.stub(SnodePool, 'getSwarmFor').resolves(generateFakeSnodes(5)); - Sinon.stub(SnodeAPIRetrieve, 'retrieveNextMessages').resolves([]); + Sinon.stub(SnodeAPIRetrieve, 'retrieveNextMessagesNoRetries').resolves([]); TestUtils.stubWindow('inboxStore', undefined); TestUtils.stubWindow('getGlobalOnlineStatus', () => true); TestUtils.stubWindowLog(); - const convoController = getConversationController(); + const convoController = ConvoHub.use(); await convoController.load(); - getConversationController().getOrCreate(ourPubkey.key, ConversationTypeEnum.PRIVATE); + ConvoHub.use().getOrCreate(ourPubkey.key, ConversationTypeEnum.PRIVATE); swarmPolling = getSwarmPollingInstance(); swarmPolling.resetSwarmPolling(); - pollOnceForKeySpy = Sinon.spy(swarmPolling, 'pollOnceForKey'); clock = sinon.useFakeTimers({ now: Date.now(), shouldAdvanceTime: true 
}); }); afterEach(() => { Sinon.restore(); - getConversationController().reset(); + ConvoHub.use().reset(); clock.restore(); resetHardForkCachedValues(); }); @@ -85,7 +77,7 @@ describe('SwarmPolling', () => { describe('legacy groups', () => { it('returns ACTIVE for convo with less than two days old activeAt', () => { - const convo = getConversationController().getOrCreate( + const convo = ConvoHub.use().getOrCreate( TestUtils.generateFakePubKeyStr(), ConversationTypeEnum.GROUP ); @@ -96,7 +88,7 @@ describe('SwarmPolling', () => { }); it('returns INACTIVE for convo with undefined activeAt', () => { - const convo = getConversationController().getOrCreate( + const convo = ConvoHub.use().getOrCreate( TestUtils.generateFakePubKeyStr(), ConversationTypeEnum.GROUP ); @@ -107,7 +99,7 @@ describe('SwarmPolling', () => { }); it('returns MEDIUM_ACTIVE for convo with activeAt of more than 2 days but less than a week old', () => { - const convo = getConversationController().getOrCreate( + const convo = ConvoHub.use().getOrCreate( TestUtils.generateFakePubKeyStr(), ConversationTypeEnum.GROUP ); @@ -123,7 +115,7 @@ describe('SwarmPolling', () => { }); it('returns INACTIVE for convo with activeAt of more than a week', () => { - const convo = getConversationController().getOrCreate( + const convo = ConvoHub.use().getOrCreate( TestUtils.generateFakePubKeyStr(), ConversationTypeEnum.GROUP ); @@ -134,11 +126,11 @@ describe('SwarmPolling', () => { }); }); - describe('groupv3', () => { + describe('groupv2', () => { it('returns ACTIVE for convo with less than two days old activeAt', () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakeClosedGroupV3PkStr(), - ConversationTypeEnum.GROUPV3 + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 ); convo.set('active_at', Date.now() - 2 * 23 * 3600 * 1000); // 23 * 2 = 46 hours old expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( @@ -147,9 +139,9 @@ describe('SwarmPolling', () => { }); it('returns INACTIVE for convo with undefined activeAt', () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakeClosedGroupV3PkStr(), - ConversationTypeEnum.GROUPV3 + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 ); convo.set('active_at', undefined); expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( @@ -158,9 +150,9 @@ describe('SwarmPolling', () => { }); it('returns MEDIUM_ACTIVE for convo with activeAt of more than 2 days but less than a week old', () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakeClosedGroupV3PkStr(), - ConversationTypeEnum.GROUPV3 + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 ); convo.set('active_at', Date.now() - 1000 * 3600 * 25 * 2); // 25 hours x 2 = 50 hours old expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( @@ -174,9 +166,9 @@ describe('SwarmPolling', () => { }); it('returns INACTIVE for convo with activeAt of more than a week', () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakeClosedGroupV3PkStr(), - ConversationTypeEnum.GROUPV3 + const convo = ConvoHub.use().getOrCreate( + TestUtils.generateFakeClosedGroupV2PkStr(), + ConversationTypeEnum.GROUPV2 ); convo.set('active_at', Date.now() - 1000 * 3600 * 24 * 8); // 8 
days expect(swarmPolling.getPollingTimeout(PubKey.cast(convo.id as string))).to.eq( @@ -185,267 +177,4 @@ describe('SwarmPolling', () => { }); }); }); - - describe('pollForAllKeys', () => { - beforeEach(() => { - stubData('createOrUpdateItem').resolves(); - }); - afterEach(() => { - Sinon.restore(); - }); - it('does run for our pubkey even if activeAt is really old ', async () => { - const convo = getConversationController().getOrCreate( - ourNumber, - ConversationTypeEnum.PRIVATE - ); - convo.set('active_at', Date.now() - 1000 * 3600 * 25); - await swarmPolling.start(true); - - expect(pollOnceForKeySpy.callCount).to.eq(1); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - }); - - it('does run for our pubkey even if activeAt is recent ', async () => { - const convo = getConversationController().getOrCreate( - ourNumber, - ConversationTypeEnum.PRIVATE - ); - convo.set('active_at', Date.now()); - await swarmPolling.start(true); - - expect(pollOnceForKeySpy.callCount).to.eq(1); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - }); - - describe('legacy group', () => { - it('does run for group pubkey on start no matter the recent timestamp', async () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - TestUtils.stubLibSessionWorker(undefined); - convo.set('active_at', Date.now()); - const groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - await swarmPolling.start(true); - - // our pubkey will be polled for, hence the 2 - expect(pollOnceForKeySpy.callCount).to.eq(2); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - }); - - it('does only poll from -10 for closed groups if HF >= 19.1 ', async () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - TestUtils.stubLibSessionWorker(undefined); - getItemByIdStub.restore(); - getItemByIdStub = TestUtils.stubData('getItemById'); - getItemByIdStub - .withArgs('hasSeenHardfork190') - .resolves({ id: 'hasSeenHardfork190', value: true }) - .withArgs('hasSeenHardfork191') - .resolves({ id: 'hasSeenHardfork191', value: true }); - - convo.set('active_at', 1); - const groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - - await swarmPolling.start(true); - - // our pubkey will be polled for, hence the 2 - expect(pollOnceForKeySpy.callCount).to.eq(2); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - getItemByIdStub.restore(); - getItemByIdStub = TestUtils.stubData('getItemById'); - - getItemByIdStub.resolves(); - }); - - it('does run for group pubkey on start but not another time if activeAt is old ', async () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - TestUtils.stubLibSessionWorker(undefined); - - convo.set('active_at', 1); // really old, but active - const groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - - // this calls the stub 2 times, one for our direct pubkey and one for the group - await swarmPolling.start(true); - 
- // this should only call the stub one more time: for our direct pubkey but not for the group pubkey - await swarmPolling.pollForAllKeys(); - - expect(pollOnceForKeySpy.callCount).to.eq(3); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - }); - - it('does run twice if activeAt less than one hour ', async () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - - // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event - Sinon.stub(UserGroupsWrapperActions, 'getLegacyGroup').resolves({} as any); - - convo.set('active_at', Date.now()); - const groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - await swarmPolling.start(true); - expect(pollOnceForKeySpy.callCount).to.eq(2); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - pollOnceForKeySpy.resetHistory(); - clock.tick(9000); - - // no need to do that as the tick will trigger a call in all cases after 5 secs await swarmPolling.pollForAllKeys(); - /** this is not easy to explain, but - * - during the swarmPolling.start, we get two calls to pollOnceForKeySpy (one for our id and one for group id) - * - the clock ticks 9sec, and another call of pollOnceForKeySpy get started, but as we do not await them, this test fails. - * the only fix is to restore the clock and force the a small sleep to let the thing run in bg - */ - - await sleepFor(10); - - expect(pollOnceForKeySpy.callCount).to.eq(2); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - }); - - it('does run twice if activeAt is inactive and we tick longer than 2 minutes', async () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event - Sinon.stub(UserGroupsWrapperActions, 'getLegacyGroup').resolves({} as any); - pollOnceForKeySpy.resetHistory(); - convo.set('active_at', Date.now()); - const groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - // this call the stub two times already, one for our direct pubkey and one for the group - await swarmPolling.start(true); - const timeToTick = 3 * 60 * 1000; - swarmPolling.forcePolledTimestamp(groupConvoPubkey, Date.now() - timeToTick); - // more than week old, so inactive group but we have to tick after more than 2 min - convo.set('active_at', Date.now() - 7 * 25 * 3600 * 1000); - clock.tick(timeToTick); - /** this is not easy to explain, but - * - during the swarmPolling.start, we get two calls to pollOnceForKeySpy (one for our id and one for group od) - * - the clock ticks 9sec, and another call of pollOnceForKeySpy get started, but as we do not await them, this test fails. - * the only fix is to restore the clock and force the a small sleep to let the thing run in bg - */ - await sleepFor(10); - // we should have two more calls here, so 4 total. 
- expect(pollOnceForKeySpy.callCount).to.eq(4); - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq([groupConvoPubkey, true, [-10]]); - }); - - it('does run once only if group is inactive and we tick less than 2 minutes ', async () => { - const convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - pollOnceForKeySpy.resetHistory(); - TestUtils.stubLibSessionWorker(undefined); - convo.set('active_at', Date.now()); - const groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - await swarmPolling.start(true); - - // more than a week old, we should not tick after just 5 seconds - convo.set('active_at', Date.now() - 7 * 24 * 3600 * 1000 - 3600 * 1000); - - clock.tick(1 * 60 * 1000); - await sleepFor(10); - - // we should have only one more call here, the one for our direct pubkey fetch - expect(pollOnceForKeySpy.callCount).to.eq(3); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); // this one comes from the swarmPolling.start - expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - }); - - describe('multiple runs', () => { - let convo: ConversationModel; - let groupConvoPubkey: PubKey; - - beforeEach(async () => { - convo = getConversationController().getOrCreate( - TestUtils.generateFakePubKeyStr(), - ConversationTypeEnum.GROUP - ); - TestUtils.stubLibSessionWorker({}); - - convo.set('active_at', Date.now()); - groupConvoPubkey = PubKey.cast(convo.id as string); - swarmPolling.addGroupId(groupConvoPubkey); - await swarmPolling.start(true); - }); - - afterEach(() => { - Sinon.restore(); - getConversationController().reset(); - clock.restore(); - resetHardForkCachedValues(); - }); - - it('does run twice if activeAt is less than 2 days', async () => { - pollOnceForKeySpy.resetHistory(); - // less than 2 days old, this is an active group - convo.set('active_at', Date.now() - 2 * 24 * 3600 * 1000 - 3600 * 1000); - - const timeToTick = 6 * 1000; - - swarmPolling.forcePolledTimestamp(convo.id, timeToTick); - // we tick more than 5 sec - clock.tick(timeToTick); - - await swarmPolling.pollForAllKeys(); - // we have 4 calls total. 
2 for our direct promises run each 5 seconds, and 2 for the group pubkey active (so run every 5 sec too) - expect(pollOnceForKeySpy.callCount).to.eq(4); - // first two calls are our pubkey - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - - expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq([groupConvoPubkey, true, [-10]]); - }); - - it('does run twice if activeAt is more than 2 days old and we tick more than one minute', async () => { - pollOnceForKeySpy.resetHistory(); - TestUtils.stubWindowLog(); - convo.set('active_at', Date.now() - 2 * 25 * 3600 * 1000); // medium active - // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event - - const timeToTick = 65 * 1000; // more than one minute - swarmPolling.forcePolledTimestamp(convo.id, timeToTick); - clock.tick(timeToTick); // should tick twice more (one more our direct pubkey and one for the group) - - // fake that the group is part of the wrapper otherwise we stop tracking it after the first polling event - - await swarmPolling.pollForAllKeys(); - - expect(pollOnceForKeySpy.callCount).to.eq(4); - - // first two calls are our pubkey - expect(pollOnceForKeySpy.firstCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.secondCall.args).to.deep.eq([groupConvoPubkey, true, [-10]]); - expect(pollOnceForKeySpy.thirdCall.args).to.deep.eq([ourPubkey, false, [0, 2, 3, 5, 4]]); - expect(pollOnceForKeySpy.getCalls()[3].args).to.deep.eq([groupConvoPubkey, true, [-10]]); - }); - }); - }); - }); }); diff --git a/ts/test/session/unit/types/PubKey_test.ts b/ts/test/session/unit/types/PubKey_test.ts new file mode 100644 index 0000000000..ead911b60c --- /dev/null +++ b/ts/test/session/unit/types/PubKey_test.ts @@ -0,0 +1,73 @@ +import { expect } from 'chai'; +import { PubKey } from '../../../../session/types'; + +const defaultErr = 'Invalid pubkey string passed'; + +describe('PubKey constructor', () => { + it('does not throw with 05 prefix, right length and only hex chars', () => { + expect(() => new PubKey(`05${'0'.repeat(64)}`)).to.not.throw(); + }); + + it('does not throw with 15 prefix, right length and only hex chars', () => { + expect(() => new PubKey(`15${'0'.repeat(64)}`)).to.not.throw(); + }); + it('does not throw with 03 prefix, right length and only hex chars', () => { + expect(() => new PubKey(`03${'0'.repeat(64)}`)).to.not.throw(); + }); + it('does not throw with 25 prefix, right length and only hex chars', () => { + expect(() => new PubKey(`25${'0'.repeat(64)}`)).to.not.throw(); + }); + it('does not throw with 05 and textsecure prefix, right length and only hex chars', () => { + expect(() => new PubKey(`__textsecure_group__!05${'0'.repeat(64)}`)).to.not.throw(); + }); + + it('throws with null', () => { + expect(() => new PubKey(null as any)).to.throw(defaultErr); + }); + + it('throws with undefined', () => { + expect(() => new PubKey(undefined as any)).to.throw(defaultErr); + }); + + it('throws with empty string', () => { + expect(() => new PubKey('')).to.throw(defaultErr); + }); + it('throws with incorrect prefix', () => { + expect(() => new PubKey(`95${'0'.repeat(64)}`)).to.throw(defaultErr); + }); + + describe('05 prefix', () => { + it('throws with non-hex chars', () => { + expect(() => new 
PubKey(`05${'0'.repeat(63)}(`)).to.throw(defaultErr); + }); + + it('throws with incorrect length', () => { + expect(() => new PubKey(`05${'0'.repeat(63)}`)).to.throw(defaultErr); + }); + + // Currently we allow pubkeys of length 52 if they have a length of + // it('throws with incorrect length -2', () => { + // expect(() => new PubKey(`05${'0'.repeat(62)}`)).to.throw(defaultErr); + // }); + }); + + describe('25 prefix', () => { + it('throws with non-hex chars', () => { + expect(() => new PubKey(`25${'0'.repeat(63)}(`)).to.throw(defaultErr); + }); + + it('throws with incorrect length -1', () => { + expect(() => new PubKey(`25${'0'.repeat(63)}`)).to.throw(defaultErr); + }); + }); + + describe('03 prefix', () => { + it('throws with non-hex chars', () => { + expect(() => new PubKey(`03${'0'.repeat(63)}(`)).to.throw(defaultErr); + }); + + it('throws with incorrect length -1', () => { + expect(() => new PubKey(`03${'0'.repeat(63)}`)).to.throw(defaultErr); + }); + }); +}); diff --git a/ts/test/session/unit/updater/updater_test.ts b/ts/test/session/unit/updater/updater_test.ts index c6880182dd..e3354b011b 100644 --- a/ts/test/session/unit/updater/updater_test.ts +++ b/ts/test/session/unit/updater/updater_test.ts @@ -2,6 +2,8 @@ import path from 'path'; import { readFileSync } from 'fs-extra'; import { isEmpty } from 'lodash'; +import { expect } from 'chai'; +import { enableLogRedirect } from '../../../test-utils/utils'; describe('Updater', () => { it.skip('isUpdateAvailable', () => {}); @@ -18,4 +20,11 @@ describe('Updater', () => { ); } }); + + it('stubWindowLog is set to false before pushing', () => { + expect(enableLogRedirect).to.be.eq( + false, + 'If you see this message, just set `enableLogRedirect` to false in `ts/test/test-utils/utils/stubbing.ts' + ); + }); }); diff --git a/ts/test/session/unit/utils/Messages_test.ts b/ts/test/session/unit/utils/Messages_test.ts index 4f8ba3b4ff..79e7eb00d5 100644 --- a/ts/test/session/unit/utils/Messages_test.ts +++ b/ts/test/session/unit/utils/Messages_test.ts @@ -1,30 +1,21 @@ /* eslint-disable no-unused-expressions */ import chai from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import { beforeEach } from 'mocha'; import Sinon from 'sinon'; -import { ConfigurationMessage } from '../../../../session/messages/outgoing/controlMessage/ConfigurationMessage'; import { ClosedGroupVisibleMessage } from '../../../../session/messages/outgoing/visibleMessage/ClosedGroupVisibleMessage'; import { PubKey } from '../../../../session/types'; -import { MessageUtils, UserUtils } from '../../../../session/utils'; +import { MessageUtils } from '../../../../session/utils'; import { TestUtils } from '../../../test-utils'; -import { OpenGroupData } from '../../../../data/opengroups'; import { SignalService } from '../../../../protobuf'; -import { getOpenGroupV2ConversationId } from '../../../../session/apis/open_group_api/utils/OpenGroupUtils'; import { SnodeNamespaces } from '../../../../session/apis/snode_api/namespaces'; -import { getConversationController } from '../../../../session/conversations'; import { ClosedGroupAddedMembersMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupAddedMembersMessage'; import { ClosedGroupEncryptionPairMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairMessage'; import { ClosedGroupEncryptionPairReplyMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupEncryptionPairReplyMessage'; import { 
ClosedGroupNameChangeMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupNameChangeMessage'; import { ClosedGroupNewMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupNewMessage'; import { ClosedGroupRemovedMembersMessage } from '../../../../session/messages/outgoing/controlMessage/group/ClosedGroupRemovedMembersMessage'; -import { getCurrentConfigurationMessage } from '../../../../session/utils/sync/syncUtils'; -import { stubData, stubOpenGroupData } from '../../../test-utils/utils'; -import { OpenGroupV2Room } from '../../../../data/types'; -import { ConversationTypeEnum } from '../../../../models/types'; chai.use(chaiAsPromised as any); @@ -51,7 +42,7 @@ describe('Message Utils', () => { SnodeNamespaces.UserContacts ); - expect(Object.keys(rawMessage)).to.have.length(6); + expect(Object.keys(rawMessage)).to.have.length(7); expect(rawMessage.identifier).to.exist; expect(rawMessage.namespace).to.exist; @@ -59,23 +50,21 @@ describe('Message Utils', () => { expect(rawMessage.encryption).to.exist; expect(rawMessage.plainTextBuffer).to.exist; expect(rawMessage.ttl).to.exist; + expect(rawMessage.networkTimestampCreated).to.exist; expect(rawMessage.identifier).to.equal(message.identifier); expect(rawMessage.device).to.equal(device.key); expect(rawMessage.plainTextBuffer).to.deep.equal(message.plainTextBuffer()); expect(rawMessage.ttl).to.equal(message.ttl()); expect(rawMessage.namespace).to.equal(3); + expect(rawMessage.networkTimestampCreated).to.eq(message.createAtNetworkTimestamp); }); it('should generate valid plainTextBuffer', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); const rawBuffer = rawMessage.plainTextBuffer; const rawBufferJSON = JSON.stringify(rawBuffer); @@ -95,11 +84,7 @@ describe('Message Utils', () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); const derivedPubKey = PubKey.from(rawMessage.device); expect(derivedPubKey).to.not.be.eq(undefined, 'should maintain pubkey'); @@ -111,31 +96,22 @@ describe('Message Utils', () => { it('should set encryption to ClosedGroup if a ClosedGroupVisibleMessage is passed in', async () => { const device = TestUtils.generateFakePubKey(); - const groupId = TestUtils.generateFakePubKey(); + const groupId = TestUtils.generateFakePubKeyStr(); const chatMessage = TestUtils.generateVisibleMessage(); const message = new ClosedGroupVisibleMessage({ groupId, - timestamp: Date.now(), chatMessage, ...sharedNoExpire, }); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE); }); it('should set encryption to Fallback on other messages', async () => { const device = TestUtils.generateFakePubKey(); const message = TestUtils.generateVisibleMessage(); - const rawMessage = await MessageUtils.toRawMessage( - device, - message, - 
SnodeNamespaces.UserMessages - ); + const rawMessage = await MessageUtils.toRawMessage(device, message, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.SESSION_MESSAGE); }); @@ -145,7 +121,7 @@ describe('Message Utils', () => { const member = TestUtils.generateFakePubKey().key; const msg = new ClosedGroupNewMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), name: 'df', members: [member], admins: [member], @@ -154,7 +130,7 @@ describe('Message Utils', () => { ...sharedNoExpire, expireTimer: 0, }); - const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); + const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.SESSION_MESSAGE); }); @@ -162,12 +138,12 @@ describe('Message Utils', () => { const device = TestUtils.generateFakePubKey(); const msg = new ClosedGroupNameChangeMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), name: 'df', groupId: TestUtils.generateFakePubKey().key, ...sharedNoExpire, }); - const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); + const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE); }); @@ -175,12 +151,12 @@ describe('Message Utils', () => { const device = TestUtils.generateFakePubKey(); const msg = new ClosedGroupAddedMembersMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), addedMembers: [TestUtils.generateFakePubKey().key], groupId: TestUtils.generateFakePubKey().key, ...sharedNoExpire, }); - const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); + const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE); }); @@ -188,12 +164,12 @@ describe('Message Utils', () => { const device = TestUtils.generateFakePubKey(); const msg = new ClosedGroupRemovedMembersMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), removedMembers: [TestUtils.generateFakePubKey().key], groupId: TestUtils.generateFakePubKey().key, ...sharedNoExpire, }); - const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); + const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE); }); @@ -209,12 +185,12 @@ describe('Message Utils', () => { }) ); const msg = new ClosedGroupEncryptionPairMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), groupId: TestUtils.generateFakePubKey().key, encryptedKeyPairs: fakeWrappers, ...sharedNoExpire, }); - const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); + const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.CLOSED_GROUP_MESSAGE); }); @@ -230,106 +206,13 @@ describe('Message Utils', () => { }) ); const msg = new ClosedGroupEncryptionPairReplyMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), groupId: TestUtils.generateFakePubKey().key, encryptedKeyPairs: fakeWrappers, ...sharedNoExpire, }); - const rawMessage = await 
MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); - expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.SESSION_MESSAGE); - }); - - it('passing a ConfigurationMessage returns Fallback', async () => { - const device = TestUtils.generateFakePubKey(); - - const msg = new ConfigurationMessage({ - timestamp: Date.now(), - activeOpenGroups: [], - activeClosedGroups: [], - displayName: 'displayName', - contacts: [], - }); - const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.UserMessages); + const rawMessage = await MessageUtils.toRawMessage(device, msg, SnodeNamespaces.Default); expect(rawMessage.encryption).to.equal(SignalService.Envelope.Type.SESSION_MESSAGE); }); }); - - describe('getCurrentConfigurationMessage', () => { - const ourNumber = TestUtils.generateFakePubKey().key; - - beforeEach(async () => { - Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').resolves(ourNumber); - Sinon.stub(UserUtils, 'getOurPubKeyFromCache').resolves(PubKey.cast(ourNumber)); - stubData('getAllConversations').resolves([]); - stubData('saveConversation').resolves(); - stubOpenGroupData('getAllV2OpenGroupRooms').resolves(); - TestUtils.stubData('getItemById').callsFake(async () => { - return { value: '[]' }; - }); - getConversationController().reset(); - - await getConversationController().load(); - }); - - afterEach(() => { - Sinon.restore(); - }); - - // open groups are actually removed when we leave them so this doesn't make much sense, but just in case we break something later - it('filter out non active open groups', async () => { - await getConversationController().getOrCreateAndWait( - '05123456789', - ConversationTypeEnum.PRIVATE - ); - await getConversationController().getOrCreateAndWait( - '0512345678', - ConversationTypeEnum.PRIVATE - ); - - const convoId3 = getOpenGroupV2ConversationId('http://chat-dev2.lokinet.org', 'fish'); - const convoId4 = getOpenGroupV2ConversationId('http://chat-dev3.lokinet.org', 'fish2'); - const convoId5 = getOpenGroupV2ConversationId('http://chat-dev3.lokinet.org', 'fish3'); - - const convo3 = await getConversationController().getOrCreateAndWait( - convoId3, - ConversationTypeEnum.GROUP - ); - convo3.set({ active_at: Date.now() }); - - stubOpenGroupData('getV2OpenGroupRoom') - .returns(null) - .withArgs(convoId3) - .returns({ - serverUrl: 'http://chat-dev2.lokinet.org', - roomId: 'fish', - serverPublicKey: 'serverPublicKey', - } as OpenGroupV2Room); - - const convo4 = await getConversationController().getOrCreateAndWait( - convoId4, - ConversationTypeEnum.GROUP - ); - convo4.set({ active_at: undefined }); - - await OpenGroupData.opengroupRoomsLoad(); - const convo5 = await getConversationController().getOrCreateAndWait( - convoId5, - ConversationTypeEnum.GROUP - ); - convo5.set({ active_at: 0 }); - - await getConversationController().getOrCreateAndWait( - '051234567', - ConversationTypeEnum.PRIVATE - ); - const convos = getConversationController().getConversations(); - - // convoID3 is active but 4 and 5 are not - const configMessage = await getCurrentConfigurationMessage(convos); - expect(configMessage.activeOpenGroups.length).to.equal(1); - expect(configMessage.activeOpenGroups[0]).to.equal( - 'http://chat-dev2.lokinet.org/fish?public_key=serverPublicKey' - ); - }); - }); }); diff --git a/ts/test/session/unit/utils/OpenGroupUtils_test.ts b/ts/test/session/unit/utils/OpenGroupUtils_test.ts index a86cd4b744..0dc372ffd9 100644 --- a/ts/test/session/unit/utils/OpenGroupUtils_test.ts +++ 
b/ts/test/session/unit/utils/OpenGroupUtils_test.ts @@ -96,7 +96,7 @@ describe('OpenGroupUtils', () => { serverPublicKey: '', serverUrl: 'https://example.org', }) - ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); }); it('throws if serverUrl is empty', () => { @@ -106,7 +106,7 @@ describe('OpenGroupUtils', () => { serverPublicKey: '05123456789', serverUrl: '', }) - ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); }); it('throws if roomId is empty', () => { @@ -116,7 +116,7 @@ describe('OpenGroupUtils', () => { serverPublicKey: '05123456789', serverUrl: 'https://example.org', }) - ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); }); it('throws if pubkey is null', () => { expect(() => @@ -125,7 +125,7 @@ describe('OpenGroupUtils', () => { serverPublicKey: null as any, serverUrl: 'https://example.org', }) - ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); }); it('throws if serverUrl is null', () => { @@ -135,7 +135,7 @@ describe('OpenGroupUtils', () => { serverPublicKey: '05123456789', serverUrl: null as any, }) - ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); }); it('throws if roomId is null', () => { @@ -145,7 +145,7 @@ describe('OpenGroupUtils', () => { serverPublicKey: '05123456789', serverUrl: 'https://example.org', }) - ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomid and serverUrl to be set'); + ).to.throw('getCompleteUrlFromRoom needs serverPublicKey, roomId and serverUrl to be set'); }); }); }); diff --git a/ts/test/session/unit/utils/Promise_test.ts b/ts/test/session/unit/utils/Promise_test.ts index 80f0c5ae01..43ec7e439b 100644 --- a/ts/test/session/unit/utils/Promise_test.ts +++ b/ts/test/session/unit/utils/Promise_test.ts @@ -1,6 +1,6 @@ import chai from 'chai'; -import Sinon, * as sinon from 'sinon'; import chaiAsPromised from 'chai-as-promised'; +import Sinon, * as sinon from 'sinon'; import { PromiseUtils } from '../../../../session/utils'; @@ -10,7 +10,6 @@ import { sleepFor, } from '../../../../session/utils/Promise'; import { TestUtils } from '../../../test-utils'; -import { enableLogRedirect } from '../../../test-utils/utils'; chai.use(chaiAsPromised as any); chai.should(); @@ -207,11 +206,4 @@ describe('Promise Utils', () => { expect(hasAlreadyOneAtaTimeMatching('testing2')).to.be.eq(false, 'should be false'); }); }); - - it('stubWindowLog is set to false before pushing', () => { - expect( - enableLogRedirect, - 'If you see this message, just set `enableLogRedirect` to false in `ts/test/test-utils/utils/stubbing.ts`' - ).to.be.eq(false); - }); }); diff --git a/ts/test/session/unit/utils/job_runner/JobRunner_test.ts b/ts/test/session/unit/utils/job_runner/JobRunner_test.ts index 72bfc70a40..96b5b66b7e 100644 --- a/ts/test/session/unit/utils/job_runner/JobRunner_test.ts +++ b/ts/test/session/unit/utils/job_runner/JobRunner_test.ts @@ -2,15 +2,15 @@ import { expect } from 'chai'; 
import { isUndefined } from 'lodash'; import Sinon from 'sinon'; import { v4 } from 'uuid'; +import { sleepFor } from '../../../../../session/utils/Promise'; import { PersistedJobRunner } from '../../../../../session/utils/job_runners/JobRunner'; -import { FakeSleepForJob, FakeSleepForMultiJob } from './FakeSleepForJob'; import { FakeSleepForMultiJobData, FakeSleepJobData, } from '../../../../../session/utils/job_runners/PersistedJob'; -import { sleepFor } from '../../../../../session/utils/Promise'; -import { stubData } from '../../../../test-utils/utils'; import { TestUtils } from '../../../../test-utils'; +import { stubData } from '../../../../test-utils/utils'; +import { FakeSleepForJob, FakeSleepForMultiJob } from './FakeSleepForJob'; function getFakeSleepForJob(timestamp: number): FakeSleepForJob { const job = new FakeSleepForJob({ @@ -201,33 +201,19 @@ describe('JobRunner', () => { expect(runnerMulti.getJobList()).to.deep.eq([job.serializeJob(), job2.serializeJob()]); expect(runnerMulti.getCurrentJobIdentifier()).to.be.equal(job.persistedData.identifier); - console.info( - 'runnerMulti.getJobList() initial', - runnerMulti.getJobList().map(m => m.identifier), - Date.now() - ); - console.info('=========== awaiting first job =========='); - // each job takes 5s to finish, so let's tick once the first one should be done clock.tick(5000); expect(runnerMulti.getCurrentJobIdentifier()).to.be.equal(job.persistedData.identifier); let awaited = await runnerMulti.waitCurrentJob(); expect(awaited).to.eq('await'); await sleepFor(10); - - console.info('=========== awaited first job =========='); expect(runnerMulti.getCurrentJobIdentifier()).to.be.equal(job2.persistedData.identifier); - - console.info('=========== awaiting second job =========='); - clock.tick(5000); awaited = await runnerMulti.waitCurrentJob(); expect(awaited).to.eq('await'); await sleepFor(10); // those sleep for is just to let the runner the time to finish writing the tests to the DB and exit the handling of the previous test - console.info('=========== awaited second job =========='); - expect(runnerMulti.getCurrentJobIdentifier()).to.eq(null); expect(runnerMulti.getJobList()).to.deep.eq([]); @@ -246,27 +232,22 @@ describe('JobRunner', () => { expect(runnerMulti.getCurrentJobIdentifier()).to.be.equal(job.persistedData.identifier); clock.tick(5000); - console.info('=========== awaiting first job =========='); await runnerMulti.waitCurrentJob(); // just give some time for the runnerMulti to pick up a new job await sleepFor(10); expect(runnerMulti.getJobList()).to.deep.eq([]); expect(runnerMulti.getCurrentJobIdentifier()).to.be.equal(null); - console.info('=========== awaited first job =========='); // the first job should already be finished now result = await runnerMulti.addJob(job2); expect(result).to.eq('job_started'); expect(runnerMulti.getJobList()).to.deep.eq([job2.serializeJob()]); - console.info('=========== awaiting second job =========='); - // each job takes 5s to finish, so let's tick once the first one should be done clock.tick(5010); await runnerMulti.waitCurrentJob(); await sleepFor(10); - console.info('=========== awaited second job =========='); expect(runnerMulti.getJobList()).to.deep.eq([]); }); diff --git a/ts/test/session/unit/utils/job_runner/group_sync_job/GroupSyncJob_test.ts b/ts/test/session/unit/utils/job_runner/group_sync_job/GroupSyncJob_test.ts new file mode 100644 index 0000000000..bb7951f2ae --- /dev/null +++ b/ts/test/session/unit/utils/job_runner/group_sync_job/GroupSyncJob_test.ts @@ -0,0 
+1,389 @@ +import { expect } from 'chai'; +import { GroupPubkeyType, UserGroupsGet } from 'libsession_util_nodejs'; +import { omit } from 'lodash'; +import Long from 'long'; +import Sinon from 'sinon'; +import { getSodiumNode } from '../../../../../../node/sodiumNode'; +import { NotEmptyArrayOfBatchResults } from '../../../../../../session/apis/snode_api/SnodeRequestTypes'; +import { SnodeNamespaces } from '../../../../../../session/apis/snode_api/namespaces'; +import { ConvoHub } from '../../../../../../session/conversations'; +import { LibSodiumWrappers } from '../../../../../../session/crypto'; +import { MessageSender } from '../../../../../../session/sending'; +import { UserUtils } from '../../../../../../session/utils'; +import { RunJobResult } from '../../../../../../session/utils/job_runners/PersistedJob'; +import { GroupSync } from '../../../../../../session/utils/job_runners/jobs/GroupSyncJob'; +import { + GroupDestinationChanges, + GroupSuccessfulChange, + LibSessionUtil, + PendingChangesForGroup, +} from '../../../../../../session/utils/libsession/libsession_utils'; +import { MetaGroupWrapperActions } from '../../../../../../webworker/workers/browser/libsession_worker_interface'; +import { TestUtils } from '../../../../../test-utils'; +import { stubWindowFeatureFlags, stubWindowLog, TypedStub } from '../../../../../test-utils/utils'; +import { NetworkTime } from '../../../../../../util/NetworkTime'; + +function validInfo(sodium: LibSodiumWrappers) { + return { + type: 'GroupInfo', + ciphertext: sodium.randombytes_buf(12), + seqno: Long.fromNumber(123), + namespace: SnodeNamespaces.ClosedGroupInfo, + timestamp: 1234, + } as const; +} +function validMembers(sodium: LibSodiumWrappers) { + return { + type: 'GroupMember', + ciphertext: sodium.randombytes_buf(12), + seqno: Long.fromNumber(321), + namespace: SnodeNamespaces.ClosedGroupMembers, + timestamp: 4321, + } as const; +} + +function validKeys(sodium: LibSodiumWrappers) { + return { + type: 'GroupKeys', + ciphertext: sodium.randombytes_buf(12), + namespace: SnodeNamespaces.ClosedGroupKeys, + timestamp: 3333, + } as const; +} + +function validUserGroup03WithSecKey(pubkey?: GroupPubkeyType) { + const group: UserGroupsGet = { + authData: new Uint8Array(30), + secretKey: new Uint8Array(30), + destroyed: false, + invitePending: false, + joinedAtSeconds: Date.now(), + kicked: false, + priority: 0, + pubkeyHex: pubkey || TestUtils.generateFakeClosedGroupV2PkStr(), + name: 'Valid usergroup 03', + disappearingTimerSeconds: 0, + }; + return group; +} + +describe('GroupSyncJob run()', () => { + afterEach(() => { + Sinon.restore(); + }); + it('does not throw if no user keys', async () => { + const job = new GroupSync.GroupSyncJob({ + identifier: TestUtils.generateFakeClosedGroupV2PkStr(), + }); + + const func = async () => job.run(); + // Note: the run() function should never throw, at most it should return "permanent failure" + await expect(func()).to.be.not.eventually.rejected; + }); + + it('permanent failure if group is not a 03 one', async () => { + const job = new GroupSync.GroupSyncJob({ + identifier: TestUtils.generateFakeClosedGroupV2PkStr().slice(2), + }); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.PermanentFailure); + }); + + it('permanent failure if user has no ed keypair', async () => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(TestUtils.generateFakePubKeyStr()); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves(undefined); + Sinon.stub(ConvoHub.use(), 
'get').resolves({}); // anything not falsy + const job = new GroupSync.GroupSyncJob({ + identifier: TestUtils.generateFakeClosedGroupV2PkStr(), + }); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.PermanentFailure); + }); + + it('permanent failure if user has no own conversation', async () => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(TestUtils.generateFakePubKeyStr()); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves({} as any); // anything not falsy + Sinon.stub(ConvoHub.use(), 'get').returns(undefined as any); + const job = new GroupSync.GroupSyncJob({ + identifier: TestUtils.generateFakeClosedGroupV2PkStr(), + }); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.PermanentFailure); + }); + + it('calls pushChangesToGroupSwarmIfNeeded if preconditions are fine', async () => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(TestUtils.generateFakePubKeyStr()); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves({} as any); // anything not falsy + const taskedRun = Sinon.stub(GroupSync, 'pushChangesToGroupSwarmIfNeeded').resolves( + RunJobResult.Success + ); + Sinon.stub(ConvoHub.use(), 'get').returns({} as any); // anything not falsy + const job = new GroupSync.GroupSyncJob({ + identifier: TestUtils.generateFakeClosedGroupV2PkStr(), + }); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.Success); + expect(taskedRun.callCount).to.be.eq(1); + }); +}); + +describe('GroupSyncJob resultsToSuccessfulChange', () => { + let sodium: LibSodiumWrappers; + beforeEach(async () => { + sodium = await getSodiumNode(); + }); + it('no or empty results return empty array', () => { + expect( + LibSessionUtil.batchResultsToGroupSuccessfulChange(null, { + allOldHashes: new Set(), + messages: [], + }) + ).to.be.deep.eq([]); + + expect( + LibSessionUtil.batchResultsToGroupSuccessfulChange([] as any as NotEmptyArrayOfBatchResults, { + allOldHashes: new Set(), + messages: [], + }) + ).to.be.deep.eq([]); + }); + + it('extract one result with 200 and messagehash', () => { + const member = validMembers(sodium); + const info = validInfo(sodium); + const batchResults: NotEmptyArrayOfBatchResults = [{ code: 200, body: { hash: 'hash1' } }]; + const request: GroupDestinationChanges = { + allOldHashes: new Set(), + messages: [info, member], + }; + const results = LibSessionUtil.batchResultsToGroupSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash1', + pushed: info, + }, + ]); + }); + + it('extract two results with 200 and messagehash', () => { + const member = validMembers(sodium); + const info = validInfo(sodium); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 'hash1' } }, + { code: 200, body: { hash: 'hash2' } }, + ]; + const request: GroupDestinationChanges = { + allOldHashes: new Set(), + messages: [info, member], + }; + const results = LibSessionUtil.batchResultsToGroupSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash1', + pushed: info, + }, + { + updatedHash: 'hash2', + pushed: member, + }, + ]); + }); + + it('skip message hashes not a string', () => { + const member = validMembers(sodium); + const info = validInfo(sodium); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 123 as any as string } }, + { code: 200, body: { hash: 'hash2' } }, + ]; + const request: GroupDestinationChanges = { + allOldHashes: new Set(), + messages: 
[info, member], + }; + const results = LibSessionUtil.batchResultsToGroupSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash2', + pushed: member, + }, + ]); + }); + + it('skip request item without data', () => { + const member = validMembers(sodium); + const info = validInfo(sodium); + const infoNoData = omit(info, 'ciphertext'); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 'hash1' } }, + { code: 200, body: { hash: 'hash2' } }, + ]; + const request: GroupDestinationChanges = { + allOldHashes: new Set(), + messages: [infoNoData as any as PendingChangesForGroup, member], + }; + const results = LibSessionUtil.batchResultsToGroupSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash2', + pushed: member, + }, + ]); + }); + + it('skip request item without 200 code', () => { + const member = validMembers(sodium); + const info = validInfo(sodium); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 'hash1' } }, + { code: 401, body: { hash: 'hash2' } }, + ]; + const request: GroupDestinationChanges = { + allOldHashes: new Set(), + messages: [info, member], + }; + const results = LibSessionUtil.batchResultsToGroupSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash1', + pushed: info, + }, + ]); + + // another test swapping the results + batchResults[0].code = 401; + batchResults[1].code = 200; + const results2 = LibSessionUtil.batchResultsToGroupSuccessfulChange(batchResults, request); + expect(results2).to.be.deep.eq([ + { + updatedHash: 'hash2', + pushed: member, + }, + ]); + }); +}); + +describe('GroupSyncJob pushChangesToGroupSwarmIfNeeded', () => { + let groupPk: GroupPubkeyType; + let userkeys: TestUtils.TestUserKeyPairs; + let sodium: LibSodiumWrappers; + + let sendStub: TypedStub; + let pendingChangesForGroupStub: TypedStub; + let saveDumpsToDbStub: TypedStub; + + beforeEach(async () => { + sodium = await getSodiumNode(); + groupPk = TestUtils.generateFakeClosedGroupV2PkStr(); + userkeys = await TestUtils.generateUserKeyPairs(); + + stubWindowLog(); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(userkeys.x25519KeyPair.pubkeyHex); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves(userkeys.ed25519KeyPair); + + pendingChangesForGroupStub = Sinon.stub(LibSessionUtil, 'pendingChangesForGroup'); + saveDumpsToDbStub = Sinon.stub(LibSessionUtil, 'saveDumpsToDb'); + sendStub = Sinon.stub(MessageSender, 'sendEncryptedDataToSnode'); + }); + afterEach(() => { + Sinon.restore(); + }); + + it('calls saveDumpsToDb even if no changes are required on the server side', async () => { + pendingChangesForGroupStub.resolves({ allOldHashes: new Set(), messages: [] }); + + const result = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests: [], + }); + expect(result).to.be.eq(RunJobResult.Success); + expect(sendStub.callCount).to.be.eq(0); + expect(pendingChangesForGroupStub.callCount).to.be.eq(1); + expect(saveDumpsToDbStub.callCount).to.be.eq(1); + expect(saveDumpsToDbStub.firstCall.args).to.be.deep.eq([groupPk]); + }); + + it('calls sendEncryptedDataToSnode with the right data and retries if the network returned nothing', async () => { + TestUtils.stubLibSessionWorker(undefined); + stubWindowFeatureFlags(); + TestUtils.stubUserGroupWrapper('getGroup', validUserGroup03WithSecKey()); + + const info = validInfo(sodium); + const member = validMembers(sodium); + 
const networkTimestamp = 4444; + Sinon.stub(NetworkTime, 'now').returns(networkTimestamp); + pendingChangesForGroupStub.resolves({ + messages: [info, member], + allOldHashes: new Set('123'), + }); + + sendStub.resolves(undefined); + const result = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests: [], + }); + + expect(result).to.be.eq(RunJobResult.RetryJobIfPossible); // not returning anything in the sendstub so network issue happened + expect(sendStub.callCount).to.be.eq(1); + expect(pendingChangesForGroupStub.callCount).to.be.eq(1); + expect(saveDumpsToDbStub.callCount).to.be.eq(1); + expect(saveDumpsToDbStub.firstCall.args).to.be.deep.eq([groupPk]); + }); + + it('calls sendEncryptedDataToSnode and confirms the pushed changes on success', async () => { + stubWindowFeatureFlags(); + TestUtils.stubUserGroupWrapper('getGroup', validUserGroup03WithSecKey(groupPk)); + + const info = validInfo(sodium); + const member = validMembers(sodium); + const keys = validKeys(sodium); + pendingChangesForGroupStub.resolves({ + messages: [keys, info, member], + allOldHashes: new Set('123'), + }); + const changes: Array = [ + { + pushed: keys, + updatedHash: 'hashkeys', + }, + { + pushed: info, + updatedHash: 'hashinfo', + }, + { + pushed: member, + updatedHash: 'hashmember', + }, + ]; + Sinon.stub(LibSessionUtil, 'batchResultsToGroupSuccessfulChange').returns(changes); + const metaConfirmPushed = Sinon.stub(MetaGroupWrapperActions, 'metaConfirmPushed').resolves(); + + sendStub.resolves([ + { code: 200, body: { hash: 'hashkeys' } }, + { code: 200, body: { hash: 'hashinfo' } }, + { code: 200, body: { hash: 'hashmember' } }, + { code: 200, body: {} }, // because we are giving a set of allOldHashes + ]); + const result = await GroupSync.pushChangesToGroupSwarmIfNeeded({ + groupPk, + extraStoreRequests: [], + }); + + expect(sendStub.callCount).to.be.eq(1); + expect(pendingChangesForGroupStub.callCount).to.be.eq(1); + + expect(saveDumpsToDbStub.firstCall.args).to.be.deep.eq([groupPk]); + expect(saveDumpsToDbStub.secondCall.args).to.be.deep.eq([groupPk]); + expect(saveDumpsToDbStub.callCount).to.be.eq(2); + + expect(metaConfirmPushed.firstCall.args).to.be.deep.eq([ + groupPk, + { + groupInfo: [123, 'hashinfo'], + groupMember: [321, 'hashmember'], + }, + ]); + expect(metaConfirmPushed.callCount).to.be.eq(1); + + expect(result).to.be.eq(RunJobResult.Success); + }); +}); diff --git a/ts/test/session/unit/utils/job_runner/user_sync_job/UserSyncJob_test.ts b/ts/test/session/unit/utils/job_runner/user_sync_job/UserSyncJob_test.ts new file mode 100644 index 0000000000..68368715bf --- /dev/null +++ b/ts/test/session/unit/utils/job_runner/user_sync_job/UserSyncJob_test.ts @@ -0,0 +1,405 @@ +import { expect } from 'chai'; +import { omit } from 'lodash'; +import Long from 'long'; +import Sinon from 'sinon'; +import { getSodiumNode } from '../../../../../../node/sodiumNode'; +import { NotEmptyArrayOfBatchResults } from '../../../../../../session/apis/snode_api/SnodeRequestTypes'; +import { + SnodeNamespaces, + SnodeNamespacesUserConfig, +} from '../../../../../../session/apis/snode_api/namespaces'; +import { ConvoHub } from '../../../../../../session/conversations'; +import { LibSodiumWrappers } from '../../../../../../session/crypto'; +import { MessageSender } from '../../../../../../session/sending'; +import { UserUtils } from '../../../../../../session/utils'; +import { RunJobResult } from '../../../../../../session/utils/job_runners/PersistedJob'; +import { UserSync } from
'../../../../../../session/utils/job_runners/jobs/UserSyncJob'; +import { + LibSessionUtil, + PendingChangesForUs, + UserDestinationChanges, + UserSuccessfulChange, +} from '../../../../../../session/utils/libsession/libsession_utils'; +import { GenericWrapperActions } from '../../../../../../webworker/workers/browser/libsession_worker_interface'; +import { TestUtils } from '../../../../../test-utils'; +import { TypedStub, stubConfigDumpData } from '../../../../../test-utils/utils'; +import { NetworkTime } from '../../../../../../util/NetworkTime'; + +function userChange( + sodium: LibSodiumWrappers, + namespace: SnodeNamespacesUserConfig, + seqno: number +): PendingChangesForUs { + return { + ciphertext: sodium.randombytes_buf(120), + namespace, + seqno: Long.fromNumber(seqno), + }; +} + +describe('UserSyncJob run()', () => { + afterEach(() => { + Sinon.restore(); + }); + it('throws if no user keys', async () => { + const job = new UserSync.UserSyncJob({}); + + const func = async () => job.run(); + await expect(func()).to.be.eventually.rejected; + }); + + it('throws if our pubkey is set but not valid', async () => { + const job = new UserSync.UserSyncJob({}); + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns({ something: false } as any); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves({ something: true } as any); + Sinon.stub(ConvoHub.use(), 'get').resolves({}); // anything not falsy + + const func = async () => job.run(); + await expect(func()).to.be.eventually.rejected; + }); + + it('permanent failure if user has no ed keypair', async () => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(TestUtils.generateFakePubKeyStr()); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves(undefined); + Sinon.stub(ConvoHub.use(), 'get').resolves({}); // anything not falsy + const job = new UserSync.UserSyncJob({}); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.PermanentFailure); + }); + + it('permanent failure if user has no own conversation', async () => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(TestUtils.generateFakePubKeyStr()); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves({} as any); // anything not falsy + Sinon.stub(ConvoHub.use(), 'get').returns(undefined as any); + const job = new UserSync.UserSyncJob({}); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.PermanentFailure); + }); + + it('calls pushChangesToUserSwarmIfNeeded if preconditions are fine', async () => { + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(TestUtils.generateFakePubKeyStr()); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves({} as any); // anything not falsy + const taskedRun = Sinon.stub(UserSync, 'pushChangesToUserSwarmIfNeeded').resolves( + RunJobResult.Success + ); + Sinon.stub(ConvoHub.use(), 'get').returns({} as any); // anything not falsy + const job = new UserSync.UserSyncJob({}); + const result = await job.run(); + expect(result).to.be.eq(RunJobResult.Success); + expect(taskedRun.callCount).to.be.eq(1); + }); +}); + +describe('UserSyncJob batchResultsToUserSuccessfulChange', () => { + let sodium: LibSodiumWrappers; + beforeEach(async () => { + sodium = await getSodiumNode(); + }); + it('no or empty results return empty array', () => { + expect( + LibSessionUtil.batchResultsToUserSuccessfulChange(null, { + allOldHashes: new Set(), + messages: [], + }) + ).to.be.deep.eq([]); + + expect( + LibSessionUtil.batchResultsToUserSuccessfulChange([] as any as 
NotEmptyArrayOfBatchResults, { + allOldHashes: new Set(), + messages: [], + }) + ).to.be.deep.eq([]); + }); + + it('extract one result with 200 and messagehash', () => { + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const batchResults: NotEmptyArrayOfBatchResults = [{ code: 200, body: { hash: 'hash1' } }]; + const request: UserDestinationChanges = { + allOldHashes: new Set(), + messages: [profile, contact], + }; + const results = LibSessionUtil.batchResultsToUserSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash1', + pushed: profile, + }, + ]); + }); + + it('extract two results with 200 and messagehash', () => { + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 'hash1' } }, + { code: 200, body: { hash: 'hash2' } }, + ]; + const request: UserDestinationChanges = { + allOldHashes: new Set(), + messages: [contact, profile], + }; + const results = LibSessionUtil.batchResultsToUserSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash1', + pushed: contact, + }, + { + updatedHash: 'hash2', + pushed: profile, + }, + ]); + }); + + it('skip message hashes not a string', () => { + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 123 as any as string } }, + { code: 200, body: { hash: 'hash2' } }, + ]; + const request: UserDestinationChanges = { + allOldHashes: new Set(), + messages: [profile, contact], + }; + const results = LibSessionUtil.batchResultsToUserSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash2', + pushed: contact, + }, + ]); + }); + + it('skip request item without data', () => { + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const profileNoData = omit(profile, 'ciphertext'); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 'hash1' } }, + { code: 200, body: { hash: 'hash2' } }, + ]; + const request: UserDestinationChanges = { + allOldHashes: new Set(), + messages: [profileNoData as any as PendingChangesForUs, contact], + }; + const results = LibSessionUtil.batchResultsToUserSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash2', + pushed: contact, + }, + ]); + }); + + it('skip request item without 200 code', () => { + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const batchResults: NotEmptyArrayOfBatchResults = [ + { code: 200, body: { hash: 'hash1' } }, + { code: 401, body: { hash: 'hash2' } }, + ]; + const request: UserDestinationChanges = { + allOldHashes: new Set(), + messages: [profile, contact], + }; + const results = LibSessionUtil.batchResultsToUserSuccessfulChange(batchResults, request); + expect(results).to.be.deep.eq([ + { + updatedHash: 'hash1', + pushed: profile, + }, + ]); + + // another test swapping the results + batchResults[0].code = 401; + batchResults[1].code = 200; + const results2 = 
LibSessionUtil.batchResultsToUserSuccessfulChange(batchResults, request); + expect(results2).to.be.deep.eq([ + { + updatedHash: 'hash2', + pushed: contact, + }, + ]); + }); +}); + +describe('UserSyncJob pushChangesToUserSwarmIfNeeded', () => { + let userkeys: TestUtils.TestUserKeyPairs; + let sodium: LibSodiumWrappers; + + let sendStub: TypedStub; + let pendingChangesForUsStub: TypedStub; + let dump: TypedStub; + + beforeEach(async () => { + sodium = await getSodiumNode(); + userkeys = await TestUtils.generateUserKeyPairs(); + + Sinon.stub(UserUtils, 'getOurPubKeyStrFromCache').returns(userkeys.x25519KeyPair.pubkeyHex); + Sinon.stub(UserUtils, 'getUserED25519KeyPairBytes').resolves(userkeys.ed25519KeyPair); + + window.Whisper = {}; + window.Whisper.events = {}; + window.Whisper.events.trigger = Sinon.mock(); + stubConfigDumpData('saveConfigDump').resolves(); + + pendingChangesForUsStub = Sinon.stub(LibSessionUtil, 'pendingChangesForUs'); + dump = Sinon.stub(GenericWrapperActions, 'dump').resolves(new Uint8Array()); + sendStub = Sinon.stub(MessageSender, 'sendEncryptedDataToSnode'); + }); + afterEach(() => { + Sinon.restore(); + }); + + it('call savesDumpToDb even if no changes are required on the serverside', async () => { + Sinon.stub(GenericWrapperActions, 'needsDump').resolves(true); + const result = await UserSync.pushChangesToUserSwarmIfNeeded(); + + pendingChangesForUsStub.resolves(undefined); + expect(result).to.be.eq(RunJobResult.Success); + expect(sendStub.callCount).to.be.eq(0); + expect(pendingChangesForUsStub.callCount).to.be.eq(1); + expect(dump.callCount).to.be.eq(4); + expect(dump.getCalls().map(m => m.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + }); + + it('calls sendEncryptedDataToSnode and retry if network returned nothing', async () => { + Sinon.stub(GenericWrapperActions, 'needsDump').resolves(false).onSecondCall().resolves(true); + + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const networkTimestamp = 4444; + Sinon.stub(NetworkTime, 'now').returns(networkTimestamp); + + pendingChangesForUsStub.resolves({ + messages: [profile, contact], + allOldHashes: new Set('123'), + }); + const result = await UserSync.pushChangesToUserSwarmIfNeeded(); + + sendStub.resolves(undefined); + expect(result).to.be.eq(RunJobResult.RetryJobIfPossible); // not returning anything in the sendstub so network issue happened + expect(sendStub.callCount).to.be.eq(1); + expect(pendingChangesForUsStub.callCount).to.be.eq(1); + expect(dump.callCount).to.be.eq(1); + expect(dump.firstCall.args).to.be.deep.eq(['ContactsConfig']); + }); + + it('calls sendEncryptedDataToSnode with the right data x3 and retry if network returned nothing then success', async () => { + const profile = userChange(sodium, SnodeNamespaces.UserProfile, 321); + const contact = userChange(sodium, SnodeNamespaces.UserContacts, 123); + const groups = userChange(sodium, SnodeNamespaces.UserGroups, 111); + + pendingChangesForUsStub.resolves({ + messages: [profile, contact, groups], + allOldHashes: new Set('123'), + }); + const changes: Array = [ + { + pushed: profile, + updatedHash: 'hashprofile', + }, + { + pushed: contact, + updatedHash: 'hashcontact', + }, + { + pushed: groups, + updatedHash: 'hashgroup', + }, + ]; + Sinon.stub(LibSessionUtil, 'batchResultsToUserSuccessfulChange').returns(changes); + const confirmPushed = 
Sinon.stub(GenericWrapperActions, 'confirmPushed').resolves(); + + // all 4 need to be dumped + const needsDump = Sinon.stub(GenericWrapperActions, 'needsDump').resolves(true); + + // ============ 1st try, let's say we didn't get as much entries in the result as expected. This should be a fail + sendStub.resolves([ + { code: 200, body: { hash: 'hashprofile' } }, + { code: 200, body: { hash: 'hashcontact' } }, + { code: 200, body: { hash: 'hashgroup' } }, + ]); + let result = await UserSync.pushChangesToUserSwarmIfNeeded(); + + expect(sendStub.callCount).to.be.eq(1); + expect(pendingChangesForUsStub.callCount).to.be.eq(1); + expect(dump.getCalls().map(m => m.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + expect(dump.callCount).to.be.eq(4); + + expect(needsDump.getCalls().map(m => m.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + expect(needsDump.callCount).to.be.eq(4); + + expect(confirmPushed.callCount).to.be.eq(0); // first send failed, shouldn't confirm pushed + expect(result).to.be.eq(RunJobResult.RetryJobIfPossible); + + // ============= second try: we now should get a success + sendStub.resetHistory(); + sendStub.resolves([ + { code: 200, body: { hash: 'hashprofile2' } }, + { code: 200, body: { hash: 'hashcontact2' } }, + { code: 200, body: { hash: 'hashgroup2' } }, + { code: 200, body: {} }, // because we are giving a set of allOldHashes + ]); + changes.forEach(change => { + // eslint-disable-next-line no-param-reassign + change.updatedHash += '2'; + }); + + pendingChangesForUsStub.resetHistory(); + dump.resetHistory(); + needsDump.resetHistory(); + confirmPushed.resetHistory(); + result = await UserSync.pushChangesToUserSwarmIfNeeded(); + + expect(sendStub.callCount).to.be.eq(1); + expect(pendingChangesForUsStub.callCount).to.be.eq(1); + expect(dump.getCalls().map(m => m.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + + expect(needsDump.getCalls().map(m => m.args)).to.be.deep.eq([ + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ['UserConfig'], + ['ContactsConfig'], + ['UserGroupsConfig'], + ['ConvoInfoVolatileConfig'], + ]); + + expect(confirmPushed.getCalls().map(m => m.args)).to.be.deep.eq([ + ['UserConfig', 321, 'hashprofile2'], + ['ContactsConfig', 123, 'hashcontact2'], + ['UserGroupsConfig', 111, 'hashgroup2'], + ]); + expect(confirmPushed.callCount).to.be.eq(3); // second send success, we should confirm the pushes of the 3 pushed messages + + expect(result).to.be.eq(RunJobResult.Success); + }); +}); diff --git a/ts/test/test-utils/stubs/sending/PendingMessageCacheStub.ts b/ts/test/test-utils/stubs/sending/PendingMessageCacheStub.ts index b1a468298a..edd2625846 100644 --- a/ts/test/test-utils/stubs/sending/PendingMessageCacheStub.ts +++ b/ts/test/test-utils/stubs/sending/PendingMessageCacheStub.ts @@ -1,14 +1,14 @@ import { PendingMessageCache } from '../../../../session/sending'; -import { RawMessage } from '../../../../session/types'; +import { OutgoingRawMessage } from '../../../../session/types'; export class PendingMessageCacheStub extends PendingMessageCache { - public dbData: Array; - constructor(dbData: Array = []) { + public dbData: Array; + constructor(dbData: Array = []) { super(); this.dbData = 
dbData; } - public getCache(): Readonly> { + public getCache(): Readonly> { return this.cache; } diff --git a/ts/test/test-utils/utils/message.ts b/ts/test/test-utils/utils/message.ts index ec0f48620b..578e57f43b 100644 --- a/ts/test/test-utils/utils/message.ts +++ b/ts/test/test-utils/utils/message.ts @@ -17,9 +17,11 @@ import { OpenGroupVisibleMessage } from '../../../session/messages/outgoing/visi import { VisibleMessage } from '../../../session/messages/outgoing/visibleMessage/VisibleMessage'; import { PubKey } from '../../../session/types'; import { OpenGroupReaction } from '../../../types/Reaction'; -import { generateFakePubKey } from './pubkey'; +import { generateFakePubKeyStr } from './pubkey'; import { OpenGroupRequestCommonType } from '../../../data/types'; +const loremIpsum = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit'; + export function generateVisibleMessage({ identifier, timestamp, @@ -28,9 +30,9 @@ export function generateVisibleMessage({ timestamp?: number; } = {}): VisibleMessage { return new VisibleMessage({ - body: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit', + body: loremIpsum, identifier: identifier ?? uuid(), - timestamp: timestamp || Date.now(), + createAtNetworkTimestamp: timestamp || Date.now(), attachments: undefined, quote: undefined, expirationType: null, @@ -78,7 +80,7 @@ export function generateOpenGroupMessageV2WithServerId( export function generateOpenGroupVisibleMessage(): OpenGroupVisibleMessage { return new OpenGroupVisibleMessage({ - timestamp: Date.now(), + createAtNetworkTimestamp: Date.now(), }); } @@ -86,14 +88,9 @@ export function generateOpenGroupV2RoomInfos(): OpenGroupRequestCommonType { return { roomId: 'main', serverUrl: 'http://open.getsession.org' }; } -export function generateClosedGroupMessage( - groupId?: string, - timestamp?: number -): ClosedGroupVisibleMessage { +export function generateClosedGroupMessage(groupId?: string): ClosedGroupVisibleMessage { return new ClosedGroupVisibleMessage({ - identifier: uuid(), - groupId: groupId ? PubKey.cast(groupId) : generateFakePubKey(), - timestamp: timestamp || Date.now(), + groupId: groupId ? PubKey.cast(groupId).key : generateFakePubKeyStr(), chatMessage: generateVisibleMessage(), }); } @@ -152,16 +149,16 @@ export function generateDisappearingVisibleMessage({ if (!isEmpty(expirationTimerUpdate)) { return new ExpirationTimerUpdateMessage({ identifier: identifier ?? uuid(), - timestamp: timestamp || Date.now(), + createAtNetworkTimestamp: timestamp || Date.now(), expirationType: expirationTimerUpdate.expirationType || null, expireTimer: expirationTimerUpdate.expireTimer, }); } return new VisibleMessage({ - body: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit', + body: loremIpsum, identifier: identifier ?? uuid(), - timestamp: timestamp || Date.now(), + createAtNetworkTimestamp: timestamp || Date.now(), attachments: undefined, quote: undefined, expirationType: expirationType ?? 
null, diff --git a/ts/test/test-utils/utils/pubkey.ts b/ts/test/test-utils/utils/pubkey.ts index 6c8ba49e6f..be58d9247e 100644 --- a/ts/test/test-utils/utils/pubkey.ts +++ b/ts/test/test-utils/utils/pubkey.ts @@ -1,8 +1,14 @@ import * as crypto from 'crypto'; +import { GroupPubkeyType, PubkeyType, UserGroupsWrapperNode } from 'libsession_util_nodejs'; +import { KeyPair, to_hex } from 'libsodium-wrappers-sumo'; import _ from 'lodash'; import { Snode } from '../../../data/types'; +import { getSodiumNode } from '../../../node/sodiumNode'; import { ECKeyPair } from '../../../receiver/keypairs'; +import { SnodePool } from '../../../session/apis/snode_api/snodePool'; import { PubKey } from '../../../session/types'; +import { ByteKeyPair } from '../../../session/utils/User'; +import { stubData } from './stubbing'; export function generateFakePubKey(): PubKey { // Generates a mock pubkey for testing @@ -13,20 +19,64 @@ export function generateFakePubKey(): PubKey { return new PubKey(pubkeyString); } -export function generateFakePubKeyStr(): string { +export function generateFakePubKeyStr(): PubkeyType { // Generates a mock pubkey for testing const numBytes = PubKey.PUBKEY_LEN / 2 - 1; const hexBuffer = crypto.randomBytes(numBytes).toString('hex'); - const pubkeyString = `05${hexBuffer}`; + const pubkeyString: PubkeyType = `05${hexBuffer}`; return pubkeyString; } -export function generateFakeClosedGroupV3PkStr(): string { +export type TestUserKeyPairs = { + x25519KeyPair: { + pubkeyHex: PubkeyType; + pubKey: Uint8Array; + privKey: Uint8Array; + }; + ed25519KeyPair: KeyPair & ByteKeyPair; +}; + +export async function generateUserKeyPairs(): Promise { + const sodium = await getSodiumNode(); + const ed25519KeyPair = sodium.crypto_sign_seed_keypair( + sodium.randombytes_buf(sodium.crypto_sign_SEEDBYTES) + ); + const x25519PublicKey = sodium.crypto_sign_ed25519_pk_to_curve25519(ed25519KeyPair.publicKey); + // prepend version byte (coming from `processKeys(raw_keys)`) + const origPub = new Uint8Array(x25519PublicKey); + const prependedX25519PublicKey = new Uint8Array(33); + prependedX25519PublicKey.set(origPub, 1); + prependedX25519PublicKey[0] = 5; + const x25519SecretKey = sodium.crypto_sign_ed25519_sk_to_curve25519(ed25519KeyPair.privateKey); + + // prepend with 05 the public key + const userKeys = { + x25519KeyPair: { + pubkeyHex: to_hex(prependedX25519PublicKey) as PubkeyType, + pubKey: prependedX25519PublicKey, + privKey: x25519SecretKey, + }, + ed25519KeyPair: { + ...ed25519KeyPair, + pubKeyBytes: ed25519KeyPair.publicKey, + privKeyBytes: ed25519KeyPair.privateKey, + }, + }; + + return userKeys; +} + +export async function generateGroupV2(privateEd25519: Uint8Array) { + const groupWrapper = new UserGroupsWrapperNode(privateEd25519, null); + return groupWrapper.createGroup(); +} + +export function generateFakeClosedGroupV2PkStr(): GroupPubkeyType { // Generates a mock pubkey for testing const numBytes = PubKey.PUBKEY_LEN / 2 - 1; const hexBuffer = crypto.randomBytes(numBytes).toString('hex'); - const pubkeyString = `03${hexBuffer}`; + const pubkeyString: GroupPubkeyType = `03${hexBuffer}`; return pubkeyString; } @@ -43,6 +93,10 @@ export function generateFakePubKeys(amount: number): Array { return new Array(numPubKeys).fill(0).map(() => generateFakePubKey()); } +export function generateFakeSwarmFor(): Array { + return generateFakePubKeys(6).map(m => m.key); +} + export function generateFakeSnode(): Snode { return { // NOTE: make sure this is random, but not a valid ip (otherwise we will try to hit 
that ip during testing!) @@ -73,3 +127,15 @@ export function generateFakeSnodes(amount: number): Array { const ar: Array = _.times(amount, generateFakeSnode); return ar; } + +/** + * this function can be used to setup unit test which relies on fetching a snode pool + */ +export function setupTestWithSending() { + const snodes = generateFakeSnodes(20); + const swarm = snodes.slice(0, 6); + SnodePool.TEST_resetState(snodes); + + stubData('getSwarmNodesForPubkey').resolves(swarm.map(m => m.pubkey_ed25519)); + return { snodes, swarm }; +} diff --git a/ts/test/test-utils/utils/stubbing.ts b/ts/test/test-utils/utils/stubbing.ts index 7d696bf5b9..e938cd438a 100644 --- a/ts/test/test-utils/utils/stubbing.ts +++ b/ts/test/test-utils/utils/stubbing.ts @@ -1,10 +1,14 @@ /* eslint-disable func-names */ import { expect } from 'chai'; +import { UserGroupsWrapperActionsCalls } from 'libsession_util_nodejs'; import Sinon from 'sinon'; import { ConfigDumpData } from '../../../data/configDump/configDump'; import { Data } from '../../../data/data'; import { OpenGroupData } from '../../../data/opengroups'; +import { TestUtils } from '..'; +import { SnodePool } from '../../../session/apis/snode_api/snodePool'; +import { BlockedNumberController } from '../../../util'; import { loadLocalizedDictionary } from '../../../node/locale'; import * as libsessionWorker from '../../../webworker/workers/browser/libsession_worker_interface'; import * as utilWorker from '../../../webworker/workers/browser/util_worker_interface'; @@ -18,12 +22,6 @@ type DataFunction = typeof Data; type OpenGroupDataFunction = typeof OpenGroupData; type ConfigDumpDataFunction = typeof ConfigDumpData; -export type TypedStub, K extends keyof T> = T[K] extends ( - ...args: any -) => any - ? Sinon.SinonStub, ReturnType> - : never; - /** * Stub a function inside Data. * @@ -48,8 +46,20 @@ export function stubUtilWorker(fnName: string, returnedValue: any): sinon.SinonS .resolves(returnedValue); } -export function stubLibSessionWorker(value: any) { - Sinon.stub(libsessionWorker, 'callLibSessionWorker').resolves(value); +export function stubLibSessionWorker(resolveValue: any) { + Sinon.stub(libsessionWorker, 'callLibSessionWorker').resolves(resolveValue); +} + +export function stubBlockedNumberController() { + Sinon.stub(BlockedNumberController, 'getNumbersFromDB').resolves(); + Sinon.stub(BlockedNumberController, 'isBlocked').resolves(); +} + +export function stubUserGroupWrapper( + fn: T, + value: Awaited> +) { + Sinon.stub(libsessionWorker.UserGroupsWrapperActions, fn).resolves(value); } export function stubCreateObjectUrl() { @@ -137,6 +147,22 @@ export async function expectAsyncToThrow(toAwait: () => Promise, errorMessa } } +export type TypedStub, K extends keyof T> = T[K] extends ( + ...args: any +) => any + ? 
Sinon.SinonStub, ReturnType> + : never; + +export function stubValidSnodeSwarm() { + const snodes = TestUtils.generateFakeSnodes(20); + SnodePool.TEST_resetState(snodes); + const swarm = snodes.slice(0, 6); + + Sinon.stub(SnodePool, 'getSwarmFor').resolves(swarm); + + return { snodes, swarm }; +} + /** You must call stubWindowLog() before using */ export const stubI18n = () => { const { i18n } = loadLocalizedDictionary({ appLocale: 'en', logger: window.log }); diff --git a/ts/test/util/blockedNumberController_test.ts b/ts/test/util/blockedNumberController_test.ts index 3812d012f0..0dbea7a210 100644 --- a/ts/test/util/blockedNumberController_test.ts +++ b/ts/test/util/blockedNumberController_test.ts @@ -1,5 +1,6 @@ import { expect } from 'chai'; import Sinon from 'sinon'; +import { Convo } from '../../models/conversation'; import { BlockedNumberController } from '../../util/blockedNumberController'; import { TestUtils } from '../test-utils'; @@ -51,6 +52,8 @@ describe('BlockedNumberController', () => { describe('block', () => { it('should block the user', async () => { + Sinon.stub(Convo, 'commitConversationAndRefreshWrapper').resolves(); + const other = TestUtils.generateFakePubKey(); await BlockedNumberController.block(other.key); diff --git a/ts/types/MessageAttachment.ts b/ts/types/MessageAttachment.ts index bff81b474b..3b2e2cf5c7 100644 --- a/ts/types/MessageAttachment.ts +++ b/ts/types/MessageAttachment.ts @@ -11,20 +11,39 @@ import { autoOrientJPEGAttachment, captureDimensionsAndScreenshot, deleteData, + deleteDataSuccessful, loadData, replaceUnicodeV2, } from './attachments/migrations'; // NOTE I think this is only used on the renderer side, but how?! -export const deleteExternalMessageFiles = async (message: { - attachments: any; - quote: any; - preview: any; +export const deleteExternalMessageFiles = async (messageAttributes: { + attachments: Array | undefined; + quote: { attachments: Array | undefined }; + preview: Array | undefined; }) => { - const { attachments, quote, preview } = message; + let anyChanges = false; + const { attachments, quote, preview } = messageAttributes; if (attachments && attachments.length) { await Promise.all(attachments.map(deleteData)); + anyChanges = true; + + // test that the files were deleted successfully + try { + let results = await Promise.allSettled(attachments.map(deleteDataSuccessful)); + results = results.filter(result => result.status === 'rejected'); + + if (results.length) { + throw Error; + } + } catch (err) { + // eslint-disable-next-line no-console + console.warn( + '[deleteExternalMessageFiles]: Failed to delete attachments for', + messageAttributes + ); + } } if (quote && quote.attachments && quote.attachments.length) { @@ -41,6 +60,8 @@ export const deleteExternalMessageFiles = async (message: { } attachment.thumbnail = undefined; + anyChanges = true; + return attachment; }) ); @@ -57,10 +78,13 @@ export const deleteExternalMessageFiles = async (message: { } item.image = undefined; + anyChanges = true; + return image; }) ); } + return anyChanges; }; let attachmentsPath: string | undefined; diff --git a/ts/types/attachments/VisualAttachment.ts b/ts/types/attachments/VisualAttachment.ts index c14995972e..ed28890dec 100644 --- a/ts/types/attachments/VisualAttachment.ts +++ b/ts/types/attachments/VisualAttachment.ts @@ -4,10 +4,7 @@ import { blobToArrayBuffer, dataURLToBlob } from 'blob-util'; import { toLogFormat } from './Errors'; -import { - getDecryptedBlob, - getDecryptedMediaUrl, -} from 
'../../session/crypto/DecryptedAttachmentsManager'; +import { DecryptedAttachmentsManager } from '../../session/crypto/DecryptedAttachmentsManager'; import { ToastUtils } from '../../session/utils'; import { GoogleChrome } from '../../util'; import { autoScaleForAvatar, autoScaleForThumbnail } from '../../util/attachmentsUtil'; @@ -42,7 +39,7 @@ export const getImageDimensions = async ({ reject(error); }); // image/jpg is hard coded here but does not look to cause any issues - void getDecryptedMediaUrl(objectUrl, 'image/jpg', false) + void DecryptedAttachmentsManager.getDecryptedMediaUrl(objectUrl, 'image/jpg', false) .then(decryptedUrl => { image.src = decryptedUrl; }) @@ -62,7 +59,7 @@ export const makeImageThumbnailBuffer = async ({ 'makeImageThumbnailBuffer can only be called with what GoogleChrome image type supports' ); } - const decryptedBlob = await getDecryptedBlob(objectUrl, contentType); + const decryptedBlob = await DecryptedAttachmentsManager.getDecryptedBlob(objectUrl, contentType); const scaled = await autoScaleForThumbnail({ contentType, blob: decryptedBlob }); return blobToArrayBuffer(scaled.blob); @@ -102,11 +99,13 @@ export const makeVideoScreenshot = async ({ reject(error); }); - void getDecryptedMediaUrl(objectUrl, contentType, false).then(decryptedUrl => { - video.src = decryptedUrl; - video.muted = true; - void video.play(); // for some reason, this is to be started, otherwise the generated thumbnail will be empty - }); + void DecryptedAttachmentsManager.getDecryptedMediaUrl(objectUrl, contentType, false).then( + decryptedUrl => { + video.src = decryptedUrl; + video.muted = true; + void video.play(); // for some reason, this is to be started, otherwise the generated thumbnail will be empty + } + ); }); export async function getVideoDuration({ @@ -128,7 +127,7 @@ export async function getVideoDuration({ reject(error); }); - void getDecryptedMediaUrl(objectUrl, contentType, false) + void DecryptedAttachmentsManager.getDecryptedMediaUrl(objectUrl, contentType, false) .then(decryptedUrl => { video.src = decryptedUrl; }) @@ -161,7 +160,7 @@ export async function getAudioDuration({ reject(error); }); - void getDecryptedMediaUrl(objectUrl, contentType, false) + void DecryptedAttachmentsManager.getDecryptedMediaUrl(objectUrl, contentType, false) .then(decryptedUrl => { audio.src = decryptedUrl; }) @@ -193,8 +192,7 @@ export async function autoScaleAvatarBlob(file: File) { } catch (e) { ToastUtils.pushToastError( 'pickFileForAvatar', - 'An error happened while picking/resizing the image', - e.message || '' + `An error happened while picking/resizing the image: "${e.message || ''}"` ); window.log.error(e); return null; diff --git a/ts/types/attachments/migrations.ts b/ts/types/attachments/migrations.ts index c42ec185dd..d35bcf6899 100644 --- a/ts/types/attachments/migrations.ts +++ b/ts/types/attachments/migrations.ts @@ -1,8 +1,8 @@ +/* eslint-disable no-param-reassign */ import { arrayBufferToBlob, blobToArrayBuffer } from 'blob-util'; -import { pathExists } from 'fs-extra'; +import fse from 'fs-extra'; import { isString } from 'lodash'; - import * as GoogleChrome from '../../util/GoogleChrome'; import * as MIME from '../MIME'; import { toLogFormat } from './Errors'; @@ -145,26 +145,6 @@ export const loadData = async (attachment: any) => { return { ...attachment, data }; }; -const handleDiskDeletion = async (path: string) => { - await deleteOnDisk(path); - try { - const exists = await pathExists(path); - - // NOTE we want to confirm the path no longer exists - if (exists) { 
- throw Error('Error: File path still exists.'); - } - - window.log.debug(`deleteDataSuccessful: Deletion succeeded for attachment ${path}`); - return undefined; - } catch (err) { - window.log.warn( - `deleteDataSuccessful: Deletion failed for attachment ${path} ${err.message || err}` - ); - return path; - } -}; - // deleteData :: (RelativePath -> IO Unit) // Attachment -> // IO Unit @@ -177,24 +157,43 @@ export const deleteData = async (attachment: { throw new TypeError('deleteData: attachment is not valid'); } - let { path, thumbnail, screenshot } = attachment; - - if (path && isString(path)) { - const pathAfterDelete = await handleDiskDeletion(path); - path = isString(pathAfterDelete) ? pathAfterDelete : undefined; + const { path, thumbnail, screenshot } = attachment; + if (isString(path)) { + await deleteOnDisk(path); + attachment.path = ''; } - if (thumbnail && isString(thumbnail.path)) { - const pathAfterDelete = await handleDiskDeletion(thumbnail.path); - thumbnail = isString(pathAfterDelete) ? pathAfterDelete : undefined; + await deleteOnDisk(thumbnail.path); + attachment.thumbnail = undefined; } - if (screenshot && isString(screenshot.path)) { - const pathAfterDelete = await handleDiskDeletion(screenshot.path); - screenshot = isString(pathAfterDelete) ? pathAfterDelete : undefined; + await deleteOnDisk(screenshot.path); + attachment.screenshot = undefined; } - return { path, thumbnail, screenshot }; + return attachment; +}; + +export const deleteDataSuccessful = async (attachment: { + path: string; + thumbnail: any; + screenshot: any; +}) => { + const errorMessage = `deleteDataSuccessful: Deletion failed for attachment ${attachment.path}`; + // eslint-disable-next-line @typescript-eslint/no-misused-promises + return fse.pathExists(attachment.path, (err, exists) => { + if (err) { + return Promise.reject(new Error(`${errorMessage} ${err}`)); + } + + // Note we want to confirm the path no longer exists + if (exists) { + return Promise.reject(errorMessage); + } + + window.log.debug(`deleteDataSuccessful: Deletion succeeded for attachment ${attachment.path}`); + return true; + }); }; type CaptureDimensionType = { contentType: string; path: string }; diff --git a/ts/types/sqlSharedTypes.ts b/ts/types/sqlSharedTypes.ts index 48b26e84d7..5965b5e608 100644 --- a/ts/types/sqlSharedTypes.ts +++ b/ts/types/sqlSharedTypes.ts @@ -3,14 +3,16 @@ // eslint-disable-next-line camelcase import { ContactInfoSet, - DisappearingMessageConversationModeType, + GroupPubkeyType, LegacyGroupInfo, LegacyGroupMemberInfo, + Uint8ArrayFixedLength, } from 'libsession_util_nodejs'; import { from_hex } from 'libsodium-wrappers-sumo'; import { isArray, isEmpty, isEqual } from 'lodash'; -import { fromHexToArray } from '../session/utils/String'; -import { ConfigWrapperObjectTypes } from '../webworker/workers/browser/libsession_worker_functions'; +import { DisappearingMessageConversationModeType } from '../session/disappearing_messages/types'; +import { fromHexToArray, toHex } from '../session/utils/String'; +import { ConfigWrapperObjectTypesMeta } from '../webworker/workers/browser/libsession_worker_functions'; import { OpenGroupRequestCommonType, OpenGroupV2Room } from '../data/types'; /** @@ -21,6 +23,8 @@ export type AsyncWrapper any> = ( ...args: Parameters ) => Promise>; +export type AwaitedReturn any> = Awaited>; + /** * This type is used to build from an objectType filled with functions, a new object type where all the functions their async equivalent */ @@ -43,7 +47,7 @@ export type UpdateLastHashType = { }; 
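A quick aside on the `AsyncWrapper` and `AwaitedReturn` helpers shown just above: the angle-bracket generic parameters were stripped from this extract, so the sketch below reconstructs the likely `<T extends (...args: any) => any>` shape as an assumption. The `GetDisplayName` signature and its body are invented purely for illustration and are not part of this patch.

```ts
// Hedged reconstruction of the helper types (generics re-added by hand).
type AsyncWrapper<T extends (...args: any) => any> = (
  ...args: Parameters<T>
) => Promise<ReturnType<T>>;

type AwaitedReturn<T extends (...args: any) => any> = Awaited<ReturnType<T>>;

// Example only: a synchronous lookup signature and its async equivalent.
type GetDisplayName = (pubkey: string) => string;

const getDisplayNameAsync: AsyncWrapper<GetDisplayName> = async pubkey => {
  // placeholder body for the sketch; a real implementation would hit the DB
  return `user-${pubkey.slice(0, 8)}`;
};

// AwaitedReturn unwraps the resolved value: `string`, not `Promise<string>`.
type DisplayName = AwaitedReturn<typeof getDisplayNameAsync>;
```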
export type ConfigDumpRow = { - variant: ConfigWrapperObjectTypes; // the variant this entry is about. (user pr, contacts, ...) + variant: ConfigWrapperObjectTypesMeta; // the variant this entry is about. (user pr, contacts, ...) publicKey: string; // either our pubkey if a dump for our own swarm or the closed group pubkey data: Uint8Array; // the blob returned by libsession.dump() call }; @@ -56,13 +60,15 @@ export const CONFIG_DUMP_TABLE = 'configDump'; export type ConfigDumpDataNode = { getByVariantAndPubkey: ( - variant: ConfigWrapperObjectTypes, + variant: ConfigWrapperObjectTypesMeta, publicKey: string ) => Array; saveConfigDump: (dump: ConfigDumpRow) => void; getAllDumpsWithData: () => Array; getAllDumpsWithoutData: () => Array; + getAllDumpsWithoutDataFor: (pk: string) => Array; + deleteDumpFor: (pk: GroupPubkeyType) => void; }; // ========== unprocessed @@ -284,3 +290,32 @@ export function capabilitiesListHasBlindEnabled(caps?: Array | null) { export function roomHasReactionsEnabled(openGroup?: OpenGroupV2Room) { return Boolean(openGroup?.capabilities?.includes('reactions')); } + +export function toFixedUint8ArrayOfLength( + data: Uint8Array, + length: T +): Uint8ArrayFixedLength { + if (data.length === length) { + return { + buffer: data, + length, + }; + } + throw new Error( + `toFixedUint8ArrayOfLength invalid. Expected length ${length} but got: ${data.length}` + ); +} + +export function stringify(obj: unknown) { + return JSON.stringify( + obj, + (_key, value) => { + return value instanceof Uint8Array + ? `Uint8Array(${value.length}): ${toHex(value)}` + : value?.type === 'Buffer' && value?.data + ? `Buffer: ${toHex(value.data)}` + : value; + }, + 2 + ); +} diff --git a/ts/updater/updater.ts b/ts/updater/updater.ts index d4941f8267..f471b2339c 100644 --- a/ts/updater/updater.ts +++ b/ts/updater/updater.ts @@ -8,7 +8,7 @@ import { gt as isVersionGreaterThan, parse as parseVersion } from 'semver'; import { windowMarkShouldQuit } from '../node/window_state'; -import { getLatestRelease } from '../node/latest_desktop_release'; +import { UPDATER_INTERVAL_MS } from '../session/constants'; import type { SetupI18nReturnType } from '../types/localizer'; import { getPrintableError, @@ -17,6 +17,7 @@ import { showDownloadUpdateDialog, showUpdateDialog, } from './common'; +import { getLatestRelease } from '../node/latest_desktop_release'; let isUpdating = false; let downloadIgnored = false; @@ -41,16 +42,13 @@ export async function start( autoUpdater.logger = logger; autoUpdater.autoDownload = false; - interval = global.setInterval( - async () => { - try { - await checkForUpdates(getMainWindow, i18n, logger); - } catch (error) { - logger.error('auto-update: error:', getPrintableError(error)); - } - }, - 1000 * 60 * 10 - ); // trigger and try to update every 10 minutes to let the file gets downloaded if we are updating + interval = global.setInterval(async () => { + try { + await checkForUpdates(getMainWindow, i18n, logger); + } catch (error) { + logger.error('auto-update: error:', getPrintableError(error)); + } + }, UPDATER_INTERVAL_MS); // trigger and try to update every 10 minutes to let the file gets downloaded if we are updating stopped = false; global.setTimeout( @@ -62,7 +60,7 @@ export async function start( } }, 2 * 60 * 1000 - ); // we do checks from the fileserver every 1 minute. + ); // we do checks from the file server every 1 minute. 
} export function stop() { @@ -110,7 +108,7 @@ async function checkForUpdates( logger.info('[updater] checkForUpdates isMoreRecent', isMoreRecent); if (!isMoreRecent) { logger.info( - `Fileserver has no update so we are not looking for an update from github current:${currentVersion} fromFileServer:${latestVersionFromFsFromRenderer}` + `File server has no update so we are not looking for an update from github current:${currentVersion} fromFileServer:${latestVersionFromFsFromRenderer}` ); return; } diff --git a/ts/util/NetworkTime.ts b/ts/util/NetworkTime.ts new file mode 100644 index 0000000000..eb3298ed82 --- /dev/null +++ b/ts/util/NetworkTime.ts @@ -0,0 +1,41 @@ +let latestTimestampOffset = Number.MAX_SAFE_INTEGER; + +/** + * This function has no use to be called except during tests. + * @returns the current offset we have with the rest of the network. + */ +function getLatestTimestampOffset() { + if (latestTimestampOffset === Number.MAX_SAFE_INTEGER) { + window.log.debug('latestTimestampOffset is not set yet'); + return 0; + } + // window.log.info('latestTimestampOffset is ', latestTimestampOffset); + + return latestTimestampOffset; +} + +function setLatestTimestampOffset(newOffset: number) { + latestTimestampOffset = newOffset; + if (latestTimestampOffset === Number.MAX_SAFE_INTEGER) { + window?.log?.info(`first timestamp offset received: ${newOffset}ms`); + } + latestTimestampOffset = newOffset; +} + +function now() { + // make sure to call exports here, as we stub the exported one for testing. + return Date.now() - NetworkTime.getLatestTimestampOffset(); +} + +function getNowWithNetworkOffsetSeconds() { + // make sure to call exports here, as we stub the exported one for testing. + + return Math.floor(NetworkTime.now() / 1000); +} + +export const NetworkTime = { + getNowWithNetworkOffsetSeconds, + getLatestTimestampOffset, + now, + setLatestTimestampOffset, +}; diff --git a/ts/util/accountManager.ts b/ts/util/accountManager.ts index 950206d4e6..2c53cb6670 100644 --- a/ts/util/accountManager.ts +++ b/ts/util/accountManager.ts @@ -1,5 +1,5 @@ import { isEmpty } from 'lodash'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; import { getSodiumRenderer } from '../session/crypto'; import { ed25519Str, fromArrayBufferToBase64, fromHex, toHex } from '../session/utils/String'; import { configurationMessageReceived, trigger } from '../shims/events'; @@ -222,7 +222,7 @@ export async function registrationDone(ourPubkey: string, displayName: string) { } // Ensure that we always have a conversation for ourself - const conversation = await getConversationController().getOrCreateAndWait( + const conversation = await ConvoHub.use().getOrCreateAndWait( ourPubkey, ConversationTypeEnum.PRIVATE ); diff --git a/ts/util/attachmentsUtil.ts b/ts/util/attachmentsUtil.ts index 7856ae0760..22aa9e168d 100644 --- a/ts/util/attachmentsUtil.ts +++ b/ts/util/attachmentsUtil.ts @@ -6,7 +6,7 @@ import loadImage from 'blueimp-load-image'; import fileSize from 'filesize'; import { StagedAttachmentType } from '../components/conversation/composition/CompositionBox'; import { SignalService } from '../protobuf'; -import { getDecryptedMediaUrl } from '../session/crypto/DecryptedAttachmentsManager'; +import { DecryptedAttachmentsManager } from '../session/crypto/DecryptedAttachmentsManager'; import { sendDataExtractionNotification } from '../session/messages/outgoing/controlMessage/DataExtractionNotificationMessage'; import { AttachmentType, save } 
from '../types/Attachment'; import { IMAGE_GIF, IMAGE_JPEG, IMAGE_PNG, IMAGE_TIFF, IMAGE_UNKNOWN } from '../types/MIME'; @@ -410,7 +410,11 @@ export const saveAttachmentToDisk = async ({ conversationId: string; index: number; }) => { - const decryptedUrl = await getDecryptedMediaUrl(attachment.url, attachment.contentType, false); + const decryptedUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( + attachment.url, + attachment.contentType, + false + ); save({ attachment: { ...attachment, url: decryptedUrl }, document, diff --git a/ts/util/blockedNumberController.ts b/ts/util/blockedNumberController.ts index 39328c1bc1..1e1bbe527a 100644 --- a/ts/util/blockedNumberController.ts +++ b/ts/util/blockedNumberController.ts @@ -1,5 +1,5 @@ import { Data } from '../data/data'; -import { commitConversationAndRefreshWrapper } from '../models/conversation'; +import { Convo } from '../models/conversation'; import { PubKey } from '../session/types'; import { Storage } from './storage'; @@ -37,7 +37,7 @@ export class BlockedNumberController { if (!this.blockedNumbers.has(toBlock.key)) { this.blockedNumbers.add(toBlock.key); await this.saveToDB(BLOCKED_NUMBERS_ID, this.blockedNumbers); - await commitConversationAndRefreshWrapper(toBlock.key); + await Convo.commitConversationAndRefreshWrapper(toBlock.key); } } @@ -63,7 +63,7 @@ export class BlockedNumberController { const user = users[index]; try { // eslint-disable-next-line no-await-in-loop - await commitConversationAndRefreshWrapper(user); + await Convo.commitConversationAndRefreshWrapper(user); } catch (e) { window.log.warn( 'failed to SessionUtilContact.insertContactFromDBIntoWrapperAndRefresh with: ', @@ -102,7 +102,7 @@ export class BlockedNumberController { this.blockedNumbers = new Set(); } - private static async getNumbersFromDB(id: string): Promise> { + public static async getNumbersFromDB(id: string): Promise> { const data = await Data.getItemById(id); if (!data || !data.value) { return new Set(); diff --git a/ts/util/logging.ts b/ts/util/logging.ts index c281f73637..007097cbea 100644 --- a/ts/util/logging.ts +++ b/ts/util/logging.ts @@ -110,6 +110,12 @@ const development = window && window?.getEnvironment && window?.getEnvironment() // The Bunyan API: https://github.com/trentm/node-bunyan#log-method-api function logAtLevel(level: string, prefix: string, ...args: any) { + // when unit testing with mocha, we just log whatever we get to the console.log + if (typeof (global as any).it === 'function') { + (console as any)._log(prefix, now(), ...args); + return; + } + if (prefix === 'DEBUG' && !window.sessionFeatureFlags.debug.debugLogging) { return; } diff --git a/ts/util/missingCaseError.ts b/ts/util/missingCaseError.ts index 4673f5c5f9..8f5ff77d4b 100644 --- a/ts/util/missingCaseError.ts +++ b/ts/util/missingCaseError.ts @@ -1,20 +1,5 @@ // `missingCaseError` is useful for compile-time checking that all `case`s in // a `switch` statement have been handled, e.g. // -// type AttachmentType = 'media' | 'documents'; -// -// const type: AttachmentType = selectedTab; -// switch (type) { -// case 'media': -// return ; -// case 'documents': -// return ; -// default: -// return missingCaseError(type); -// } -// -// If we extended `AttachmentType` to `'media' | 'documents' | 'links'` the code -// above would trigger a compiler error stating that `'links'` has not been -// handled by our `switch` / `case` statement which is useful for code -// maintenance and system evolution. 
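The hunk above removes the inline usage example from the `missingCaseError` comment (its JSX return values were already stripped from this extract), which leaves the trailing "e.g." pointing at nothing. For reference, the exhaustiveness-check pattern that example described looks roughly like the sketch below; the panel strings are stand-ins for the JSX the original returned.

```ts
// The helper itself, as exported by ts/util/missingCaseError.ts:
const missingCaseError = (x: never): TypeError => new TypeError(`Unhandled case: ${x}`);

// Rough reconstruction of the removed doc example.
type AttachmentType = 'media' | 'documents';

function renderPanel(selectedTab: AttachmentType): string {
  const type: AttachmentType = selectedTab;
  switch (type) {
    case 'media':
      return 'media panel';
    case 'documents':
      return 'documents panel';
    default:
      // If AttachmentType is later extended to 'media' | 'documents' | 'links'
      // without a matching case above, `type` is no longer `never` here and
      // this call becomes a compile-time error.
      throw missingCaseError(type);
  }
}
```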
+ export const missingCaseError = (x: never): TypeError => new TypeError(`Unhandled case: ${x}`); diff --git a/ts/util/reactions.ts b/ts/util/reactions.ts index cb1cad416b..c816bc7148 100644 --- a/ts/util/reactions.ts +++ b/ts/util/reactions.ts @@ -86,7 +86,7 @@ const sendMessageReaction = async (messageId: string, emoji: string) => { return undefined; } - let me = UserUtils.getOurPubKeyStrFromCache(); + let me: string = UserUtils.getOurPubKeyStrFromCache(); let id = Number(found.get('sent_at')); if (found.get('isPublic')) { diff --git a/ts/util/readReceipts.ts b/ts/util/readReceipts.ts index 5adb3c951e..1db89dfebd 100644 --- a/ts/util/readReceipts.ts +++ b/ts/util/readReceipts.ts @@ -1,7 +1,7 @@ import { MessageCollection } from '../models/message'; import { Data } from '../data/data'; -import { getConversationController } from '../session/conversations'; +import { ConvoHub } from '../session/conversations'; async function getTargetMessage(reader: string, messages: MessageCollection) { if (messages.length === 0) { @@ -27,11 +27,7 @@ async function onReadReceipt(receipt: { source: string; timestamp: number; readA return; } const convoId = message.get('conversationId'); // this might be a group and we don't want to handle them - if ( - !convoId || - !getConversationController().get(convoId) || - !getConversationController().get(convoId).isPrivate() - ) { + if (!convoId || !ConvoHub.use().get(convoId) || !ConvoHub.use().get(convoId).isPrivate()) { window.log.info( 'Convo is undefined or not a private chat for read receipt in convo', convoId @@ -58,7 +54,7 @@ async function onReadReceipt(receipt: { source: string; timestamp: number; readA await message.commit(); // notify frontend listeners - const conversation = getConversationController().get(message.get('conversationId')); + const conversation = ConvoHub.use().get(message.get('conversationId')); if (conversation) { conversation.updateLastMessage(); } diff --git a/ts/util/releaseFeature.ts b/ts/util/releaseFeature.ts index 17f573575f..76be916033 100644 --- a/ts/util/releaseFeature.ts +++ b/ts/util/releaseFeature.ts @@ -1,7 +1,7 @@ -import { GetNetworkTime } from '../session/apis/snode_api/getNetworkTime'; import { FEATURE_RELEASE_TIMESTAMPS } from '../session/constants'; -import { ConfigurationSync } from '../session/utils/job_runners/jobs/ConfigurationSyncJob'; +import { UserSync } from '../session/utils/job_runners/jobs/UserSyncJob'; import { assertUnreachable } from '../types/sqlSharedTypes'; +import { NetworkTime } from './NetworkTime'; import { Storage } from './storage'; let isDisappearingMessageFeatureReleased: boolean | undefined; @@ -77,15 +77,12 @@ async function checkIsFeatureReleased(featureName: FeatureNameTracked): Promise< const featureAlreadyReleased = await getIsFeatureReleased(featureName); // Is it time to release the feature based on the network timestamp? - if ( - !featureAlreadyReleased && - GetNetworkTime.getNowWithNetworkOffset() >= getFeatureReleaseTimestamp(featureName) - ) { + if (!featureAlreadyReleased && NetworkTime.now() >= getFeatureReleaseTimestamp(featureName)) { window.log.info(`[releaseFeature]: It is time to release ${featureName}. 
Releasing it now`); await Storage.put(featureStorageItemId(featureName), true); setIsFeatureReleasedCached(featureName, true); // trigger a sync right away so our user data is online - await ConfigurationSync.queueNewJobIfNeeded(); + await UserSync.queueNewJobIfNeeded(); } const isReleased = Boolean(getIsFeatureReleasedCached(featureName)); @@ -100,10 +97,7 @@ async function checkIsUserConfigFeatureReleased() { } async function checkIsDisappearMessageV2FeatureReleased() { - return ( - (await checkIsFeatureReleased('disappearing_messages')) || - !!process.env.MULTI?.toLocaleLowerCase().includes('disappear_v2') - ); // FIXME to remove after QA + return checkIsFeatureReleased('disappearing_messages'); } function isUserConfigFeatureReleasedCached(): boolean { @@ -112,10 +106,7 @@ function isUserConfigFeatureReleasedCached(): boolean { // NOTE Make sure to call checkIsDisappearMessageV2FeatureReleased at least once and then use this. It's mostly used in components that are rendered where we don't want to do async calls function isDisappearMessageV2FeatureReleasedCached(): boolean { - return ( - !!isDisappearingMessageFeatureReleased || - !!process.env.MULTI?.toLocaleLowerCase().includes('disappear_v2') // FIXME to remove after QA - ); + return !!isDisappearingMessageFeatureReleased; } export const ReleasedFeatures = { diff --git a/ts/webworker/worker_interface.ts b/ts/webworker/worker_interface.ts index 84f29845ad..496d8b2006 100644 --- a/ts/webworker/worker_interface.ts +++ b/ts/webworker/worker_interface.ts @@ -106,7 +106,7 @@ export class WorkerInterface { reject: (error: any) => { this._removeJob(id); const end = Date.now(); - window.log.info( + window.log.debug( `Worker job ${id} (${fnName}) failed in ${end - start}ms with ${error.message}` ); return reject(error); @@ -117,9 +117,11 @@ export class WorkerInterface { private _removeJob(id: number) { if (this._DEBUG) { this._jobs[id].complete = true; - } else { - delete this._jobs[id]; + return this._jobs[id]; } + const job = this._jobs[id]; + delete this._jobs[id]; + return job; } private _getJob(id: number) { diff --git a/ts/webworker/workers/browser/libsession_worker_functions.d.ts b/ts/webworker/workers/browser/libsession_worker_functions.d.ts deleted file mode 100644 index bb0e95484d..0000000000 --- a/ts/webworker/workers/browser/libsession_worker_functions.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { - BaseConfigActions, - ContactsConfigActionsType, - ConvoInfoVolatileConfigActionsType, - UserConfigActionsType, - UserGroupsConfigActionsType, -} from 'libsession_util_nodejs'; - -// we can only have one of those wrapper for our current user (but we can have a few configs for it to be merged into one) -type UserConfig = 'UserConfig'; -type ContactsConfig = 'ContactsConfig'; -type UserGroupsConfig = 'UserGroupsConfig'; -type ConvoInfoVolatileConfig = 'ConvoInfoVolatileConfig'; - -export type ConfigWrapperObjectTypes = - | UserConfig - | ContactsConfig - | UserGroupsConfig - | ConvoInfoVolatileConfig; - -type UserConfigFunctions = - | [UserConfig, ...BaseConfigActions] - | [UserConfig, ...UserConfigActionsType]; -type ContactsConfigFunctions = - | [ContactsConfig, ...BaseConfigActions] - | [ContactsConfig, ...ContactsConfigActionsType]; -type UserGroupsConfigFunctions = - | [UserGroupsConfig, ...BaseConfigActions] - | [UserGroupsConfig, ...UserGroupsConfigActionsType]; -type ConvoInfoVolatileConfigFunctions = - | [ConvoInfoVolatileConfig, ...BaseConfigActions] - | [ConvoInfoVolatileConfig, ...ConvoInfoVolatileConfigActionsType]; -type 
BlindingFunctions = ['Blinding', ...BlindingFunctions]; - -export type LibSessionWorkerFunctions = - | UserConfigFunctions - | ContactsConfigFunctions - | UserGroupsConfigFunctions - | ConvoInfoVolatileConfigFunctions - | BlindingFunctions; diff --git a/ts/webworker/workers/browser/libsession_worker_functions.ts b/ts/webworker/workers/browser/libsession_worker_functions.ts new file mode 100644 index 0000000000..783f08cd31 --- /dev/null +++ b/ts/webworker/workers/browser/libsession_worker_functions.ts @@ -0,0 +1,105 @@ +import type { + BaseConfigActions, + BlindingActionsType, + ContactsConfigActionsType, + ConvoInfoVolatileConfigActionsType, + GroupPubkeyType, + MetaGroupActionsType, + MultiEncryptActionsType, + UserConfigActionsType, + UserGroupsConfigActionsType, +} from 'libsession_util_nodejs'; + +// we can only have one of those wrapper for our current user (but we can have a few configs for it to be merged into one) +export type UserConfig = 'UserConfig'; +export type ContactsConfig = 'ContactsConfig'; +export type UserGroupsConfig = 'UserGroupsConfig'; +export type ConvoInfoVolatileConfig = 'ConvoInfoVolatileConfig'; + +export const MetaGroupConfigValue = 'MetaGroupConfig-'; +export const MultiEncryptConfigValue = 'MultiEncrypt'; +export const BlindedConfigValue = 'Blinding'; +type MetaGroupConfigType = typeof MetaGroupConfigValue; +export type MetaGroupConfig = `${MetaGroupConfigType}${GroupPubkeyType}`; +export type MultiEncryptConfig = typeof MultiEncryptConfigValue; +export type BlindingConfig = typeof BlindedConfigValue; + +export type ConfigWrapperUser = + | UserConfig + | ContactsConfig + | UserGroupsConfig + | ConvoInfoVolatileConfig; + +export type ConfigWrapperGroup = MetaGroupConfig; + +export type ConfigWrapperObjectTypesMeta = + | ConfigWrapperUser + | ConfigWrapperGroup + | MultiEncryptConfig + | BlindingConfig; + +export type ConfigWrapperGroupDetailed = 'GroupInfo' | 'GroupMember' | 'GroupKeys'; + +export type ConfigWrapperObjectTypesDetailed = ConfigWrapperUser | ConfigWrapperGroupDetailed; + +type UserConfigFunctions = + | [UserConfig, ...BaseConfigActions] + | [UserConfig, ...UserConfigActionsType]; +type ContactsConfigFunctions = + | [ContactsConfig, ...BaseConfigActions] + | [ContactsConfig, ...ContactsConfigActionsType]; +type UserGroupsConfigFunctions = + | [UserGroupsConfig, ...BaseConfigActions] + | [UserGroupsConfig, ...UserGroupsConfigActionsType]; +type ConvoInfoVolatileConfigFunctions = + | [ConvoInfoVolatileConfig, ...BaseConfigActions] + | [ConvoInfoVolatileConfig, ...ConvoInfoVolatileConfigActionsType]; +type BlindingFunctions = ['Blinding', ...BlindingActionsType]; + +// Group-related calls +type MetaGroupFunctions = [MetaGroupConfig, ...MetaGroupActionsType]; + +type MultiEncryptFunctions = [MultiEncryptConfig, ...MultiEncryptActionsType]; + +export type LibSessionWorkerFunctions = + | UserConfigFunctions + | ContactsConfigFunctions + | UserGroupsConfigFunctions + | ConvoInfoVolatileConfigFunctions + | MetaGroupFunctions + | BlindingFunctions + | MultiEncryptFunctions; + +export function isUserConfigWrapperType( + config: ConfigWrapperObjectTypesMeta +): config is ConfigWrapperUser { + return ( + config === 'ContactsConfig' || + config === 'UserConfig' || + config === 'ConvoInfoVolatileConfig' || + config === 'UserGroupsConfig' + ); +} + +export function isMetaWrapperType(config: ConfigWrapperObjectTypesMeta): config is MetaGroupConfig { + return config.startsWith(MetaGroupConfigValue); +} + +export function isMultiEncryptWrapperType( + config: 
ConfigWrapperObjectTypesMeta +): config is MultiEncryptConfig { + return config === 'MultiEncrypt'; +} + +export function isBlindingWrapperType( + config: ConfigWrapperObjectTypesMeta +): config is BlindingConfig { + return config === 'Blinding'; +} + +export function getGroupPubkeyFromWrapperType(type: ConfigWrapperGroup): GroupPubkeyType { + if (!type.startsWith(`${MetaGroupConfigValue}03`)) { + throw new Error(`not a metagroup variant: ${type}`); + } + return type.substring(type.indexOf('-03') + 1) as GroupPubkeyType; // typescript is not yet smart enough +} diff --git a/ts/webworker/workers/browser/libsession_worker_interface.ts b/ts/webworker/workers/browser/libsession_worker_interface.ts index c4e2fbbbad..4cae67f801 100644 --- a/ts/webworker/workers/browser/libsession_worker_interface.ts +++ b/ts/webworker/workers/browser/libsession_worker_interface.ts @@ -1,21 +1,35 @@ /* eslint-disable import/extensions */ /* eslint-disable import/no-unresolved */ import { - BaseWrapperActionsCalls, BlindingActionsCalls, ContactInfoSet, ContactsWrapperActionsCalls, ConvoInfoVolatileWrapperActionsCalls, + GenericWrapperActionsCall, + GroupInfoSet, + GroupPubkeyType, + GroupWrapperConstructor, LegacyGroupInfo, + MergeSingle, + MetaGroupWrapperActionsCalls, + MultiEncryptActionsCalls, ProfilePicture, + PubkeyType, + Uint8ArrayLen100, + Uint8ArrayLen64, UserConfigWrapperActionsCalls, + UserGroupsGet, + UserGroupsSet, UserGroupsWrapperActionsCalls, } from 'libsession_util_nodejs'; +// eslint-disable-next-line import/order import { join } from 'path'; +import { cloneDeep } from 'lodash'; import { getAppRootPath } from '../../../node/getRootPath'; +import { userGroupsActions } from '../../../state/ducks/userGroups'; import { WorkerInterface } from '../../worker_interface'; -import { ConfigWrapperObjectTypes, LibSessionWorkerFunctions } from './libsession_worker_functions'; +import { ConfigWrapperUser, LibSessionWorkerFunctions } from './libsession_worker_functions'; let libsessionWorkerInterface: WorkerInterface | undefined; @@ -37,74 +51,99 @@ const internalCallLibSessionWorker = async ([ libsessionWorkerInterface = new WorkerInterface(libsessionWorkerPath, 1 * 60 * 1000); } - return libsessionWorkerInterface?.callWorker(config, fnName, ...args); + const result = libsessionWorkerInterface?.callWorker(config, fnName, ...args); + + return result; }; -export const GenericWrapperActions = { - init: async ( - wrapperId: ConfigWrapperObjectTypes, +type GenericWrapperActionsCalls = { + init: ( + wrapperId: ConfigWrapperUser, ed25519Key: Uint8Array, dump: Uint8Array | null - ) => - /** base wrapper generic actions */ - callLibSessionWorker([wrapperId, 'init', ed25519Key, dump]) as Promise, + ) => Promise; + free: (wrapperId: ConfigWrapperUser) => Promise; + confirmPushed: GenericWrapperActionsCall; + dump: GenericWrapperActionsCall; + makeDump: GenericWrapperActionsCall; + merge: GenericWrapperActionsCall; + needsDump: GenericWrapperActionsCall; + needsPush: GenericWrapperActionsCall; + push: GenericWrapperActionsCall; + currentHashes: GenericWrapperActionsCall; + storageNamespace: GenericWrapperActionsCall; +}; + +// TODO rename this to a UserWrapperActions or UserGenericWrapperActions as those actions are only used for User Wrappers now +export const GenericWrapperActions: GenericWrapperActionsCalls = { + /** base wrapper generic actions */ + + init: async (wrapperId: ConfigWrapperUser, ed25519Key: Uint8Array, dump: Uint8Array | null) => + callLibSessionWorker([wrapperId, 'init', ed25519Key, dump]) as 
ReturnType< + GenericWrapperActionsCalls['init'] + >, + /** This function is used to free wrappers from memory only. * * See freeUserWrapper() in libsession.worker.ts */ - free: async (wrapperId: ConfigWrapperObjectTypes) => + free: async (wrapperId: ConfigWrapperUser) => callLibSessionWorker([wrapperId, 'free']) as Promise, - confirmPushed: async (wrapperId: ConfigWrapperObjectTypes, seqno: number, hash: string) => + confirmPushed: async (wrapperId: ConfigWrapperUser, seqno: number, hash: string) => callLibSessionWorker([wrapperId, 'confirmPushed', seqno, hash]) as ReturnType< - BaseWrapperActionsCalls['confirmPushed'] + GenericWrapperActionsCalls['confirmPushed'] >, - dump: async (wrapperId: ConfigWrapperObjectTypes) => - callLibSessionWorker([wrapperId, 'dump']) as Promise< - ReturnType + dump: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'dump']) as ReturnType, + makeDump: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'makeDump']) as ReturnType< + GenericWrapperActionsCalls['makeDump'] >, - merge: async ( - wrapperId: ConfigWrapperObjectTypes, - toMerge: Array<{ hash: string; data: Uint8Array }> - ) => - callLibSessionWorker([wrapperId, 'merge', toMerge]) as Promise< - ReturnType - >, - needsDump: async (wrapperId: ConfigWrapperObjectTypes) => - callLibSessionWorker([wrapperId, 'needsDump']) as Promise< - ReturnType + merge: async (wrapperId: ConfigWrapperUser, toMerge: Array) => + callLibSessionWorker([wrapperId, 'merge', toMerge]) as ReturnType< + GenericWrapperActionsCalls['merge'] >, - needsPush: async (wrapperId: ConfigWrapperObjectTypes) => - callLibSessionWorker([wrapperId, 'needsPush']) as Promise< - ReturnType + needsDump: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'needsDump']) as ReturnType< + GenericWrapperActionsCalls['needsDump'] >, - push: async (wrapperId: ConfigWrapperObjectTypes) => - callLibSessionWorker([wrapperId, 'push']) as Promise< - ReturnType + needsPush: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'needsPush']) as ReturnType< + GenericWrapperActionsCalls['needsPush'] >, - storageNamespace: async (wrapperId: ConfigWrapperObjectTypes) => - callLibSessionWorker([wrapperId, 'storageNamespace']) as Promise< - ReturnType + push: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'push']) as ReturnType, + currentHashes: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'currentHashes']) as ReturnType< + GenericWrapperActionsCalls['currentHashes'] >, - currentHashes: async (wrapperId: ConfigWrapperObjectTypes) => - callLibSessionWorker([wrapperId, 'currentHashes']) as Promise< - ReturnType + storageNamespace: async (wrapperId: ConfigWrapperUser) => + callLibSessionWorker([wrapperId, 'storageNamespace']) as ReturnType< + GenericWrapperActionsCalls['storageNamespace'] >, }; +function createBaseActionsFor(wrapperType: ConfigWrapperUser) { + return { + /* Reuse the GenericWrapperActions with the UserConfig argument */ + init: async (ed25519Key: Uint8Array, dump: Uint8Array | null) => + GenericWrapperActions.init(wrapperType, ed25519Key, dump), + free: async () => GenericWrapperActions.free(wrapperType), + confirmPushed: async (seqno: number, hash: string) => + GenericWrapperActions.confirmPushed(wrapperType, seqno, hash), + dump: async () => GenericWrapperActions.dump(wrapperType), + makeDump: async () => GenericWrapperActions.makeDump(wrapperType), + needsDump: async () => 
GenericWrapperActions.needsDump(wrapperType), + needsPush: async () => GenericWrapperActions.needsPush(wrapperType), + push: async () => GenericWrapperActions.push(wrapperType), + currentHashes: async () => GenericWrapperActions.currentHashes(wrapperType), + merge: async (toMerge: Array) => GenericWrapperActions.merge(wrapperType, toMerge), + storageNamespace: async () => GenericWrapperActions.storageNamespace(wrapperType), + }; +} + export const UserConfigWrapperActions: UserConfigWrapperActionsCalls = { /* Reuse the GenericWrapperActions with the UserConfig argument */ - init: async (ed25519Key: Uint8Array, dump: Uint8Array | null) => - GenericWrapperActions.init('UserConfig', ed25519Key, dump), - free: async () => GenericWrapperActions.free('UserConfig'), - confirmPushed: async (seqno: number, hash: string) => - GenericWrapperActions.confirmPushed('UserConfig', seqno, hash), - dump: async () => GenericWrapperActions.dump('UserConfig'), - merge: async (toMerge: Array<{ hash: string; data: Uint8Array }>) => - GenericWrapperActions.merge('UserConfig', toMerge), - needsDump: async () => GenericWrapperActions.needsDump('UserConfig'), - needsPush: async () => GenericWrapperActions.needsPush('UserConfig'), - push: async () => GenericWrapperActions.push('UserConfig'), - storageNamespace: async () => GenericWrapperActions.storageNamespace('UserConfig'), - currentHashes: async () => GenericWrapperActions.currentHashes('UserConfig'), + ...createBaseActionsFor('UserConfig'), /** UserConfig wrapper specific actions */ getPriority: async () => @@ -157,19 +196,7 @@ export const UserConfigWrapperActions: UserConfigWrapperActionsCalls = { export const ContactsWrapperActions: ContactsWrapperActionsCalls = { /* Reuse the GenericWrapperActions with the ContactConfig argument */ - init: async (ed25519Key: Uint8Array, dump: Uint8Array | null) => - GenericWrapperActions.init('ContactsConfig', ed25519Key, dump), - free: async () => GenericWrapperActions.free('ContactsConfig'), - confirmPushed: async (seqno: number, hash: string) => - GenericWrapperActions.confirmPushed('ContactsConfig', seqno, hash), - dump: async () => GenericWrapperActions.dump('ContactsConfig'), - merge: async (toMerge: Array<{ hash: string; data: Uint8Array }>) => - GenericWrapperActions.merge('ContactsConfig', toMerge), - needsDump: async () => GenericWrapperActions.needsDump('ContactsConfig'), - needsPush: async () => GenericWrapperActions.needsPush('ContactsConfig'), - push: async () => GenericWrapperActions.push('ContactsConfig'), - storageNamespace: async () => GenericWrapperActions.storageNamespace('ContactsConfig'), - currentHashes: async () => GenericWrapperActions.currentHashes('ContactsConfig'), + ...createBaseActionsFor('ContactsConfig'), /** ContactsConfig wrapper specific actions */ get: async (pubkeyHex: string) => @@ -192,21 +219,26 @@ export const ContactsWrapperActions: ContactsWrapperActionsCalls = { >, }; -export const UserGroupsWrapperActions: UserGroupsWrapperActionsCalls = { - /* Reuse the GenericWrapperActions with the ContactConfig argument */ - init: async (ed25519Key: Uint8Array, dump: Uint8Array | null) => - GenericWrapperActions.init('UserGroupsConfig', ed25519Key, dump), - free: async () => GenericWrapperActions.free('UserGroupsConfig'), - confirmPushed: async (seqno: number, hash: string) => - GenericWrapperActions.confirmPushed('UserGroupsConfig', seqno, hash), - dump: async () => GenericWrapperActions.dump('UserGroupsConfig'), - merge: async (toMerge: Array<{ hash: string; data: Uint8Array }>) => - 
GenericWrapperActions.merge('UserGroupsConfig', toMerge), - needsDump: async () => GenericWrapperActions.needsDump('UserGroupsConfig'), - needsPush: async () => GenericWrapperActions.needsPush('UserGroupsConfig'), - push: async () => GenericWrapperActions.push('UserGroupsConfig'), - storageNamespace: async () => GenericWrapperActions.storageNamespace('UserGroupsConfig'), - currentHashes: async () => GenericWrapperActions.currentHashes('UserGroupsConfig'), +// this is a cache of the new groups only. Anytime we create, update, delete, or merge a group, we update this +const groups: Map = new Map(); + +function dispatchCachedGroupsToRedux() { + window?.inboxStore?.dispatch?.( + userGroupsActions.refreshUserGroupsSlice({ groups: [...groups.values()] }) + ); +} + +export const UserGroupsWrapperActions: UserGroupsWrapperActionsCalls & { + getCachedGroup: (pubkeyHex: GroupPubkeyType) => UserGroupsGet | undefined; +} = { + /* Reuse the GenericWrapperActions with the UserGroupsConfig argument */ + ...createBaseActionsFor('UserGroupsConfig'), + // override the merge() as we need to refresh the cached groups + merge: async (toMerge: Array) => { + const mergeRet = await GenericWrapperActions.merge('UserGroupsConfig', toMerge); + await UserGroupsWrapperActions.getAllGroups(); // this refreshes the cached data after merge + return mergeRet; + }, /** UserGroups wrapper specific actions */ @@ -264,23 +296,107 @@ export const UserGroupsWrapperActions: UserGroupsWrapperActionsCalls = { callLibSessionWorker(['UserGroupsConfig', 'eraseLegacyGroup', pubkeyHex]) as Promise< ReturnType >, + + createGroup: async () => { + const group = (await callLibSessionWorker(['UserGroupsConfig', 'createGroup'])) as Awaited< + ReturnType + >; + groups.set(group.pubkeyHex, group); + dispatchCachedGroupsToRedux(); + return cloneDeep(group); + }, + + getGroup: async (pubkeyHex: GroupPubkeyType) => { + const group = (await callLibSessionWorker([ + 'UserGroupsConfig', + 'getGroup', + pubkeyHex, + ])) as Awaited>; + if (group) { + groups.set(group.pubkeyHex, group); + } else { + groups.delete(pubkeyHex); + } + dispatchCachedGroupsToRedux(); + return cloneDeep(group); + }, + + getCachedGroup: (pubkeyHex: GroupPubkeyType) => { + return groups.get(pubkeyHex); + }, + + getAllGroups: async () => { + const groupsFetched = (await callLibSessionWorker([ + 'UserGroupsConfig', + 'getAllGroups', + ])) as Awaited>; + groups.clear(); + groupsFetched.forEach(f => groups.set(f.pubkeyHex, f)); + dispatchCachedGroupsToRedux(); + return cloneDeep(groupsFetched); + }, + + setGroup: async (info: UserGroupsSet) => { + const group = (await callLibSessionWorker(['UserGroupsConfig', 'setGroup', info])) as Awaited< + ReturnType + >; + groups.set(group.pubkeyHex, group); + + dispatchCachedGroupsToRedux(); + return cloneDeep(group); + }, + + markGroupKicked: async (pubkeyHex: GroupPubkeyType) => { + const group = (await callLibSessionWorker([ + 'UserGroupsConfig', + 'markGroupKicked', + pubkeyHex, + ])) as Awaited>; + groups.set(group.pubkeyHex, group); + dispatchCachedGroupsToRedux(); + return cloneDeep(group); + }, + + markGroupInvited: async (pubkeyHex: GroupPubkeyType) => { + const group = (await callLibSessionWorker([ + 'UserGroupsConfig', + 'markGroupInvited', + pubkeyHex, + ])) as Awaited>; + groups.set(group.pubkeyHex, group); + + dispatchCachedGroupsToRedux(); + return cloneDeep(group); + }, + + markGroupDestroyed: async (pubkeyHex: GroupPubkeyType) => { + const group = (await callLibSessionWorker([ + 'UserGroupsConfig', + 'markGroupDestroyed', + 
pubkeyHex, + ])) as Awaited>; + groups.set(group.pubkeyHex, group); + + dispatchCachedGroupsToRedux(); + return cloneDeep(group); + }, + + eraseGroup: async (pubkeyHex: GroupPubkeyType) => { + const ret = (await callLibSessionWorker([ + 'UserGroupsConfig', + 'eraseGroup', + pubkeyHex, + ])) as Awaited>; + + groups.delete(pubkeyHex); + dispatchCachedGroupsToRedux(); + return ret; + }, }; export const ConvoInfoVolatileWrapperActions: ConvoInfoVolatileWrapperActionsCalls = { - /* Reuse the GenericWrapperActions with the ContactConfig argument */ - init: async (ed25519Key: Uint8Array, dump: Uint8Array | null) => - GenericWrapperActions.init('ConvoInfoVolatileConfig', ed25519Key, dump), - free: async () => GenericWrapperActions.free('ConvoInfoVolatileConfig'), - confirmPushed: async (seqno: number, hash: string) => - GenericWrapperActions.confirmPushed('ConvoInfoVolatileConfig', seqno, hash), - dump: async () => GenericWrapperActions.dump('ConvoInfoVolatileConfig'), - merge: async (toMerge: Array<{ hash: string; data: Uint8Array }>) => - GenericWrapperActions.merge('ConvoInfoVolatileConfig', toMerge), - needsDump: async () => GenericWrapperActions.needsDump('ConvoInfoVolatileConfig'), - needsPush: async () => GenericWrapperActions.needsPush('ConvoInfoVolatileConfig'), - push: async () => GenericWrapperActions.push('ConvoInfoVolatileConfig'), - storageNamespace: async () => GenericWrapperActions.storageNamespace('ConvoInfoVolatileConfig'), - currentHashes: async () => GenericWrapperActions.currentHashes('ConvoInfoVolatileConfig'), + /* Reuse the GenericWrapperActions with the ConvoInfoVolatileConfig argument */ + ...createBaseActionsFor('ConvoInfoVolatileConfig'), /** ConvoInfoVolatile wrapper specific actions */ // 1o1 @@ -332,6 +448,30 @@ export const ConvoInfoVolatileWrapperActions: ConvoInfoVolatileWrapperActionsCal callLibSessionWorker(['ConvoInfoVolatileConfig', 'eraseLegacyGroup', pubkeyHex]) as Promise< ReturnType >, + // groups + getGroup: async (pubkeyHex: GroupPubkeyType) => + callLibSessionWorker(['ConvoInfoVolatileConfig', 'getGroup', pubkeyHex]) as Promise< + ReturnType + >, + + getAllGroups: async () => + callLibSessionWorker(['ConvoInfoVolatileConfig', 'getAllGroups']) as Promise< + ReturnType + >, + + setGroup: async (pubkeyHex: GroupPubkeyType, lastRead: number, unread: boolean) => + callLibSessionWorker([ + 'ConvoInfoVolatileConfig', + 'setGroup', + pubkeyHex, + lastRead, + unread, + ]) as Promise>, + + eraseGroup: async (pubkeyHex: GroupPubkeyType) => + callLibSessionWorker(['ConvoInfoVolatileConfig', 'eraseGroup', pubkeyHex]) as Promise< + ReturnType + >, // communities getCommunity: async (communityFullUrl: string) => @@ -361,6 +501,269 @@ export const ConvoInfoVolatileWrapperActions: ConvoInfoVolatileWrapperActionsCal ]) as Promise>, }; +export const MetaGroupWrapperActions: MetaGroupWrapperActionsCalls = { + /** Shared actions */ + init: async (groupPk: GroupPubkeyType, options: GroupWrapperConstructor) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'init', options]) as Promise< + ReturnType + >, + + free: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'free']) as Promise< + ReturnType + >, + + needsPush: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'needsPush']) as Promise< + ReturnType + >, + push: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'push']) as Promise< + ReturnType + >, + needsDump: async (groupPk: GroupPubkeyType) => 
+ callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'needsDump']) as Promise< + ReturnType + >, + metaDump: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'metaDump']) as Promise< + ReturnType + >, + metaMakeDump: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'metaMakeDump']) as Promise< + ReturnType + >, + metaConfirmPushed: async ( + groupPk: GroupPubkeyType, + args: Parameters[1] + ) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'metaConfirmPushed', args]) as Promise< + ReturnType + >, + metaMerge: async ( + groupPk: GroupPubkeyType, + args: Parameters[1] + ) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'metaMerge', args]) as Promise< + ReturnType + >, + + /** GroupInfo wrapper specific actions */ + infoGet: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'infoGet']) as Promise< + ReturnType + >, + infoSet: async (groupPk: GroupPubkeyType, infos: GroupInfoSet) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'infoSet', infos]) as Promise< + ReturnType + >, + infoDestroy: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'infoDestroy']) as Promise< + ReturnType + >, + + /** GroupMembers wrapper specific actions */ + memberGet: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'memberGet', pubkeyHex]) as Promise< + ReturnType + >, + memberGetOrConstruct: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberGetOrConstruct', + pubkeyHex, + ]) as Promise>, + memberConstructAndSet: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberConstructAndSet', + pubkeyHex, + ]) as Promise>, + + memberGetAll: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'memberGetAll']) as Promise< + ReturnType + >, + memberGetAllPendingRemovals: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'memberGetAllPendingRemovals']) as Promise< + ReturnType + >, + memberEraseAndRekey: async (groupPk: GroupPubkeyType, members: Array) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'memberEraseAndRekey', members]) as Promise< + ReturnType + >, + membersMarkPendingRemoval: async ( + groupPk: GroupPubkeyType, + members: Array, + withMessages: boolean + ) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'membersMarkPendingRemoval', + members, + withMessages, + ]) as Promise>, + memberSetAccepted: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'memberSetAccepted', pubkeyHex]) as Promise< + ReturnType + >, + memberSetPromoted: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'memberSetPromoted', pubkeyHex]) as Promise< + ReturnType + >, + memberSetPromotionAccepted: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberSetPromotionAccepted', + pubkeyHex, + ]) as Promise>, + memberSetPromotionFailed: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberSetPromotionFailed', + pubkeyHex, + ]) as Promise>, + memberSetPromotionSent: async (groupPk: 
GroupPubkeyType, pubkeyHex: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberSetPromotionSent', + pubkeyHex, + ]) as Promise>, + + memberSetInvited: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType, failed: boolean) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberSetInvited', + pubkeyHex, + failed, + ]) as Promise>, + memberSetNameTruncated: async (groupPk: GroupPubkeyType, pubkeyHex: PubkeyType, name: string) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberSetNameTruncated', + pubkeyHex, + name, + ]) as Promise>, + memberSetProfilePicture: async ( + groupPk: GroupPubkeyType, + pubkeyHex: PubkeyType, + profilePicture: ProfilePicture + ) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'memberSetProfilePicture', + pubkeyHex, + profilePicture, + ]) as Promise>, + + /** GroupKeys wrapper specific actions */ + + keyRekey: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'keyRekey']) as Promise< + ReturnType + >, + keysNeedsRekey: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'keysNeedsRekey']) as Promise< + ReturnType + >, + keyGetAll: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'keyGetAll']) as Promise< + ReturnType + >, + currentHashes: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'currentHashes']) as Promise< + ReturnType + >, + + loadKeyMessage: async ( + groupPk: GroupPubkeyType, + hash: string, + data: Uint8Array, + timestampMs: number + ) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'loadKeyMessage', + hash, + data, + timestampMs, + ]) as Promise>, + keysAdmin: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'keysAdmin']) as Promise< + ReturnType + >, + keyGetCurrentGen: async (groupPk: GroupPubkeyType) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'keyGetCurrentGen']) as Promise< + ReturnType + >, + encryptMessages: async (groupPk: GroupPubkeyType, plainTexts: Array) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'encryptMessages', plainTexts]) as Promise< + ReturnType + >, + decryptMessage: async (groupPk: GroupPubkeyType, ciphertext: Uint8Array) => + callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'decryptMessage', ciphertext]) as Promise< + ReturnType + >, + makeSwarmSubAccount: async (groupPk: GroupPubkeyType, memberPubkeyHex: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'makeSwarmSubAccount', + memberPubkeyHex, + ]) as Promise>, + generateSupplementKeys: async (groupPk: GroupPubkeyType, membersPubkeyHex: Array) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'generateSupplementKeys', + membersPubkeyHex, + ]) as Promise>, + swarmSubaccountSign: async ( + groupPk: GroupPubkeyType, + message: Uint8Array, + authData: Uint8ArrayLen100 + ) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'swarmSubaccountSign', + message, + authData, + ]) as Promise>, + + swarmSubAccountToken: async (groupPk: GroupPubkeyType, memberPk: PubkeyType) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'swarmSubAccountToken', + memberPk, + ]) as Promise>, + swarmVerifySubAccount: async (groupPk: GroupPubkeyType, signingValue: Uint8ArrayLen100) => + callLibSessionWorker([ + `MetaGroupConfig-${groupPk}`, + 'swarmVerifySubAccount', + signingValue, + ]) as Promise>, + loadAdminKeys: async (groupPk: 
GroupPubkeyType, secret: Uint8ArrayLen64) => { + return callLibSessionWorker([`MetaGroupConfig-${groupPk}`, 'loadAdminKeys', secret]) as Promise< + ReturnType + >; + }, +}; + +export const MultiEncryptWrapperActions: MultiEncryptActionsCalls = { + /* Reuse the GenericWrapperActions with the UserConfig argument */ + ...createBaseActionsFor('UserConfig'), + + /** MultiEncrypt wrapper specific actions */ + multiEncrypt: async args => + callLibSessionWorker(['MultiEncrypt', 'multiEncrypt', args]) as Promise< + ReturnType + >, + multiDecryptEd25519: async args => + callLibSessionWorker(['MultiEncrypt', 'multiDecryptEd25519', args]) as Promise< + ReturnType + >, +}; + +export const EncryptionDomains = ['SessionGroupKickedMessage'] as const; + export const BlindingActions: BlindingActionsCalls = { blindVersionPubkey: async (opts: { ed25519SecretKey: Uint8Array }) => callLibSessionWorker(['Blinding', 'blindVersionPubkey', opts]) as Promise< diff --git a/ts/webworker/workers/node/libsession/libsession.worker.ts b/ts/webworker/workers/node/libsession/libsession.worker.ts index d2661cbcdf..e8f8fe753b 100644 --- a/ts/webworker/workers/node/libsession/libsession.worker.ts +++ b/ts/webworker/workers/node/libsession/libsession.worker.ts @@ -5,12 +5,27 @@ import { BlindingWrapperNode, ContactsConfigWrapperNode, ConvoInfoVolatileWrapperNode, + GroupPubkeyType, + GroupWrapperConstructor, + MetaGroupWrapperNode, + MultiEncryptWrapperNode, UserConfigWrapperNode, UserGroupsWrapperNode, } from 'libsession_util_nodejs'; import { isEmpty, isNull } from 'lodash'; -// eslint-disable-next-line import/no-unresolved, import/extensions -import { ConfigWrapperObjectTypes } from '../../browser/libsession_worker_functions'; + +import { + BlindingConfig, + ConfigWrapperGroup, + ConfigWrapperObjectTypesMeta, + ConfigWrapperUser, + MetaGroupConfig, + MultiEncryptConfig, + isBlindingWrapperType, + isMetaWrapperType, + isMultiEncryptWrapperType, + isUserConfigWrapperType, +} from '../../browser/libsession_worker_functions'; /* eslint-disable no-console */ /* eslint-disable strict */ @@ -30,7 +45,9 @@ let contactsConfigWrapper: ContactsConfigWrapperNode | undefined; let userGroupsConfigWrapper: UserGroupsWrapperNode | undefined; let convoInfoVolatileConfigWrapper: ConvoInfoVolatileWrapperNode | undefined; -function getUserWrapper(type: ConfigWrapperObjectTypes): BaseConfigWrapperNode | undefined { +const metaGroupWrappers: Map = new Map(); + +function getUserWrapper(type: ConfigWrapperUser): BaseConfigWrapperNode | undefined { switch (type) { case 'UserConfig': return userProfileWrapper; @@ -45,46 +62,98 @@ function getUserWrapper(type: ConfigWrapperObjectTypes): BaseConfigWrapperNode | } } -function getCorrespondingWrapper(wrapperType: ConfigWrapperObjectTypes): BaseConfigWrapperNode { - switch (wrapperType) { - case 'UserConfig': - case 'ContactsConfig': - case 'UserGroupsConfig': - case 'ConvoInfoVolatileConfig': - const wrapper = getUserWrapper(wrapperType); - if (!wrapper) { - throw new Error(`${wrapperType} is not init yet`); - } - return wrapper; +function getGroupPubkeyFromWrapperType(type: ConfigWrapperGroup): GroupPubkeyType { + assertGroupWrapperType(type); + return type.substring(type.indexOf('-03') + 1) as GroupPubkeyType; // typescript is not yet smart enough +} - default: - assertUnreachable( - wrapperType, - `getCorrespondingWrapper: Missing case error "${wrapperType}"` - ); +function getGroupWrapper(type: ConfigWrapperGroup): MetaGroupWrapperNode | undefined { + assertGroupWrapperType(type); + + if 
(isMetaWrapperType(type)) { + const pk = getGroupPubkeyFromWrapperType(type); + + return metaGroupWrappers.get(pk); } + assertUnreachable(type, `getGroupWrapper: Missing case error "${type}"`); +} + +function getCorrespondingUserWrapper(wrapperType: ConfigWrapperUser): BaseConfigWrapperNode { + if (isUserConfigWrapperType(wrapperType)) { + switch (wrapperType) { + case 'UserConfig': + case 'ContactsConfig': + case 'UserGroupsConfig': + case 'ConvoInfoVolatileConfig': + const wrapper = getUserWrapper(wrapperType); + if (!wrapper) { + throw new Error(`UserWrapper: ${wrapperType} is not init yet`); + } + return wrapper; + default: + assertUnreachable( + wrapperType, + `getCorrespondingUserWrapper: Missing case error "${wrapperType}"` + ); + } + } + + assertUnreachable( + wrapperType, + `getCorrespondingUserWrapper missing global handling for "${wrapperType}"` + ); +} + +function getCorrespondingGroupWrapper(wrapperType: MetaGroupConfig): MetaGroupWrapperNode { + if (isMetaWrapperType(wrapperType)) { + const wrapper = getGroupWrapper(wrapperType); + if (!wrapper) { + throw new Error(`GroupWrapper: ${wrapperType} is not init yet`); + } + return wrapper; + } + assertUnreachable( + wrapperType, + `getCorrespondingGroupWrapper missing global handling for "${wrapperType}"` + ); +} + +function getMultiEncryptWrapper(wrapperType: MultiEncryptConfig): MultiEncryptWrapperNode { + if (isMultiEncryptWrapperType(wrapperType)) { + return MultiEncryptWrapperNode; + } + assertUnreachable(wrapperType, `getMultiEncrypt missing global handling for "${wrapperType}"`); +} + +function getBlindingWrapper(wrapperType: BlindingConfig): BlindingWrapperNode { + if (isBlindingWrapperType(wrapperType)) { + return BlindingWrapperNode; + } + assertUnreachable(wrapperType, `getBlindingWrapper missing global handling for "${wrapperType}"`); } function isUInt8Array(value: any) { return value.constructor === Uint8Array; } -function assertUserWrapperType(wrapperType: ConfigWrapperObjectTypes): ConfigWrapperObjectTypes { - if ( - wrapperType !== 'ContactsConfig' && - wrapperType !== 'UserConfig' && - wrapperType !== 'UserGroupsConfig' && - wrapperType !== 'ConvoInfoVolatileConfig' - ) { +function assertUserWrapperType(wrapperType: ConfigWrapperObjectTypesMeta): ConfigWrapperUser { + if (!isUserConfigWrapperType(wrapperType)) { throw new Error(`wrapperType "${wrapperType} is not of type User"`); } return wrapperType; } +function assertGroupWrapperType(wrapperType: ConfigWrapperObjectTypesMeta): ConfigWrapperGroup { + if (!isMetaWrapperType(wrapperType)) { + throw new Error(`wrapperType "${wrapperType} is not of type Group"`); + } + return wrapperType; +} + /** * This function can be used to initialize a wrapper which takes the private ed25519 key of the user and a dump as argument. */ -function initUserWrapper(options: Array, wrapperType: ConfigWrapperObjectTypes) { +function initUserWrapper(options: Array, wrapperType: ConfigWrapperUser) { const userType = assertUserWrapperType(wrapperType); const wrapper = getUserWrapper(wrapperType); @@ -122,13 +191,13 @@ function initUserWrapper(options: Array, wrapperType: ConfigWrapperObjectTy } /** - * This function is used to free wrappers from memory only + * * This function is used to free wrappers from memory only * * NOTE only use this function for wrappers that have not been saved to the database. * * EXAMPLE When restoring an account and fetching the display name of a user. 
We want to fetch a UserProfile config message and make a temporary wrapper for it in order to look up the display name. */ -function freeUserWrapper(wrapperType: ConfigWrapperObjectTypes) { +function freeUserWrapper(wrapperType: ConfigWrapperObjectTypesMeta) { const userWrapperType = assertUserWrapperType(wrapperType); switch (userWrapperType) { @@ -151,53 +220,111 @@ function freeUserWrapper(wrapperType: ConfigWrapperObjectTypes) { ); } } + +/* + * This function can be used to initialize a group wrapper + */ +function initGroupWrapper(options: Array, wrapperType: ConfigWrapperGroup) { + const groupType = assertGroupWrapperType(wrapperType); + + const wrapper = getGroupWrapper(wrapperType); + if (wrapper) { + // console.warn(`group: "${wrapperType}" already init`); + return; + } + + if (options.length !== 1) { + throw new Error(`group: "${wrapperType}" init needs 1 arguments`); + } + // we need all the fields defined in GroupWrapperConstructor, but the function in the wrapper will throw if we don't forward what's needed + + const { + groupEd25519Pubkey, + groupEd25519Secretkey, + metaDumped, + userEd25519Secretkey, + }: GroupWrapperConstructor = options[0]; + + if (isMetaWrapperType(groupType)) { + const pk = getGroupPubkeyFromWrapperType(groupType); + const justCreated = new MetaGroupWrapperNode({ + groupEd25519Pubkey, + groupEd25519Secretkey, + metaDumped, + userEd25519Secretkey, + }); + + metaGroupWrappers.set(pk, justCreated); + return; + } + assertUnreachable(groupType, `initGroupWrapper: Missing case error "${groupType}"`); +} + onmessage = async (e: { - data: [number, ConfigWrapperObjectTypes | 'Blinding', string, ...any]; + data: [number, ConfigWrapperObjectTypesMeta | 'Blinding', string, ...any]; }) => { const [jobId, config, action, ...args] = e.data; try { if (action === 'init') { - if (config === 'Blinding') { - // nothing to do for the blinding wrapper, all functions are static - } else { + if (config === 'Blinding' || config === 'MultiEncrypt') { + // nothing to do for the blinding/multiEncrypt wrapper, all functions are static + postMessage([jobId, null, null]); + return; + } + if (isUserConfigWrapperType(config)) { initUserWrapper(args, config); + postMessage([jobId, null, null]); + return; } - postMessage([jobId, null, null]); - return; + if (isMetaWrapperType(config)) { + initGroupWrapper(args, config); + postMessage([jobId, null, null]); + return; + } + assertUnreachable(config, `Unhandled init wrapper type: ${config}`); } - if (action === 'free') { - if (config !== 'Blinding') { + if (config === 'Blinding' || config === 'MultiEncrypt') { + // nothing to do for the blinding/multiEncrypt wrapper, all functions are static + postMessage([jobId, null, null]); + return; + } + if (isUserConfigWrapperType(config)) { freeUserWrapper(config); + postMessage([jobId, null, null]); + return; } - postMessage([jobId, null, null]); - - return; + if (isMetaWrapperType(config)) { + const pk = getGroupPubkeyFromWrapperType(config); + metaGroupWrappers.delete(pk); + postMessage([jobId, null, null]); + return; + } + assertUnreachable(config, `Unhandled free wrapper type: ${config}`); } - let result: any; - - if (config === 'Blinding') { - const fn = (BlindingWrapperNode as any)[action]; + const wrapper = isUserConfigWrapperType(config) + ? getCorrespondingUserWrapper(config) + : isMetaWrapperType(config) + ? getCorrespondingGroupWrapper(config) + : isMultiEncryptWrapperType(config) + ? getMultiEncryptWrapper(config) + : isBlindingWrapperType(config) + ? 
getBlindingWrapper(config) + : undefined; + if (!wrapper) { + throw new Error(`did not find an already built (or static) wrapper for config: "${config}"`); + } + const fn = (wrapper as any)[action]; - if (!fn) { - throw new Error( - `Worker: job "${jobId}" did not find function "${action}" on wrapper "${config}"` - ); - } - result = await (BlindingWrapperNode as any)[action](...args); - } else { - const wrapper = getCorrespondingWrapper(config); - const fn = (wrapper as any)[action]; - - if (!fn) { - throw new Error( - `Worker: job "${jobId}" did not find function "${action}" on config "${config}"` - ); - } - result = await (wrapper as any)[action](...args); + if (!fn) { + throw new Error( + `Worker: job "${jobId}" did not find function "${action}" on config "${config}"` + ); } + const result = await (wrapper as any)[action](...args); + postMessage([jobId, null, result]); } catch (error) { const errorForDisplay = prepareErrorForPostMessage(error); diff --git a/ts/window.d.ts b/ts/window.d.ts index 97a9078103..dbb4774879 100644 --- a/ts/window.d.ts +++ b/ts/window.d.ts @@ -4,7 +4,6 @@ import {} from 'styled-components/cssprop'; import { Store } from '@reduxjs/toolkit'; import { Persistor } from 'redux-persist/es/types'; -import { ConversationCollection } from './models/conversation'; import { PrimaryColorStateType, ThemeStateType } from './themes/constants/colors'; import type { GetMessageArgs, @@ -141,11 +140,14 @@ declare global { sessionFeatureFlags: { useOnionRequests: boolean; useTestNet: boolean; - useClosedGroupV3: boolean; + useClosedGroupV2: boolean; + useClosedGroupV2QAButtons: boolean; + useGroupV2InviteAsAdmin: boolean; replaceLocalizedStringsWithKeys: boolean; debug: { debugLogging: boolean; debugLibsessionDumps: boolean; + debugBuiltSnodeRequests: boolean; debugFileServerRequests: boolean; debugNonSnodeRequests: boolean; debugOnionRequests: boolean; @@ -171,10 +173,8 @@ declare global { setTheme: (newTheme: string) => Promise; userConfig: any; versionInfo: any; - getConversations: () => ConversationCollection; readyForUpdates: () => void; drawAttention: () => void; - MediaRecorder: any; platform: string; openFromNotification: (convoId: string) => void; @@ -199,13 +199,12 @@ declare global { setMenuBarVisibility: (val: boolean) => void; contextMenuShown: boolean; inboxStore?: Store; + getState: () => unknown; openConversationWithMessages: (args: { conversationKey: string; messageId: string | null; }) => Promise; - LokiPushNotificationServer: any; getGlobalOnlineStatus: () => boolean; - confirmationDialog: any; setStartInTray: (val: boolean) => Promise; getStartInTray: () => Promise; getOpengroupPruning: () => Promise; @@ -215,7 +214,5 @@ declare global { setAutoUpdateEnabled: (enabled: boolean) => void; setZoomFactor: (newZoom: number) => void; updateZoomFactor: () => void; - - Signal: any; } } diff --git a/tsconfig.json b/tsconfig.json index 5d7b415cdd..7436779be8 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -26,8 +26,7 @@ "moduleResolution": "node", // Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). "resolveJsonModule": true, // Module Resolution Options - // "baseUrl": "./", // Base directory to resolve non-absolute module names. - // "paths": {}, // A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. + // "baseUrl": "./", // Base directory to resolve non-absolute module names. // "rootDirs": [], // List of root folders whose combined content represents the structure of the project at runtime. 
// "typeRoots": [], // List of folders to include type definitions from. // "types": [], // Type declaration files to be included in compilation. diff --git a/yarn.lock b/yarn.lock index ba5e93eb0b..c04ef63ffd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -362,6 +362,18 @@ resolved "https://registry.yarnpkg.com/@iconify/types/-/types-2.0.0.tgz#ab0e9ea681d6c8a1214f30cd741fe3a20cc57f57" integrity sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg== +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + "@jridgewell/gen-mapping@^0.3.5": version "0.3.5" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" @@ -455,6 +467,11 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" @@ -943,11 +960,6 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz#56e2cc26c397c038fab0e3a917a12d5c5909e901" integrity sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA== -"@types/pify@3.0.2": - version "3.0.2" - resolved "https://registry.yarnpkg.com/@types/pify/-/pify-3.0.2.tgz#1bc75dac43e31dba981c37e0a08edddc1b49cd39" - integrity sha512-a5AKF1/9pCU3HGMkesgY6LsBdXHUY3WU+I2qgpU0J+I8XuJA1aFr59eS84/HP0+dxsyBSNbt+4yGI2adUpHwSg== - "@types/plist@^3.0.1": version "3.0.5" resolved "https://registry.yarnpkg.com/@types/plist/-/plist-3.0.5.tgz#9a0c49c0f9886c8c8696a7904dd703f6284036e0" @@ -1778,7 +1790,7 @@ available-typed-arrays@^1.0.7: dependencies: possible-typed-array-names "^1.0.0" -axios@^1.6.5: +axios@^1.3.2: version "1.7.7" resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== @@ -2284,24 +2296,24 @@ clsx@^1.0.4, clsx@^1.1.1, clsx@^1.2.1: resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== -cmake-js@^7.2.1: - version "7.3.0" - resolved "https://registry.yarnpkg.com/cmake-js/-/cmake-js-7.3.0.tgz#6fd6234b7aeec4545c1c806f9e3f7ffacd9798b2" - integrity sha512-dXs2zq9WxrV87bpJ+WbnGKv8WUBXDw8blNiwNHoRe/it+ptscxhQHKB1SJXa1w+kocLMeP28Tk4/eTCezg4o+w== +cmake-js@7.2.1: + version "7.2.1" + resolved "https://registry.yarnpkg.com/cmake-js/-/cmake-js-7.2.1.tgz#757c0d39994121b084bab96290baf115ee7712cd" + integrity sha512-AdPSz9cSIJWdKvm0aJgVu3X8i0U3mNTswJkSHzZISqmYVjZk7Td4oDFg0mCBA383wO+9pG5Ix7pEP1CZH9x2BA== dependencies: - axios 
"^1.6.5" + axios "^1.3.2" debug "^4" - fs-extra "^11.2.0" + fs-extra "^10.1.0" lodash.isplainobject "^4.0.6" memory-stream "^1.0.0" - node-api-headers "^1.1.0" + node-api-headers "^0.0.2" npmlog "^6.0.2" rc "^1.2.7" - semver "^7.5.4" - tar "^6.2.0" + semver "^7.3.8" + tar "^6.1.11" url-join "^4.0.1" which "^2.0.2" - yargs "^17.7.2" + yargs "^17.6.0" color-convert@^1.9.0: version "1.9.3" @@ -2584,9 +2596,9 @@ csstype@3.1.3, csstype@^3.0.2, csstype@^3.1.2: resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== -"curve25519-js@https://github.com/oxen-io/curve25519-js": +"curve25519-js@https://github.com/session-foundation/curve25519-js": version "0.0.4" - resolved "https://github.com/oxen-io/curve25519-js#102f8c0a31b5c58bad8606979036cf763be9f4f6" + resolved "https://github.com/session-foundation/curve25519-js#102f8c0a31b5c58bad8606979036cf763be9f4f6" dargs@^7.0.0: version "7.0.0" @@ -2635,11 +2647,11 @@ date-fns@^3.6.0: integrity sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww== debug@4, debug@^4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: - version "4.3.6" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.6.tgz#2ab2c38fbaffebf8aa95fdfe6d88438c7a13c52b" - integrity sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg== + version "4.3.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" + integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== dependencies: - ms "2.1.2" + ms "^2.1.3" debug@4.3.4: version "4.3.4" @@ -3702,9 +3714,9 @@ focus-trap@^7.5.4: tabbable "^6.2.0" follow-redirects@^1.15.6: - version "1.15.6" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" - integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== + version "1.15.9" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" + integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== for-each@^0.3.3: version "0.3.3" @@ -3713,6 +3725,14 @@ for-each@^0.3.3: dependencies: is-callable "^1.1.3" +foreground-child@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" + integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" @@ -3748,7 +3768,7 @@ fs-extra@^10.0.0, fs-extra@^10.1.0: jsonfile "^6.0.1" universalify "^2.0.0" -fs-extra@^11.0.0, fs-extra@^11.2.0: +fs-extra@^11.0.0: version "11.2.0" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b" integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw== @@ -3904,17 +3924,16 @@ glob-to-regexp@^0.4.1: resolved 
"https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== -glob@7.1.2: - version "7.1.2" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" - integrity sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ== +glob@10.3.10: + version "10.3.10" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" + integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" + foreground-child "^3.1.0" + jackspeak "^2.3.5" + minimatch "^9.0.1" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-scurry "^1.10.1" glob@7.2.0: version "7.2.0" @@ -4664,6 +4683,15 @@ iterator.prototype@^1.1.2: reflect.getprototypeof "^1.0.4" set-function-name "^2.0.1" +jackspeak@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" + integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + jake@^10.8.5: version "10.9.1" resolved "https://registry.yarnpkg.com/jake/-/jake-10.9.1.tgz#8dc96b7fcc41cb19aa502af506da4e1d56f5e62b" @@ -4916,11 +4944,11 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -"libsession_util_nodejs@https://github.com/oxen-io/libsession-util-nodejs/releases/download/v0.3.23/libsession_util_nodejs-v0.3.23.tar.gz": - version "0.3.23" - resolved "https://github.com/oxen-io/libsession-util-nodejs/releases/download/v0.3.23/libsession_util_nodejs-v0.3.23.tar.gz#fed0e0e7c087a4eb49552d7ac7da4086df79bcd4" +"libsession_util_nodejs@https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.4.5/libsession_util_nodejs-v0.4.5.tar.gz": + version "0.4.5" + resolved "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.4.5/libsession_util_nodejs-v0.4.5.tar.gz#e5b62009b9af277201f1c61dbc10bda4bb8e757b" dependencies: - cmake-js "^7.2.1" + cmake-js "7.2.1" node-addon-api "^6.1.0" libsodium-sumo@^0.7.13: @@ -5138,6 +5166,11 @@ lowercase-keys@^2.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lru-cache@^10.2.0: + version "10.2.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.2.tgz#48206bc114c1252940c41b25b41af5b545aca878" + integrity sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ== + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -5342,7 +5375,7 @@ mini-css-extract-plugin@^2.7.5: schema-utils "^4.0.0" tapable "^2.2.1" -"minimatch@2 || 3", minimatch@3.0.4, minimatch@5.0.1, minimatch@9.0.3, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2, minimatch@^5.0.1: +"minimatch@2 || 3", minimatch@3.0.4, minimatch@5.0.1, minimatch@9.0.3, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2, 
minimatch@^5.0.1, minimatch@^9.0.1: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== @@ -5375,6 +5408,11 @@ minipass@^5.0.0: resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + minizlib@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" @@ -5443,7 +5481,7 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@2.1.3, ms@^2.1.1: +ms@2.1.3, ms@^2.1.1, ms@^2.1.3: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -5522,10 +5560,10 @@ node-addon-api@^6.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== -node-api-headers@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.2.0.tgz#b717cd420aec79031f8dc83a50eb0a8bdf24c70d" - integrity sha512-L9AiEkBfgupC0D/LsudLPOhzy/EdObsp+FHyL1zSK0kKv5FDA9rJMoRz8xd+ojxzlqfg0tTZm2h8ot2nS7bgRA== +node-api-headers@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-0.0.2.tgz#31f4c6c2750b63e598128e76a60aefca6d76ac5d" + integrity sha512-YsjmaKGPDkmhoNKIpkChtCsPVaRE0a274IdERKnuc/E8K1UJdBZ4/mvI006OijlQZHCfpRNOH3dfHQs92se8gg== node-fetch@^2.6.7: version "2.7.0" @@ -5888,6 +5926,14 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-scurry@^1.10.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-to-regexp@^1.7.0: version "1.8.0" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" @@ -5925,11 +5971,6 @@ pidtree@0.6.0: resolved "https://registry.yarnpkg.com/pidtree/-/pidtree-0.6.0.tgz#90ad7b6d42d5841e69e0a2419ef38f8883aa057c" integrity sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g== -pify@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity 
sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== - pkg-dir@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" @@ -6817,7 +6858,7 @@ semver@^6.0.0, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4: +semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.4: version "7.6.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== @@ -6926,6 +6967,11 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + sinon@9.0.2: version "9.0.2" resolved "https://registry.yarnpkg.com/sinon/-/sinon-9.0.2.tgz#b9017e24633f4b1c98dfb6e784a5f0509f5fd85d" @@ -7076,6 +7122,15 @@ string-argv@0.3.2: resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.2.tgz#2b6d0ef24b656274d957d54e0a4bbf6153dc02b6" integrity sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q== +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" @@ -7085,7 +7140,7 @@ string-argv@0.3.2: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^5.0.0, string-width@^5.0.1: +string-width@^5.0.0, string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== @@ -7147,6 +7202,13 @@ string_decoder@^1.1.1: dependencies: safe-buffer "~5.2.0" +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -7269,7 +7331,7 @@ tapable@^2.1.1, tapable@^2.2.0, tapable@^2.2.1: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -tar@^6.1.0, tar@^6.1.11, tar@^6.2.0: +tar@^6.1.0, tar@^6.1.11: version "6.2.1" resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== @@ -7945,6 +8007,15 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -8066,7 +8137,7 @@ yargs@16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" -yargs@^17.0.0, yargs@^17.0.1, yargs@^17.7.2: +yargs@^17.0.0, yargs@^17.0.1, yargs@^17.6.0: version "17.7.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== @@ -8096,3 +8167,8 @@ yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +zod@^3.22.4: + version "3.23.8" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.23.8.tgz#e37b957b5d52079769fb8097099b592f0ef4067d" + integrity sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==