diff --git a/keyserver/src/creators/message-creator.js b/keyserver/src/creators/message-creator.js
index 072379222..ca77b7be9 100644
--- a/keyserver/src/creators/message-creator.js
+++ b/keyserver/src/creators/message-creator.js
@@ -1,696 +1,700 @@
// @flow

import invariant from 'invariant';
import _pickBy from 'lodash/fp/pickBy.js';

import { permissionLookup } from 'lib/permissions/thread-permissions.js';
import {
  rawMessageInfoFromMessageData,
  shimUnsupportedRawMessageInfos,
  stripLocalIDs,
} from 'lib/shared/message-utils.js';
import { pushTypes } from 'lib/shared/messages/message-spec.js';
import type { PushType } from 'lib/shared/messages/message-spec.js';
import { messageSpecs } from 'lib/shared/messages/message-specs.js';
import { messageTypes } from 'lib/types/message-types-enum.js';
import {
  messageDataLocalID,
  type MessageData,
  type RawMessageInfo,
} from 'lib/types/message-types.js';
import { redisMessageTypes } from 'lib/types/redis-types.js';
import { threadPermissions } from 'lib/types/thread-permission-types.js';
import { updateTypes } from 'lib/types/update-types-enum.js';
import { promiseAll } from 'lib/utils/promises.js';

import createIDs from './id-creator.js';
import type { UpdatesForCurrentSession } from './update-creator.js';
import { createUpdates } from './update-creator.js';
import {
  dbQuery,
  SQL,
  appendSQLArray,
  mergeOrConditions,
} from '../database/database.js';
import { processMessagesForSearch } from '../database/search-utils.js';
import {
  fetchMessageInfoForLocalID,
  fetchMessageInfoByID,
} from '../fetchers/message-fetchers.js';
import { fetchOtherSessionsForViewer } from '../fetchers/session-fetchers.js';
import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js';
import type { Device, PushUserInfo } from '../push/send.js';
import { sendPushNotifs, sendRescindNotifs } from '../push/send.js';
import { handleAsyncPromise } from '../responders/handlers.js';
import type { Viewer } from '../session/viewer.js';
import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times.js';
import { publisher } from '../socket/redis.js';
import { creationString } from '../utils/idempotent.js';

type UserThreadInfo = {
  +devices: Map<string, Device>,
  +threadIDs: Set<string>,
  +notFocusedThreadIDs: Set<string>,
  +userNotMemberOfSubthreads: Set<string>,
  +subthreadsCanSetToUnread: Set<string>,
};

type LatestMessagesPerUser = Map<
  string,
  $ReadOnlyMap<
    string,
    {
      +latestMessage: string,
      +latestReadMessage?: string,
    },
  >,
>;

type LatestMessages = $ReadOnlyArray<{
  +userID: string,
  +threadID: string,
  +latestMessage: string,
  +latestReadMessage: ?string,
}>;

// Does not do permission checks!
(checkThreadPermission) async function createMessages( viewer: Viewer, messageDatas: $ReadOnlyArray, updatesForCurrentSession?: UpdatesForCurrentSession = 'return', ): Promise { if (messageDatas.length === 0) { return []; } const existingMessages = await Promise.all( messageDatas.map(messageData => fetchMessageInfoForLocalID(viewer, messageDataLocalID(messageData)), ), ); const existingMessageInfos: RawMessageInfo[] = []; const newMessageDatas: MessageData[] = []; for (let i = 0; i < messageDatas.length; i++) { const existingMessage = existingMessages[i]; if (existingMessage) { existingMessageInfos.push(existingMessage); } else { newMessageDatas.push(messageDatas[i]); } } if (newMessageDatas.length === 0) { return shimUnsupportedRawMessageInfos( existingMessageInfos, viewer.platformDetails, ); } const ids = await createIDs('messages', newMessageDatas.length); const returnMessageInfos: RawMessageInfo[] = []; const subthreadPermissionsToCheck: Set = new Set(); const messageInsertRows = []; // Indices in threadsToMessageIndices point to newMessageInfos const newMessageInfos: RawMessageInfo[] = []; const threadsToMessageIndices: Map = new Map(); let nextNewMessageIndex = 0; for (let i = 0; i < messageDatas.length; i++) { const existingMessage = existingMessages[i]; if (existingMessage) { returnMessageInfos.push(existingMessage); continue; } const messageData = messageDatas[i]; const threadID = messageData.threadID; const creatorID = messageData.creatorID; let messageIndices = threadsToMessageIndices.get(threadID); if (!messageIndices) { messageIndices = []; threadsToMessageIndices.set(threadID, messageIndices); } const newMessageIndex = nextNewMessageIndex++; messageIndices.push(newMessageIndex); const serverID = ids[newMessageIndex]; if (messageData.type === messageTypes.CREATE_SUB_THREAD) { subthreadPermissionsToCheck.add(messageData.childThreadID); } const content = messageSpecs[messageData.type].messageContentForServerDB?.(messageData); const creation = messageData.localID && viewer.hasSessionInfo ? creationString(viewer, messageData.localID) : null; let targetMessageID = null; if (messageData.targetMessageID) { targetMessageID = messageData.targetMessageID; } else if (messageData.sourceMessage) { targetMessageID = messageData.sourceMessage.id; } messageInsertRows.push([ serverID, threadID, creatorID, messageData.type, content, messageData.time, creation, targetMessageID, ]); const rawMessageInfo = rawMessageInfoFromMessageData(messageData, serverID); newMessageInfos.push(rawMessageInfo); // at newMessageIndex returnMessageInfos.push(rawMessageInfo); // at i } const messageInsertQuery = SQL` INSERT INTO messages(id, thread, user, type, content, time, creation, target_message) VALUES ${messageInsertRows} `; await dbQuery(messageInsertQuery); const postMessageSendPromise = postMessageSend( viewer, threadsToMessageIndices, subthreadPermissionsToCheck, stripLocalIDs(newMessageInfos), newMessageDatas, updatesForCurrentSession, ); if (!viewer.isScriptViewer) { // If we're not being called from a script, then we avoid awaiting // postMessageSendPromise below so that we don't delay the response to the // user on external services. In that case, we use handleAsyncPromise to // make sure any exceptions are caught and logged. handleAsyncPromise(postMessageSendPromise); } await Promise.all([ updateRepliesCount(threadsToMessageIndices, newMessageDatas), viewer.isScriptViewer ? 
postMessageSendPromise : undefined, ]); if (updatesForCurrentSession !== 'return') { return []; } return shimUnsupportedRawMessageInfos( returnMessageInfos, viewer.platformDetails, ); } async function updateRepliesCount( threadsToMessageIndices: Map, newMessageDatas: MessageData[], ) { const updatedThreads = []; const updateThreads = SQL` UPDATE threads SET replies_count = replies_count + (CASE `; const membershipConditions = []; for (const [threadID, messages] of threadsToMessageIndices.entries()) { const newRepliesIncrease = messages .map(i => newMessageDatas[i].type) .filter(type => messageSpecs[type].includedInRepliesCount).length; if (newRepliesIncrease === 0) { continue; } updateThreads.append(SQL` WHEN id = ${threadID} THEN ${newRepliesIncrease} `); updatedThreads.push(threadID); const senders = messages.map(i => newMessageDatas[i].creatorID); membershipConditions.push( SQL`thread = ${threadID} AND user IN (${senders})`, ); } updateThreads.append(SQL` ELSE 0 END) WHERE id IN (${updatedThreads}) AND source_message IS NOT NULL `); const updateMemberships = SQL` UPDATE memberships SET sender = 1 WHERE sender = 0 AND ( `; updateMemberships.append(mergeOrConditions(membershipConditions)); updateMemberships.append(SQL` ) `); if (updatedThreads.length > 0) { const [{ threadInfos: serverThreadInfos }] = await Promise.all([ fetchServerThreadInfos({ threadIDs: new Set(updatedThreads) }), dbQuery(updateThreads), dbQuery(updateMemberships), ]); const time = Date.now(); const updates = []; for (const threadID in serverThreadInfos) { for (const member of serverThreadInfos[threadID].members) { updates.push({ userID: member.id, time, threadID, type: updateTypes.UPDATE_THREAD, }); } } await createUpdates(updates); } } // Handles: // (1) Sending push notifs // (2) Setting threads to unread and generating corresponding UpdateInfos // (3) Publishing to Redis so that active sockets pass on new messages // (4) Processing messages for search async function postMessageSend( viewer: Viewer, threadsToMessageIndices: Map, subthreadPermissionsToCheck: Set, messageInfos: RawMessageInfo[], messageDatas: MessageData[], updatesForCurrentSession: UpdatesForCurrentSession, ) { const processForSearch = processMessagesForSearch(messageInfos); let joinIndex = 0; let subthreadSelects = ''; const subthreadJoins = []; for (const subthread of subthreadPermissionsToCheck) { const index = joinIndex++; subthreadSelects += ` , stm${index}.permissions AS subthread${subthread}_permissions, stm${index}.role AS subthread${subthread}_role `; const join = SQL`LEFT JOIN memberships `; join.append(`stm${index} ON stm${index}.`); join.append(SQL`thread = ${subthread} AND `); join.append(`stm${index}.user = m.user`); subthreadJoins.push(join); } const time = earliestFocusedTimeConsideredExpired(); const visibleExtractString = `$.${threadPermissions.VISIBLE}.value`; const query = SQL` SELECT m.user, m.thread, c.platform, c.device_token, c.versions, c.id, f.user AS focused_user `; query.append(subthreadSelects); query.append(SQL` FROM memberships m LEFT JOIN cookies c ON c.user = m.user AND c.device_token IS NOT NULL LEFT JOIN focused f ON f.user = m.user AND f.thread = m.thread AND f.time > ${time} `); appendSQLArray(query, subthreadJoins, SQL` `); query.append(SQL` WHERE (m.role > 0 OR f.user IS NOT NULL) AND JSON_EXTRACT(m.permissions, ${visibleExtractString}) IS TRUE AND m.thread IN (${[...threadsToMessageIndices.keys()]}) `); const perUserInfo = new Map(); const [result] = await dbQuery(query); for (const row of result) { const 
userID = row.user.toString(); const threadID = row.thread.toString(); const deviceToken = row.device_token; const focusedUser = !!row.focused_user; const { platform } = row; const versions = JSON.parse(row.versions); const cookieID = row.id; let thisUserInfo = perUserInfo.get(userID); if (!thisUserInfo) { thisUserInfo = { devices: new Map(), threadIDs: new Set(), notFocusedThreadIDs: new Set(), userNotMemberOfSubthreads: new Set(), subthreadsCanSetToUnread: new Set(), }; perUserInfo.set(userID, thisUserInfo); // Subthread info will be the same for each subthread, so we only parse // it once for (const subthread of subthreadPermissionsToCheck) { const isSubthreadMember = row[`subthread${subthread}_role`] > 0; const rawSubthreadPermissions = row[`subthread${subthread}_permissions`]; const subthreadPermissions = JSON.parse(rawSubthreadPermissions); const canSeeSubthread = permissionLookup( subthreadPermissions, threadPermissions.KNOW_OF, ); if (!canSeeSubthread) { continue; } thisUserInfo.subthreadsCanSetToUnread.add(subthread); // Only include the notification from the superthread if there is no // notification from the subthread if ( !isSubthreadMember || !permissionLookup(subthreadPermissions, threadPermissions.VISIBLE) ) { thisUserInfo.userNotMemberOfSubthreads.add(subthread); } } } if (deviceToken && cookieID) { thisUserInfo.devices.set(deviceToken, { platform, deviceToken, cookieID: cookieID.toString(), codeVersion: versions ? versions.codeVersion : null, stateVersion: versions ? versions.stateVersion : null, }); } thisUserInfo.threadIDs.add(threadID); if (!focusedUser) { thisUserInfo.notFocusedThreadIDs.add(threadID); } } const messageInfosPerUser: { [userID: string]: $ReadOnlyArray, } = {}; const latestMessagesPerUser: LatestMessagesPerUser = new Map(); const userPushInfoPromises: { [string]: Promise } = {}; const userRescindInfoPromises: { [string]: Promise } = {}; for (const pair of perUserInfo) { const [userID, preUserPushInfo] = pair; const userMessageInfos = []; for (const threadID of preUserPushInfo.threadIDs) { const messageIndices = threadsToMessageIndices.get(threadID); invariant(messageIndices, `indices should exist for thread ${threadID}`); for (const messageIndex of messageIndices) { const messageInfo = messageInfos[messageIndex]; userMessageInfos.push(messageInfo); } } if (userMessageInfos.length > 0) { messageInfosPerUser[userID] = userMessageInfos; } latestMessagesPerUser.set( userID, determineLatestMessagesPerThread( preUserPushInfo, userID, threadsToMessageIndices, messageInfos, ), ); const { userNotMemberOfSubthreads } = preUserPushInfo; const userDevices = [...preUserPushInfo.devices.values()]; if (userDevices.length === 0) { continue; } const generateNotifUserInfoPromise = async (pushType: PushType) => { const promises: Array< Promise, > = []; for (const threadID of preUserPushInfo.notFocusedThreadIDs) { const messageIndices = threadsToMessageIndices.get(threadID); invariant( messageIndices, `indices should exist for thread ${threadID}`, ); promises.push( ...messageIndices.map(async messageIndex => { const messageInfo = messageInfos[messageIndex]; const { type } = messageInfo; if (messageInfo.creatorID === userID) { // We never send a user notifs about their own activity return undefined; } const { generatesNotifs } = messageSpecs[type]; const messageData = messageDatas[messageIndex]; if (!generatesNotifs) { return undefined; } const doesGenerateNotif = await generatesNotifs( messageInfo, messageData, { notifTargetUserID: userID, userNotMemberOfSubthreads, 
fetchMessageInfoByID: (messageID: string) => fetchMessageInfoByID(viewer, messageID), }, ); return doesGenerateNotif === pushType ? { messageInfo, messageData } : undefined; }), ); } const messagesToNotify = await Promise.all(promises); const filteredMessagesToNotify = messagesToNotify.filter(Boolean); if (filteredMessagesToNotify.length === 0) { return undefined; } return { devices: userDevices, messageInfos: filteredMessagesToNotify.map( ({ messageInfo }) => messageInfo, ), messageDatas: filteredMessagesToNotify.map( ({ messageData }) => messageData, ), }; }; const userPushInfoPromise = generateNotifUserInfoPromise(pushTypes.NOTIF); const userRescindInfoPromise = generateNotifUserInfoPromise( pushTypes.RESCIND, ); userPushInfoPromises[userID] = userPushInfoPromise; userRescindInfoPromises[userID] = userRescindInfoPromise; } const latestMessages = flattenLatestMessagesPerUser(latestMessagesPerUser); const [pushInfo, rescindInfo] = await Promise.all([ promiseAll(userPushInfoPromises), promiseAll(userRescindInfoPromises), createReadStatusUpdates(latestMessages), redisPublish(viewer, messageInfosPerUser, updatesForCurrentSession), updateLatestMessages(latestMessages), processForSearch, ]); await Promise.all([ sendPushNotifs(_pickBy(Boolean)(pushInfo)), sendRescindNotifs(_pickBy(Boolean)(rescindInfo)), ]); } async function redisPublish( viewer: Viewer, messageInfosPerUser: { [userID: string]: $ReadOnlyArray }, updatesForCurrentSession: UpdatesForCurrentSession, ) { const avoidBroadcastingToCurrentSession = viewer.hasSessionInfo && updatesForCurrentSession !== 'broadcast'; for (const userID in messageInfosPerUser) { if (userID === viewer.userID && avoidBroadcastingToCurrentSession) { continue; } const messageInfos = messageInfosPerUser[userID]; publisher.sendMessage( { userID }, { type: redisMessageTypes.NEW_MESSAGES, messages: messageInfos, }, ); } const viewerMessageInfos = messageInfosPerUser[viewer.userID]; if (!viewerMessageInfos || !avoidBroadcastingToCurrentSession) { return; } const sessionIDs = await fetchOtherSessionsForViewer(viewer); for (const sessionID of sessionIDs) { publisher.sendMessage( { userID: viewer.userID, sessionID }, { type: redisMessageTypes.NEW_MESSAGES, messages: viewerMessageInfos, }, ); } } +type LatestMessagePerThread = { + +latestMessage: string, + +latestReadMessage?: string, +}; function determineLatestMessagesPerThread( preUserPushInfo: UserThreadInfo, userID: string, threadsToMessageIndices: $ReadOnlyMap>, messageInfos: $ReadOnlyArray, -) { +): $ReadOnlyMap { const { threadIDs, notFocusedThreadIDs, subthreadsCanSetToUnread } = preUserPushInfo; - const latestMessagesPerThread = new Map(); + const latestMessagesPerThread = new Map(); for (const threadID of threadIDs) { const messageIndices = threadsToMessageIndices.get(threadID); invariant(messageIndices, `indices should exist for thread ${threadID}`); for (const messageIndex of messageIndices) { const messageInfo = messageInfos[messageIndex]; if ( messageInfo.type === messageTypes.CREATE_SUB_THREAD && !subthreadsCanSetToUnread.has(messageInfo.childThreadID) ) { continue; } const messageID = messageInfo.id; invariant( messageID, 'message ID should exist in determineLatestMessagesPerThread', ); if ( notFocusedThreadIDs.has(threadID) && messageInfo.creatorID !== userID ) { latestMessagesPerThread.set(threadID, { latestMessage: messageID, }); } else { latestMessagesPerThread.set(threadID, { latestMessage: messageID, latestReadMessage: messageID, }); } } } return latestMessagesPerThread; } function 
flattenLatestMessagesPerUser( latestMessagesPerUser: LatestMessagesPerUser, ): LatestMessages { const result = []; for (const [userID, latestMessagesPerThread] of latestMessagesPerUser) { for (const [threadID, latestMessages] of latestMessagesPerThread) { result.push({ userID, threadID, latestMessage: latestMessages.latestMessage, latestReadMessage: latestMessages.latestReadMessage, }); } } return result; } async function createReadStatusUpdates(latestMessages: LatestMessages) { const now = Date.now(); const readStatusUpdates = latestMessages .filter(message => !message.latestReadMessage) .map(({ userID, threadID }) => ({ type: updateTypes.UPDATE_THREAD_READ_STATUS, userID, time: now, threadID, unread: true, })); if (readStatusUpdates.length === 0) { return; } await createUpdates(readStatusUpdates); } function updateLatestMessages(latestMessages: LatestMessages) { if (latestMessages.length === 0) { return; } const query = SQL` UPDATE memberships SET `; const lastMessageExpression = SQL` last_message = GREATEST(last_message, CASE `; const lastReadMessageExpression = SQL` , last_read_message = GREATEST(last_read_message, CASE `; let shouldUpdateLastReadMessage = false; for (const { userID, threadID, latestMessage, latestReadMessage, } of latestMessages) { lastMessageExpression.append(SQL` WHEN user = ${userID} AND thread = ${threadID} THEN ${latestMessage} `); if (latestReadMessage) { shouldUpdateLastReadMessage = true; lastReadMessageExpression.append(SQL` WHEN user = ${userID} AND thread = ${threadID} THEN ${latestReadMessage} `); } } lastMessageExpression.append(SQL` ELSE last_message END) `); lastReadMessageExpression.append(SQL` ELSE last_read_message END) `); const conditions = latestMessages.map( ({ userID, threadID }) => SQL`(user = ${userID} AND thread = ${threadID})`, ); query.append(lastMessageExpression); if (shouldUpdateLastReadMessage) { query.append(lastReadMessageExpression); } query.append(SQL`WHERE `); query.append(mergeOrConditions(conditions)); dbQuery(query); } export default createMessages; diff --git a/keyserver/src/creators/report-creator.js b/keyserver/src/creators/report-creator.js index e6e7658db..038bfc734 100644 --- a/keyserver/src/creators/report-creator.js +++ b/keyserver/src/creators/report-creator.js @@ -1,238 +1,238 @@ // @flow import _isEqual from 'lodash/fp/isEqual.js'; import bots from 'lib/facts/bots.js'; import { filterRawEntryInfosByCalendarQuery, serverEntryInfosObject, } from 'lib/shared/entry-utils.js'; import { messageTypes } from 'lib/types/message-types-enum.js'; import { type ReportCreationRequest, type ReportCreationResponse, type ThreadInconsistencyReportCreationRequest, type EntryInconsistencyReportCreationRequest, type UserInconsistencyReportCreationRequest, reportTypes, } from 'lib/types/report-types.js'; import { values } from 'lib/utils/objects.js'; import { sanitizeReduxReport, type ReduxCrashReport, } from 'lib/utils/sanitization.js'; import createIDs from './id-creator.js'; import createMessages from './message-creator.js'; import { dbQuery, SQL } from '../database/database.js'; import { fetchUsername } from '../fetchers/user-fetchers.js'; import { handleAsyncPromise } from '../responders/handlers.js'; import { createBotViewer } from '../session/bots.js'; import type { Viewer } from '../session/viewer.js'; import { getAndAssertKeyserverURLFacts } from '../utils/urls.js'; const { commbot } = bots; async function createReport( viewer: Viewer, request: ReportCreationRequest, ): Promise { const shouldIgnore = await ignoreReport(viewer, 
request); if (shouldIgnore) { return null; } const [id] = await createIDs('reports', 1); let type, report, time; if (request.type === reportTypes.THREAD_INCONSISTENCY) { ({ type, time, ...report } = request); time = time ? time : Date.now(); } else if (request.type === reportTypes.ENTRY_INCONSISTENCY) { ({ type, time, ...report } = request); } else if (request.type === reportTypes.MEDIA_MISSION) { ({ type, time, ...report } = request); } else if (request.type === reportTypes.USER_INCONSISTENCY) { ({ type, time, ...report } = request); } else { ({ type, ...report } = request); time = Date.now(); const redactedReduxReport: ReduxCrashReport = sanitizeReduxReport({ preloadedState: report.preloadedState, currentState: report.currentState, actions: report.actions, }); report = { ...report, ...redactedReduxReport, }; } const row = [ id, viewer.id, type, request.platformDetails.platform, JSON.stringify(report), time, ]; const query = SQL` INSERT INTO reports (id, user, type, platform, report, creation_time) VALUES ${[row]} `; await dbQuery(query); handleAsyncPromise(sendCommbotMessage(viewer, request, id)); return { id }; } async function sendCommbotMessage( viewer: Viewer, request: ReportCreationRequest, reportID: string, ): Promise { const canGenerateMessage = getCommbotMessage(request, reportID, null); if (!canGenerateMessage) { return; } const username = await fetchUsername(viewer.id); const message = getCommbotMessage(request, reportID, username); if (!message) { return; } const time = Date.now(); await createMessages(createBotViewer(commbot.userID), [ { type: messageTypes.TEXT, threadID: commbot.staffThreadID, creatorID: commbot.userID, time, text: message, }, ]); } async function ignoreReport( viewer: Viewer, request: ReportCreationRequest, ): Promise { // The below logic is to avoid duplicate inconsistency reports if ( request.type !== reportTypes.THREAD_INCONSISTENCY && request.type !== reportTypes.ENTRY_INCONSISTENCY ) { return false; } const { type, platformDetails, time } = request; if (!time) { return false; } const { platform } = platformDetails; const query = SQL` SELECT id FROM reports WHERE user = ${viewer.id} AND type = ${type} AND platform = ${platform} AND creation_time = ${time} `; const [result] = await dbQuery(query); return result.length !== 0; } function getCommbotMessage( request: ReportCreationRequest, reportID: string, username: ?string, ): ?string { const name = username ? username : '[null]'; const { platformDetails } = request; const { platform, codeVersion } = platformDetails; const platformString = codeVersion ? 
`${platform} v${codeVersion}` : platform; if (request.type === reportTypes.ERROR) { const { baseDomain, basePath } = getAndAssertKeyserverURLFacts(); return ( `${name} got an error :(\n` + `using ${platformString}\n` + `${baseDomain}${basePath}download_error_report/${reportID}` ); } else if (request.type === reportTypes.THREAD_INCONSISTENCY) { const nonMatchingThreadIDs = getInconsistentThreadIDsFromReport(request); const nonMatchingString = [...nonMatchingThreadIDs].join(', '); return ( `system detected inconsistency for ${name}!\n` + `using ${platformString}\n` + `occurred during ${request.action.type}\n` + `thread IDs that are inconsistent: ${nonMatchingString}` ); } else if (request.type === reportTypes.ENTRY_INCONSISTENCY) { const nonMatchingEntryIDs = getInconsistentEntryIDsFromReport(request); const nonMatchingString = [...nonMatchingEntryIDs].join(', '); return ( `system detected inconsistency for ${name}!\n` + `using ${platformString}\n` + `occurred during ${request.action.type}\n` + `entry IDs that are inconsistent: ${nonMatchingString}` ); } else if (request.type === reportTypes.USER_INCONSISTENCY) { const nonMatchingUserIDs = getInconsistentUserIDsFromReport(request); const nonMatchingString = [...nonMatchingUserIDs].join(', '); return ( `system detected inconsistency for ${name}!\n` + `using ${platformString}\n` + `occurred during ${request.action.type}\n` + `user IDs that are inconsistent: ${nonMatchingString}` ); } else if (request.type === reportTypes.MEDIA_MISSION) { const mediaMissionJSON = JSON.stringify(request.mediaMission); const success = request.mediaMission.result.success ? 'media mission success!' : 'media mission failed :('; return `${name} ${success}\n` + mediaMissionJSON; } else { return null; } } function findInconsistentObjectKeys( first: { +[id: string]: O }, second: { +[id: string]: O }, ): Set { - const nonMatchingIDs = new Set(); + const nonMatchingIDs = new Set(); for (const id in first) { if (!_isEqual(first[id])(second[id])) { nonMatchingIDs.add(id); } } for (const id in second) { if (!first[id]) { nonMatchingIDs.add(id); } } return nonMatchingIDs; } function getInconsistentThreadIDsFromReport( request: ThreadInconsistencyReportCreationRequest, ): Set { const { pushResult, beforeAction } = request; return findInconsistentObjectKeys(beforeAction, pushResult); } function getInconsistentEntryIDsFromReport( request: EntryInconsistencyReportCreationRequest, ): Set { const { pushResult, beforeAction, calendarQuery } = request; const filteredBeforeAction = filterRawEntryInfosByCalendarQuery( serverEntryInfosObject(values(beforeAction)), calendarQuery, ); const filteredAfterAction = filterRawEntryInfosByCalendarQuery( serverEntryInfosObject(values(pushResult)), calendarQuery, ); return findInconsistentObjectKeys(filteredBeforeAction, filteredAfterAction); } function getInconsistentUserIDsFromReport( request: UserInconsistencyReportCreationRequest, ): Set { const { beforeStateCheck, afterStateCheck } = request; return findInconsistentObjectKeys(beforeStateCheck, afterStateCheck); } export default createReport; diff --git a/keyserver/src/creators/update-creator.js b/keyserver/src/creators/update-creator.js index d091e0665..49f1e25ff 100644 --- a/keyserver/src/creators/update-creator.js +++ b/keyserver/src/creators/update-creator.js @@ -1,662 +1,664 @@ // @flow import invariant from 'invariant'; import { nonThreadCalendarFilters } from 'lib/selectors/calendar-filter-selectors.js'; import { keyForUpdateData, keyForUpdateInfo, rawUpdateInfoFromUpdateData, } from 
'lib/shared/update-utils.js'; import type { UpdateInfosRawData, UpdateTypes, } from 'lib/shared/updates/update-spec.js'; import { updateSpecs } from 'lib/shared/updates/update-specs.js'; import { type CalendarQuery, defaultCalendarQuery, type RawEntryInfos, type RawEntryInfo, type FetchEntryInfosBase, } from 'lib/types/entry-types.js'; import { defaultNumberPerThread, type MessageSelectionCriteria, type FetchMessageInfosResult, type RawMessageInfo, } from 'lib/types/message-types.js'; import { type UpdateTarget, redisMessageTypes, type NewUpdatesRedisMessage, } from 'lib/types/redis-types.js'; import type { RawThreadInfos } from 'lib/types/thread-types'; import { type ServerUpdateInfo, type UpdateData, type RawUpdateInfo, type CreateUpdatesResult, } from 'lib/types/update-types.js'; import type { UserInfos, LoggedInUserInfo } from 'lib/types/user-types.js'; import { promiseAll } from 'lib/utils/promises.js'; import createIDs from './id-creator.js'; import { dbQuery, SQL, mergeAndConditions } from '../database/database.js'; import type { SQLStatementType } from '../database/types.js'; import { deleteUpdatesByConditions } from '../deleters/update-deleters.js'; import { fetchEntryInfos, fetchEntryInfosByID, } from '../fetchers/entry-fetchers.js'; import { fetchMessageInfos } from '../fetchers/message-fetchers.js'; import { fetchThreadInfos, type FetchThreadInfosResult, } from '../fetchers/thread-fetchers.js'; import { fetchKnownUserInfos, fetchCurrentUserInfo, } from '../fetchers/user-fetchers.js'; import type { Viewer } from '../session/viewer.js'; import { channelNameForUpdateTarget, publisher } from '../socket/redis.js'; export type UpdatesForCurrentSession = // This is the default if no Viewer is passed, or if an isSocket Viewer is // passed in. We will broadcast to all valid sessions via Redis and return // nothing to the caller, relying on the current session's Redis listener to // pick up the updates and deliver them asynchronously. | 'broadcast' // This is the default if a non-isSocket Viewer is passed in. We avoid // broadcasting the update to the current session, and instead return the // update to the caller, who will handle delivering it to the client. | 'return' // This means we ignore any updates destined for the current session. // Presumably the caller knows what they are doing and has a different way of // communicating the relevant information to the client. | 'ignore'; type DeleteCondition = { +userID: string, +target: ?string, +types: UpdateTypes, }; export type ViewerInfo = | { viewer: Viewer, calendarQuery?: ?CalendarQuery, updatesForCurrentSession?: UpdatesForCurrentSession, } | { viewer: Viewer, calendarQuery: ?CalendarQuery, updatesForCurrentSession?: UpdatesForCurrentSession, threadInfos: RawThreadInfos, }; const defaultUpdateCreationResult = { viewerUpdates: [], userInfos: {} }; const sortFunction = ( a: UpdateData | ServerUpdateInfo, b: UpdateData | ServerUpdateInfo, ) => a.time - b.time; const deleteUpdatesBatchSize = 500; // Creates rows in the updates table based on the inputed updateDatas. Returns // UpdateInfos pertaining to the provided viewerInfo, as well as related // UserInfos. If no viewerInfo is provided, no UpdateInfos will be returned. And // the update row won't have an updater column, meaning no session will be // excluded from the update. 
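//
// An illustrative call, mirroring how updateRepliesCount in
// message-creator.js invokes this function (the IDs below are hypothetical):
//
//   const { viewerUpdates, userInfos } = await createUpdates(
//     [
//       {
//         type: updateTypes.UPDATE_THREAD,
//         userID: '256',
//         time: Date.now(),
//         threadID: '12345',
//       },
//     ],
//     { viewer, updatesForCurrentSession: 'return' },
//   );
//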
async function createUpdates( updateDatas: $ReadOnlyArray, passedViewerInfo?: ?ViewerInfo, ): Promise { if (updateDatas.length === 0) { return defaultUpdateCreationResult; } // viewer.session will throw for a script Viewer let viewerInfo = passedViewerInfo; if ( viewerInfo && (viewerInfo.viewer.isScriptViewer || !viewerInfo.viewer.loggedIn) ) { viewerInfo = null; } const sortedUpdateDatas = [...updateDatas].sort(sortFunction); const filteredUpdateDatas: UpdateData[] = []; const keyedUpdateDatas: Map = new Map(); for (const updateData of sortedUpdateDatas) { const key = keyForUpdateData(updateData); if (!key) { filteredUpdateDatas.push(updateData); continue; } const conditionKey = `${updateData.userID}|${key}`; const deleteCondition = getDeleteCondition(updateData); invariant( deleteCondition, `updateData of type ${updateData.type} has conditionKey ` + `${conditionKey} but no deleteCondition`, ); const curUpdateDatas = keyedUpdateDatas.get(conditionKey); if (!curUpdateDatas) { keyedUpdateDatas.set(conditionKey, [updateData]); continue; } const filteredCurrent = curUpdateDatas.filter(curUpdateData => filterOnDeleteCondition(curUpdateData, deleteCondition), ); if (filteredCurrent.length === 0) { keyedUpdateDatas.set(conditionKey, [updateData]); continue; } const isNewUpdateDataFiltered = !filteredCurrent.every(curUpdateData => { const curDeleteCondition = getDeleteCondition(curUpdateData); invariant( curDeleteCondition, `updateData of type ${curUpdateData.type} is in keyedUpdateDatas ` + "but doesn't have a deleteCondition", ); return filterOnDeleteCondition(updateData, curDeleteCondition); }); if (!isNewUpdateDataFiltered) { filteredCurrent.push(updateData); } keyedUpdateDatas.set(conditionKey, filteredCurrent); } for (const keyUpdateDatas of keyedUpdateDatas.values()) { filteredUpdateDatas.push(...keyUpdateDatas); } const ids = await createIDs('updates', filteredUpdateDatas.length); let updatesForCurrentSession = viewerInfo && viewerInfo.updatesForCurrentSession; if (!updatesForCurrentSession && viewerInfo) { updatesForCurrentSession = viewerInfo.viewer.isSocket ? 'broadcast' : 'return'; } else if (!updatesForCurrentSession) { updatesForCurrentSession = 'broadcast'; } const dontBroadcastSession = updatesForCurrentSession !== 'broadcast' && viewerInfo ? viewerInfo.viewer.session : null; const publishInfos: Map = new Map(); const viewerRawUpdateInfos: RawUpdateInfo[] = []; const insertRows: (?(number | string))[][] = []; const earliestTime: Map = new Map(); for (let i = 0; i < filteredUpdateDatas.length; i++) { const updateData = filteredUpdateDatas[i]; const target = getTargetFromUpdateData(updateData); const rawUpdateInfo = rawUpdateInfoFromUpdateData(updateData, ids[i]); if (!target || !dontBroadcastSession || target !== dontBroadcastSession) { const updateTarget = target ? 
{ userID: updateData.userID, sessionID: target } : { userID: updateData.userID }; const channelName = channelNameForUpdateTarget(updateTarget); let publishInfo = publishInfos.get(channelName); if (!publishInfo) { publishInfo = { updateTarget, rawUpdateInfos: [] }; publishInfos.set(channelName, publishInfo); } publishInfo.rawUpdateInfos.push(rawUpdateInfo); } if ( updatesForCurrentSession === 'return' && viewerInfo && updateData.userID === viewerInfo.viewer.id && (!target || target === viewerInfo.viewer.session) ) { viewerRawUpdateInfos.push(rawUpdateInfo); } if (viewerInfo && target && viewerInfo.viewer.session === target) { // In the case where this update is being created only for the current // session, there's no reason to insert a row into the updates table continue; } const content = updateSpecs[updateData.type].updateContentForServerDB(updateData); const key = keyForUpdateData(updateData); if (key) { const conditionKey = `${updateData.userID}|${key}`; const currentEarliestTime = earliestTime.get(conditionKey); if (!currentEarliestTime || updateData.time < currentEarliestTime) { earliestTime.set(conditionKey, updateData.time); } } const insertRow = [ ids[i], updateData.userID, updateData.type, key, content, updateData.time, dontBroadcastSession, target, ]; insertRows.push(insertRow); } type DeleteUpdatesConditions = { key: string, target?: string, types?: number[], time?: number, }; const usersByConditions: Map< string, { conditions: DeleteUpdatesConditions, users: Set, }, > = new Map(); for (const [conditionKey, keyUpdateDatas] of keyedUpdateDatas) { const deleteConditionByTarget: Map = new Map(); for (const updateData of keyUpdateDatas) { const deleteCondition = getDeleteCondition(updateData); invariant( deleteCondition, `updateData of type ${updateData.type} is in keyedUpdateDatas but ` + "doesn't have a deleteCondition", ); const { target, types } = deleteCondition; const existingDeleteCondition = deleteConditionByTarget.get(target); if (!existingDeleteCondition) { deleteConditionByTarget.set(target, deleteCondition); continue; } const existingTypes = existingDeleteCondition.types; if (existingTypes === 'all_types') { continue; } else if (types === 'all_types') { deleteConditionByTarget.set(target, deleteCondition); continue; } const mergedTypes = new Set([...types, ...existingTypes]); deleteConditionByTarget.set(target, { ...deleteCondition, types: mergedTypes, }); } for (const deleteCondition of deleteConditionByTarget.values()) { const { userID, target, types } = deleteCondition; const key = conditionKey.split('|')[1]; const conditions: DeleteUpdatesConditions = { key }; if (target) { conditions.target = target; } if (types !== 'all_types') { invariant(types.size > 0, 'deleteCondition had empty types set'); conditions.types = [...types]; } const earliestTimeForCondition = earliestTime.get(conditionKey); if (earliestTimeForCondition) { conditions.time = earliestTimeForCondition; } const conditionsKey = JSON.stringify(conditions); if (!usersByConditions.has(conditionsKey)) { usersByConditions.set(conditionsKey, { conditions, users: new Set(), }); } usersByConditions.get(conditionsKey)?.users.add(userID); } } const deleteSQLConditions: SQLStatementType[] = []; for (const { conditions, users } of usersByConditions.values()) { const sqlConditions = [ SQL`u.user IN (${[...users]})`, SQL`u.key = ${conditions.key}`, ]; if (conditions.target) { sqlConditions.push(SQL`u.target = ${conditions.target}`); } if (conditions.types) { sqlConditions.push(SQL`u.type IN (${conditions.types})`); } 
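// For illustration (hypothetical values): after the remaining checks below,
// an entry like
//   { conditions: { key: '12345', target: 'session987', types: [1], time: 1689000000000 },
//     users: new Set(['256', '257']) }
// is merged into roughly
//   u.user IN ('256', '257') AND u.key = '12345' AND u.target = 'session987'
//     AND u.type IN (1) AND u.time < 1689000000000
// before being pushed onto deleteSQLConditions.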
if (conditions.time) { sqlConditions.push(SQL`u.time < ${conditions.time}`); } deleteSQLConditions.push(mergeAndConditions(sqlConditions)); } const insertPromise = (async () => { if (insertRows.length === 0) { return; } const insertQuery = SQL` INSERT INTO updates(id, user, type, \`key\`, content, time, updater, target) `; insertQuery.append(SQL`VALUES ${insertRows}`); await dbQuery(insertQuery); })(); const redisPromise = publishInfos.size > 0 ? redisPublish(publishInfos.values(), dontBroadcastSession) : Promise.resolve(undefined); const deletePromise = (async () => { if (deleteSQLConditions.length === 0) { return; } while (deleteSQLConditions.length > 0) { const batch = deleteSQLConditions.splice(0, deleteUpdatesBatchSize); await deleteUpdatesByConditions(batch); } })(); const updatesPromise: Promise = (async () => { if (viewerRawUpdateInfos.length === 0) { return undefined; } invariant(viewerInfo, 'should be set'); return await fetchUpdateInfosWithRawUpdateInfos( viewerRawUpdateInfos, viewerInfo, ); })(); const { updatesResult } = await promiseAll({ updatesResult: updatesPromise, insertResult: insertPromise, redisResult: redisPromise, deleteResult: deletePromise, }); if (!updatesResult) { return defaultUpdateCreationResult; } const { updateInfos, userInfos } = updatesResult; return { viewerUpdates: updateInfos, userInfos }; } export type FetchUpdatesResult = { +updateInfos: $ReadOnlyArray, +userInfos: UserInfos, }; async function fetchUpdateInfosWithRawUpdateInfos( rawUpdateInfos: $ReadOnlyArray, viewerInfo: ViewerInfo, ): Promise { const entitiesToFetch = rawUpdateInfos .map(info => updateSpecs[info.type].entitiesToFetch?.(info)) .filter(Boolean); const currentUserNeedsFetch = entitiesToFetch.some( ({ currentUser }) => currentUser, ); const threadIDsNeedingFetch = viewerInfo.threadInfos - ? new Set() - : new Set(entitiesToFetch.map(({ threadID }) => threadID).filter(Boolean)); + ? new Set() + : new Set( + entitiesToFetch.map(({ threadID }) => threadID).filter(Boolean), + ); const entryIDsNeedingFetch = new Set( entitiesToFetch.map(({ entryID }) => entryID).filter(Boolean), ); // entries and messages const threadIDsNeedingDetailedFetch = new Set( entitiesToFetch .map(({ detailedThreadID }) => detailedThreadID) .filter(Boolean), ); const userIDsToFetch = new Set( entitiesToFetch.map(({ userID }) => userID).filter(Boolean), ); const { viewer } = viewerInfo; const threadPromise: Promise = (async () => { if (viewerInfo.threadInfos || threadIDsNeedingFetch.size === 0) { return undefined; } return await fetchThreadInfos(viewer, { threadIDs: threadIDsNeedingFetch, }); })(); let calendarQueryTmp: ?CalendarQuery = viewerInfo.calendarQuery ?? null; if (!calendarQueryTmp && viewer.hasSessionInfo) { // This should only ever happen for "legacy" clients who call in without // providing this information. These clients wouldn't know how to deal with // the corresponding UpdateInfos anyways, so no reason to be worried. 
calendarQueryTmp = viewer.calendarQuery; } else if (!calendarQueryTmp) { calendarQueryTmp = defaultCalendarQuery(viewer.platform, viewer.timeZone); } const calendarQuery = calendarQueryTmp; const messageInfosPromise: Promise = (async () => { if (threadIDsNeedingDetailedFetch.size === 0) { return undefined; } const threadCursors: { [string]: ?string } = {}; for (const threadID of threadIDsNeedingDetailedFetch) { threadCursors[threadID] = null; } const messageSelectionCriteria: MessageSelectionCriteria = { threadCursors, }; return await fetchMessageInfos( viewer, messageSelectionCriteria, defaultNumberPerThread, ); })(); const calendarPromise: Promise = (async () => { if (threadIDsNeedingDetailedFetch.size === 0) { return undefined; } const threadCalendarQuery = { ...calendarQuery, filters: [ ...nonThreadCalendarFilters(calendarQuery.filters), { type: 'threads', threadIDs: [...threadIDsNeedingDetailedFetch] }, ], }; return await fetchEntryInfos(viewer, [threadCalendarQuery]); })(); const entryInfosPromise: Promise = (async () => { if (entryIDsNeedingFetch.size === 0) { return undefined; } return await fetchEntryInfosByID(viewer, entryIDsNeedingFetch); })(); const currentUserInfoPromise: Promise = (async () => { if (!currentUserNeedsFetch) { return undefined; } const currentUserInfo = await fetchCurrentUserInfo(viewer); invariant(currentUserInfo.anonymous === undefined, 'should be logged in'); return currentUserInfo; })(); const userInfosPromise: Promise = (async () => { if (userIDsToFetch.size === 0) { return undefined; } return await fetchKnownUserInfos(viewer, [...userIDsToFetch]); })(); const { threadResult, messageInfosResult, calendarResult, entryInfosResult, currentUserInfoResult, userInfosResult, } = await promiseAll({ threadResult: threadPromise, messageInfosResult: messageInfosPromise, calendarResult: calendarPromise, entryInfosResult: entryInfosPromise, currentUserInfoResult: currentUserInfoPromise, userInfosResult: userInfosPromise, }); let threadInfos = {}; if (viewerInfo.threadInfos) { threadInfos = viewerInfo.threadInfos; } else if (threadResult) { threadInfos = threadResult.threadInfos; } return await updateInfosFromRawUpdateInfos(viewer, rawUpdateInfos, { threadInfos, messageInfosResult, calendarResult, entryInfosResult, currentUserInfoResult, userInfosResult, }); } async function updateInfosFromRawUpdateInfos( viewer: Viewer, rawUpdateInfos: $ReadOnlyArray, rawData: UpdateInfosRawData, ): Promise { const { messageInfosResult, calendarResult, userInfosResult } = rawData; const rawEntryInfosByThreadID: { [string]: Array } = {}; for (const entryInfo of calendarResult?.rawEntryInfos ?? []) { if (!rawEntryInfosByThreadID[entryInfo.threadID]) { rawEntryInfosByThreadID[entryInfo.threadID] = []; } rawEntryInfosByThreadID[entryInfo.threadID].push(entryInfo); } const rawMessageInfosByThreadID: { [string]: Array } = {}; for (const messageInfo of messageInfosResult?.rawMessageInfos ?? 
[]) { if (!rawMessageInfosByThreadID[messageInfo.threadID]) { rawMessageInfosByThreadID[messageInfo.threadID] = []; } rawMessageInfosByThreadID[messageInfo.threadID].push(messageInfo); } const params = { data: rawData, rawEntryInfosByThreadID, rawMessageInfosByThreadID, }; const updateInfos = rawUpdateInfos .map(update => updateSpecs[update.type].updateInfoFromRawInfo(update, params), ) .filter(Boolean); updateInfos.sort(sortFunction); // Now we'll attempt to merge UpdateInfos so that we only have one per key const updateForKey: Map = new Map(); const mergedUpdates: ServerUpdateInfo[] = []; for (const updateInfo of updateInfos) { const key = keyForUpdateInfo(updateInfo); if (!key) { mergedUpdates.push(updateInfo); continue; } const typesOfReplacedUpdatesForMatchingKey = updateSpecs[updateInfo.type].typesOfReplacedUpdatesForMatchingKey; const currentUpdateInfo = updateForKey.get(key); if ( !currentUpdateInfo || typesOfReplacedUpdatesForMatchingKey === 'all_types' || typesOfReplacedUpdatesForMatchingKey?.has(currentUpdateInfo.type) ) { updateForKey.set(key, updateInfo); } } for (const [, updateInfo] of updateForKey) { mergedUpdates.push(updateInfo); } mergedUpdates.sort(sortFunction); return { updateInfos: mergedUpdates, userInfos: userInfosResult ?? {} }; } type PublishInfo = { updateTarget: UpdateTarget, rawUpdateInfos: RawUpdateInfo[], }; async function redisPublish( publishInfos: Iterator, dontBroadcastSession: ?string, ): Promise { for (const publishInfo of publishInfos) { const { updateTarget, rawUpdateInfos } = publishInfo; const redisMessage: NewUpdatesRedisMessage = { type: redisMessageTypes.NEW_UPDATES, updates: rawUpdateInfos, }; if (!updateTarget.sessionID && dontBroadcastSession) { redisMessage.ignoreSession = dontBroadcastSession; } publisher.sendMessage(updateTarget, redisMessage); } } function getTargetFromUpdateData(updateData: UpdateData): ?string { if (updateData.targetSession) { return updateData.targetSession; } else if (updateData.targetCookie) { return updateData.targetCookie; } else { return null; } } function getDeleteCondition(updateData: UpdateData): ?DeleteCondition { const types = updateSpecs[updateData.type].deleteCondition; if (!types) { return null; } const target = getTargetFromUpdateData(updateData); const { userID } = updateData; return { userID, target, types }; } function filterOnDeleteCondition( updateData: UpdateData, deleteCondition: DeleteCondition, ): boolean { invariant( updateData.userID === deleteCondition.userID, `updateData of type ${updateData.type} being compared to wrong userID`, ); if (deleteCondition.target) { const target = getTargetFromUpdateData(updateData); if (target !== deleteCondition.target) { return true; } } if (deleteCondition.types === 'all_types') { return false; } return !deleteCondition.types.has(updateData.type); } export { createUpdates, fetchUpdateInfosWithRawUpdateInfos }; diff --git a/keyserver/src/cron/update-geoip-db.js b/keyserver/src/cron/update-geoip-db.js index 3c7a56eba..f48be4d93 100644 --- a/keyserver/src/cron/update-geoip-db.js +++ b/keyserver/src/cron/update-geoip-db.js @@ -1,63 +1,64 @@ // @flow import childProcess from 'child_process'; import cluster from 'cluster'; import geoip from 'geoip-lite'; import { getCommConfig } from 'lib/utils/comm-config.js'; import { handleAsyncPromise } from '../responders/handlers.js'; +type GeoIPLicenseConfig = { +key: string }; async function updateGeoipDB(): Promise { - const geoipLicense = await getCommConfig({ + const geoipLicense = await getCommConfig({ folder: 
'secrets', name: 'geoip_license', }); if (!geoipLicense) { console.log('no keyserver/secrets/geoip_license.json so skipping update'); return; } await spawnUpdater(geoipLicense); } -function spawnUpdater(geoipLicense: { key: string }): Promise { +function spawnUpdater(geoipLicense: GeoIPLicenseConfig): Promise { const spawned = childProcess.spawn(process.execPath, [ '../node_modules/geoip-lite/scripts/updatedb.js', `license_key=${geoipLicense.key}`, ]); return new Promise((resolve, reject) => { spawned.on('error', reject); spawned.on('exit', () => resolve()); }); } function reloadGeoipDB(): Promise { return new Promise(resolve => geoip.reloadData(resolve)); } type IPCMessage = { type: 'geoip_reload', }; const reloadMessage: IPCMessage = { type: 'geoip_reload' }; async function updateAndReloadGeoipDB(): Promise { await updateGeoipDB(); await reloadGeoipDB(); if (!cluster.isMaster) { return; } for (const id in cluster.workers) { cluster.workers[Number(id)].send(reloadMessage); } } if (!cluster.isMaster) { process.on('message', (ipcMessage: IPCMessage) => { if (ipcMessage.type === 'geoip_reload') { handleAsyncPromise(reloadGeoipDB()); } }); } export { updateAndReloadGeoipDB }; diff --git a/keyserver/src/database/db-config.js b/keyserver/src/database/db-config.js index 8dae4525c..365f3c48b 100644 --- a/keyserver/src/database/db-config.js +++ b/keyserver/src/database/db-config.js @@ -1,70 +1,70 @@ // @flow import invariant from 'invariant'; import { getCommConfig } from 'lib/utils/comm-config.js'; type DBType = 'mariadb10.8'; export type DBConfig = { +host: string, +user: string, +password: string, +database: string, +dbType: DBType, }; function assertValidDBType(dbType: ?string): DBType { invariant( dbType, 'dbType not specified in DB config. Following the MySQL deprecation this ' + 'is a required parameter. Please follow this Gist to migrate to ' + 'MariaDB: ' + 'https://gist.github.com/Ashoat/3a5ded2549db082c5516606f3c3c5da5', ); invariant( dbType !== 'mysql5.7', 'We no longer support MySQL. 
Please follow this Gist to migrate to ' + 'MariaDB: ' + 'https://gist.github.com/Ashoat/3a5ded2549db082c5516606f3c3c5da5', ); invariant(dbType === 'mariadb10.8', `${dbType} is not a valid dbType`); return dbType; } let dbConfig; async function getDBConfig(): Promise { if (dbConfig !== undefined) { return dbConfig; } if ( process.env.COMM_DATABASE_DATABASE && process.env.COMM_DATABASE_USER && process.env.COMM_DATABASE_PASSWORD ) { dbConfig = { host: process.env.COMM_DATABASE_HOST || 'localhost', user: process.env.COMM_DATABASE_USER, password: process.env.COMM_DATABASE_PASSWORD, database: process.env.COMM_DATABASE_DATABASE, dbType: assertValidDBType(process.env.COMM_DATABASE_TYPE), }; } else { - const importedDBConfig = await getCommConfig({ + const importedDBConfig = await getCommConfig({ folder: 'secrets', name: 'db_config', }); invariant(importedDBConfig, 'DB config missing'); dbConfig = { ...importedDBConfig, dbType: assertValidDBType(importedDBConfig.dbType), }; } return dbConfig; } async function getDBType(): Promise { const config = await getDBConfig(); return config.dbType; } export { getDBConfig, getDBType }; diff --git a/keyserver/src/database/migration-config.js b/keyserver/src/database/migration-config.js index d921910e7..20c2cc203 100644 --- a/keyserver/src/database/migration-config.js +++ b/keyserver/src/database/migration-config.js @@ -1,831 +1,831 @@ // @flow import fs from 'fs'; import bots from 'lib/facts/bots.js'; import genesis from 'lib/facts/genesis.js'; import { policyTypes } from 'lib/facts/policies.js'; import { specialRoles } from 'lib/permissions/special-roles.js'; import { messageTypes } from 'lib/types/message-types-enum.js'; import { threadPermissions } from 'lib/types/thread-permission-types.js'; import { threadTypes } from 'lib/types/thread-types-enum.js'; import { permissionsToRemoveInMigration } from 'lib/utils/migration-utils.js'; import { dbQuery, SQL } from '../database/database.js'; import { processMessagesInDBForSearch } from '../database/search-utils.js'; import { deleteThread } from '../deleters/thread-deleters.js'; import { createScriptViewer } from '../session/scripts.js'; import { fetchOlmAccount } from '../updaters/olm-account-updater.js'; import { updateRolesAndPermissionsForAllThreads } from '../updaters/thread-permission-updaters.js'; import { updateThread } from '../updaters/thread-updaters.js'; import { ensureUserCredentials } from '../user/checks.js'; import { createPickledOlmAccount, publishPrekeysToIdentity, } from '../utils/olm-utils.js'; const botViewer = createScriptViewer(bots.commbot.userID); const migrations: $ReadOnlyMap Promise> = new Map([ [ 0, async () => { await makeSureBaseRoutePathExists('facts/commapp_url.json'); await makeSureBaseRoutePathExists('facts/squadcal_url.json'); }, ], [ 1, async () => { try { await fs.promises.unlink('facts/url.json'); } catch {} }, ], [ 2, async () => { await fixBaseRoutePathForLocalhost('facts/commapp_url.json'); await fixBaseRoutePathForLocalhost('facts/squadcal_url.json'); }, ], [3, updateRolesAndPermissionsForAllThreads], [ 4, async () => { await dbQuery(SQL`ALTER TABLE uploads ADD INDEX container (container)`); }, ], [ 5, async () => { await dbQuery(SQL` ALTER TABLE cookies ADD device_id varchar(255) DEFAULT NULL, ADD public_key varchar(255) DEFAULT NULL, ADD social_proof varchar(255) DEFAULT NULL; `); }, ], [ 7, async () => { await dbQuery( SQL` ALTER TABLE users DROP COLUMN IF EXISTS public_key, MODIFY hash char(60) COLLATE utf8mb4_bin DEFAULT NULL; ALTER TABLE sessions DROP COLUMN IF 
EXISTS public_key; `, { multipleStatements: true }, ); }, ], [ 8, async () => { await dbQuery( SQL` ALTER TABLE users ADD COLUMN IF NOT EXISTS ethereum_address char(42) DEFAULT NULL; `, ); }, ], [ 9, async () => { await dbQuery( SQL` ALTER TABLE messages ADD COLUMN IF NOT EXISTS target_message bigint(20) DEFAULT NULL; ALTER TABLE messages ADD INDEX target_message (target_message); `, { multipleStatements: true }, ); }, ], [ 10, async () => { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS policy_acknowledgments ( user bigint(20) NOT NULL, policy varchar(255) NOT NULL, date bigint(20) NOT NULL, confirmed tinyint(1) UNSIGNED NOT NULL DEFAULT 0, PRIMARY KEY (user, policy) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); }, ], [ 11, async () => { const time = Date.now(); await dbQuery(SQL` INSERT IGNORE INTO policy_acknowledgments (policy, user, date, confirmed) SELECT ${policyTypes.tosAndPrivacyPolicy}, id, ${time}, 1 FROM users `); }, ], [ 12, async () => { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS siwe_nonces ( nonce char(17) NOT NULL, creation_time bigint(20) NOT NULL, PRIMARY KEY (nonce) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); }, ], [ 13, async () => { await Promise.all([ writeSquadCalRoute('facts/squadcal_url.json'), moveToNonApacheConfig('facts/commapp_url.json', '/comm/'), moveToNonApacheConfig('facts/landing_url.json', '/commlanding/'), ]); }, ], [ 14, async () => { await dbQuery(SQL` ALTER TABLE cookies MODIFY COLUMN social_proof mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL; `); }, ], [ 15, async () => { await dbQuery( SQL` ALTER TABLE uploads ADD COLUMN IF NOT EXISTS thread bigint(20) DEFAULT NULL, ADD INDEX IF NOT EXISTS thread (thread); UPDATE uploads SET thread = ( SELECT thread FROM messages WHERE messages.id = uploads.container ); `, { multipleStatements: true }, ); }, ], [ 16, async () => { await dbQuery( SQL` ALTER TABLE cookies DROP COLUMN IF EXISTS public_key; ALTER TABLE cookies ADD COLUMN IF NOT EXISTS signed_identity_keys mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL; `, { multipleStatements: true }, ); }, ], [ 17, async () => { await dbQuery( SQL` ALTER TABLE cookies DROP INDEX device_token, DROP INDEX user_device_token; ALTER TABLE cookies MODIFY device_token mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL, ADD UNIQUE KEY device_token (device_token(512)), ADD KEY user_device_token (user,device_token(512)); `, { multipleStatements: true }, ); }, ], [18, updateRolesAndPermissionsForAllThreads], [19, updateRolesAndPermissionsForAllThreads], [ 20, async () => { await dbQuery(SQL` ALTER TABLE threads ADD COLUMN IF NOT EXISTS avatar varchar(191) COLLATE utf8mb4_bin DEFAULT NULL; `); }, ], [ 21, async () => { await dbQuery(SQL` ALTER TABLE reports DROP INDEX IF EXISTS user, ADD INDEX IF NOT EXISTS user_type_platform_creation_time (user, type, platform, creation_time); `); }, ], [ 22, async () => { await dbQuery( SQL` ALTER TABLE cookies MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE entries MODIFY creator varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE focused MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE memberships MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE messages MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE notifications MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE reports MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE revisions MODIFY author 
varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE sessions MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE threads MODIFY creator varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE updates MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE uploads MODIFY uploader varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE users MODIFY id varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE relationships_undirected MODIFY user1 varchar(255) CHARSET latin1 COLLATE latin1_bin, MODIFY user2 varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE relationships_directed MODIFY user1 varchar(255) CHARSET latin1 COLLATE latin1_bin, MODIFY user2 varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE user_messages MODIFY recipient varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE settings MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; ALTER TABLE policy_acknowledgments MODIFY user varchar(255) CHARSET latin1 COLLATE latin1_bin; `, { multipleStatements: true }, ); }, ], [23, updateRolesAndPermissionsForAllThreads], [24, updateRolesAndPermissionsForAllThreads], [ 25, async () => { await dbQuery( SQL` CREATE TABLE IF NOT EXISTS message_search ( original_message_id bigint(20) NOT NULL, message_id bigint(20) NOT NULL, processed_content mediumtext COLLATE utf8mb4_bin ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; ALTER TABLE message_search ADD PRIMARY KEY (original_message_id), ADD FULLTEXT INDEX processed_content (processed_content); `, { multipleStatements: true }, ); }, ], [26, processMessagesInDBForSearch], [ 27, async () => { await dbQuery(SQL` ALTER TABLE messages ADD COLUMN IF NOT EXISTS pinned tinyint(1) UNSIGNED NOT NULL DEFAULT 0, ADD COLUMN IF NOT EXISTS pin_time bigint(20) DEFAULT NULL, ADD INDEX IF NOT EXISTS thread_pinned (thread, pinned); `); }, ], [ 28, async () => { await dbQuery(SQL` ALTER TABLE threads ADD COLUMN IF NOT EXISTS pinned_count int UNSIGNED NOT NULL DEFAULT 0; `); }, ], [29, updateRolesAndPermissionsForAllThreads], [ 30, async () => { await dbQuery(SQL`DROP TABLE versions;`); }, ], [ 31, async () => { await dbQuery( SQL` CREATE TABLE IF NOT EXISTS invite_links ( id bigint(20) NOT NULL, name varchar(255) CHARSET latin1 NOT NULL, \`primary\` tinyint(1) UNSIGNED NOT NULL DEFAULT 0, role bigint(20) NOT NULL, community bigint(20) NOT NULL, expiration_time bigint(20), limit_of_uses int UNSIGNED, number_of_uses int UNSIGNED NOT NULL DEFAULT 0 ) ENGINE=InnoDB DEFAULT CHARSET=utf8; ALTER TABLE invite_links ADD PRIMARY KEY (id), ADD UNIQUE KEY (name), ADD INDEX community_primary (community, \`primary\`); `, { multipleStatements: true }, ); }, ], [ 32, async () => { await dbQuery(SQL` UPDATE messages SET target_message = JSON_VALUE(content, "$.sourceMessageID") WHERE type = ${messageTypes.SIDEBAR_SOURCE}; `); }, ], [ 33, async () => { await dbQuery( SQL` CREATE TABLE IF NOT EXISTS olm_sessions ( cookie_id bigint(20) NOT NULL, is_content tinyint(1) NOT NULL, version bigint(20) NOT NULL, pickled_olm_session text CHARACTER SET latin1 COLLATE latin1_bin NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin; ALTER TABLE olm_sessions ADD PRIMARY KEY (cookie_id, is_content); `, { multipleStatements: true }, ); }, ], [ 34, async () => { await dbQuery( SQL` CREATE TABLE IF NOT EXISTS olm_accounts ( is_content tinyint(1) NOT NULL, version bigint(20) NOT NULL, pickling_key text CHARACTER SET latin1 COLLATE latin1_bin NOT NULL, pickled_olm_account text CHARACTER SET 
latin1 COLLATE latin1_bin NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin; ALTER TABLE olm_accounts ADD PRIMARY KEY (is_content); `, { multipleStatements: true }, ); }, ], [ 35, async () => { await createOlmAccounts(); }, ], [36, updateRolesAndPermissionsForAllThreads], [ 37, async () => { await dbQuery( SQL` DELETE FROM olm_accounts; DELETE FROM olm_sessions; `, { multipleStatements: true }, ); await createOlmAccounts(); }, ], [ 38, async () => { const [result] = await dbQuery(SQL` SELECT t.id FROM threads t INNER JOIN memberships m ON m.thread = t.id AND m.role > 0 INNER JOIN users u ON u.id = m.user WHERE t.type = ${threadTypes.PRIVATE} AND t.name = u.ethereum_address `); const threadIDs = result.map(({ id }) => id.toString()); while (threadIDs.length > 0) { // Batch 10 updateThread calls at a time const batch = threadIDs.splice(0, 10); await Promise.all( batch.map(threadID => updateThread( botViewer, { threadID, changes: { name: '', }, }, { silenceMessages: true, ignorePermissions: true, }, ), ), ); } }, ], [39, ensureUserCredentials], [ 40, // Tokens from identity service are 512 characters long () => dbQuery( SQL` ALTER TABLE metadata MODIFY COLUMN data VARCHAR(1023) `, ), ], [ 41, () => dbQuery( SQL` ALTER TABLE memberships DROP INDEX user, ADD KEY user_role_thread (user, role, thread) `, ), ], [ 42, async () => { await dbQuery(SQL` ALTER TABLE roles ADD UNIQUE KEY thread_name (thread, name); `); }, ], [ 43, () => dbQuery( SQL` UPDATE threads SET pinned_count = ( SELECT COUNT(*) FROM messages WHERE messages.thread = threads.id AND messages.pinned = 1 ) `, ), ], [ 44, async () => { const { SIDEBAR_SOURCE, TOGGLE_PIN } = messageTypes; const [result] = await dbQuery(SQL` SELECT m1.thread FROM messages m1 LEFT JOIN messages m2 ON m2.id = m1.target_message WHERE m1.type = ${SIDEBAR_SOURCE} AND m2.type = ${TOGGLE_PIN} `); - const threadIDs = new Set(); + const threadIDs = new Set<string>(); for (const row of result) { threadIDs.add(row.thread.toString()); } await Promise.all( [...threadIDs].map(threadID => deleteThread(botViewer, { threadID }, { ignorePermissions: true }), ), ); }, ], [ 45, () => dbQuery( SQL` ALTER TABLE uploads CHARSET utf8mb4 COLLATE utf8mb4_bin, MODIFY COLUMN type varchar(255) CHARSET latin1 COLLATE latin1_swedish_ci NOT NULL, MODIFY COLUMN filename varchar(255) CHARSET utf8mb4 COLLATE utf8mb4_bin NOT NULL, MODIFY COLUMN mime varchar(255) CHARSET latin1 COLLATE latin1_swedish_ci NOT NULL, MODIFY COLUMN secret varchar(255) CHARSET latin1 COLLATE latin1_swedish_ci NOT NULL; `, ), ], [ 46, async () => { try { const [content, notif] = await Promise.all([ fetchOlmAccount('content'), fetchOlmAccount('notifications'), ]); await publishPrekeysToIdentity(content.account, notif.account); } catch (e) { console.warn('Encountered error while trying to publish prekeys', e); if (process.env.NODE_ENV !== 'development') { throw e; } } }, ], [ 47, () => dbQuery(SQL`ALTER TABLE cookies MODIFY COLUMN hash char(64) NOT NULL`), ], [ 48, async () => { const visibleExtractString = `$.${threadPermissions.VISIBLE}.value`; const query = SQL` UPDATE memberships mm LEFT JOIN ( SELECT m.thread, MAX(m.id) AS message FROM messages m WHERE m.type != ${messageTypes.CREATE_SUB_THREAD} AND m.thread = ${genesis.id} GROUP BY m.thread ) all_users_query ON mm.thread = all_users_query.thread LEFT JOIN ( SELECT m.thread, stm.user, MAX(m.id) AS message FROM messages m LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content WHERE
JSON_EXTRACT(stm.permissions, ${visibleExtractString}) IS TRUE AND m.thread = ${genesis.id} GROUP BY m.thread, stm.user ) last_subthread_message_for_user_query ON mm.thread = last_subthread_message_for_user_query.thread AND mm.user = last_subthread_message_for_user_query.user SET mm.last_message = GREATEST(COALESCE(all_users_query.message, 0), COALESCE(last_subthread_message_for_user_query.message, 0)) WHERE mm.thread = ${genesis.id}; `; await dbQuery(query); }, ], [ 49, async () => { if (isDockerEnvironment()) { return; } const defaultCorsConfig = { domain: 'http://localhost:3000', }; writeJSONToFile(defaultCorsConfig, 'facts/webapp_cors.json'); }, ], [ 50, async () => { moveToNonApacheConfig('facts/webapp_url.json', '/webapp/'); moveToNonApacheConfig('facts/keyserver_url.json', '/keyserver/'); }, ], [ 51, async () => { if (permissionsToRemoveInMigration.length === 0) { return; } const setClause = SQL`permissions = JSON_REMOVE(permissions, ${permissionsToRemoveInMigration.map( path => `$.${path}`, )})`; const updateQuery = SQL` UPDATE roles r LEFT JOIN threads t ON t.id = r.thread `; updateQuery.append(SQL`SET `.append(setClause)); updateQuery.append(SQL` WHERE r.name != 'Admins' AND (t.type = ${threadTypes.COMMUNITY_ROOT} OR t.type = ${threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT}) `); await dbQuery(updateQuery); }, ], [52, updateRolesAndPermissionsForAllThreads], [ 53, async () => dbQuery(SQL` ALTER TABLE invite_links ADD COLUMN blob_holder char(36) CHARSET latin1 `), ], [ 54, async () => { await dbQuery( SQL` ALTER TABLE roles ADD COLUMN IF NOT EXISTS special_role tinyint(2) UNSIGNED DEFAULT NULL, DROP KEY IF EXISTS thread, ADD KEY IF NOT EXISTS thread_special_role (thread, special_role); UPDATE roles r JOIN threads t ON r.id = t.default_role SET r.special_role = ${specialRoles.DEFAULT_ROLE}; `, { multipleStatements: true }, ); }, ], [ 55, async () => { await dbQuery( SQL` ALTER TABLE threads DROP COLUMN IF EXISTS default_role `, ); }, ], ]); const newDatabaseVersion: number = Math.max(...migrations.keys()); async function writeJSONToFile(data: any, filePath: string): Promise { console.warn(`updating ${filePath} to ${JSON.stringify(data)}`); const fileHandle = await fs.promises.open(filePath, 'w'); await fileHandle.writeFile(JSON.stringify(data, null, ' '), 'utf8'); await fileHandle.close(); } async function makeSureBaseRoutePathExists(filePath: string): Promise { let readFile, json; try { readFile = await fs.promises.open(filePath, 'r'); const contents = await readFile.readFile('utf8'); json = JSON.parse(contents); } catch { return; } finally { if (readFile) { await readFile.close(); } } if (json.baseRoutePath) { return; } let baseRoutePath; if (json.baseDomain === 'http://localhost') { baseRoutePath = json.basePath; } else if (filePath.endsWith('commapp_url.json')) { baseRoutePath = '/commweb/'; } else { baseRoutePath = '/'; } const newJSON = { ...json, baseRoutePath }; console.warn(`updating ${filePath} to ${JSON.stringify(newJSON)}`); await writeJSONToFile(newJSON, filePath); } async function fixBaseRoutePathForLocalhost(filePath: string): Promise { let readFile, json; try { readFile = await fs.promises.open(filePath, 'r'); const contents = await readFile.readFile('utf8'); json = JSON.parse(contents); } catch { return; } finally { if (readFile) { await readFile.close(); } } if (json.baseDomain !== 'http://localhost') { return; } const baseRoutePath = '/'; json = { ...json, baseRoutePath }; console.warn(`updating ${filePath} to ${JSON.stringify(json)}`); await writeJSONToFile(json, 
filePath); } async function moveToNonApacheConfig( filePath: string, routePath: string, ): Promise { if (isDockerEnvironment()) { return; } // Since the non-Apache config is so opinionated, just write expected config const newJSON = { baseDomain: 'http://localhost:3000', basePath: routePath, baseRoutePath: routePath, https: false, proxy: 'none', }; await writeJSONToFile(newJSON, filePath); } async function writeSquadCalRoute(filePath: string): Promise { if (isDockerEnvironment()) { return; } // Since the non-Apache config is so opinionated, just write expected config const newJSON = { baseDomain: 'http://localhost:3000', basePath: '/comm/', baseRoutePath: '/', https: false, proxy: 'apache', }; await writeJSONToFile(newJSON, filePath); } async function createOlmAccounts() { const [pickledContentAccount, pickledNotificationsAccount] = await Promise.all([createPickledOlmAccount(), createPickledOlmAccount()]); await dbQuery( SQL` INSERT INTO olm_accounts (is_content, version, pickling_key, pickled_olm_account) VALUES ( TRUE, 0, ${pickledContentAccount.picklingKey}, ${pickledContentAccount.pickledAccount} ), ( FALSE, 0, ${pickledNotificationsAccount.picklingKey}, ${pickledNotificationsAccount.pickledAccount} ); `, ); } function isDockerEnvironment(): boolean { return !!process.env.COMM_DATABASE_HOST; } export { migrations, newDatabaseVersion, createOlmAccounts }; diff --git a/keyserver/src/endpoints.js b/keyserver/src/endpoints.js index 1a8021a9d..b90051c59 100644 --- a/keyserver/src/endpoints.js +++ b/keyserver/src/endpoints.js @@ -1,580 +1,583 @@ // @flow import t from 'tcomb'; import { baseLegalPolicies } from 'lib/facts/policies.js'; import { setThreadUnreadStatusResultValidator, updateActivityResultValidator, } from 'lib/types/activity-types.js'; import type { Endpoint } from 'lib/types/endpoints.js'; import { inviteLinkValidator } from 'lib/types/link-types.js'; import { uploadMultimediaResultValidator } from 'lib/types/media-types.js'; import { getOlmSessionInitializationDataResponseValidator } from 'lib/types/request-types.js'; import { updateUserAvatarRequestValidator } from 'lib/utils/avatar-utils.js'; import { updateActivityResponder, threadSetUnreadStatusResponder, setThreadUnreadStatusValidator, updateActivityResponderInputValidator, } from './responders/activity-responders.js'; import { deviceTokenUpdateResponder, deviceTokenUpdateRequestInputValidator, } from './responders/device-responders.js'; import { entryFetchResponder, entryRevisionFetchResponder, entryCreationResponder, entryUpdateResponder, entryDeletionResponder, entryRestorationResponder, calendarQueryUpdateResponder, createEntryRequestInputValidator, saveEntryResponseValidator, deleteEntryRequestInputValidator, deleteEntryResponseValidator, entryQueryInputValidator, entryRevisionHistoryFetchInputValidator, fetchEntryInfosResponseValidator, fetchEntryRevisionInfosResultValidator, deltaEntryInfosResultValidator, newEntryQueryInputValidator, restoreEntryRequestInputValidator, restoreEntryResponseValidator, saveEntryRequestInputValidator, } from './responders/entry-responders.js'; import type { JSONResponder } from './responders/handlers.js'; import { createJSONResponder } from './responders/handlers.js'; import { getSessionPublicKeysResponder, getOlmSessionInitializationDataResponder, getSessionPublicKeysInputValidator, getSessionPublicKeysResponseValidator, } from './responders/keys-responders.js'; import { createOrUpdatePublicLinkResponder, disableInviteLinkResponder, fetchPrimaryInviteLinksResponder, 
inviteLinkVerificationResponder, createOrUpdatePublicLinkInputValidator, disableInviteLinkInputValidator, fetchInviteLinksResponseValidator, inviteLinkVerificationRequestInputValidator, inviteLinkVerificationResponseValidator, } from './responders/link-responders.js'; import { messageReportCreationResponder, messageReportCreationRequestInputValidator, messageReportCreationResultValidator, } from './responders/message-report-responder.js'; import { textMessageCreationResponder, messageFetchResponder, multimediaMessageCreationResponder, reactionMessageCreationResponder, editMessageCreationResponder, fetchPinnedMessagesResponder, searchMessagesResponder, sendMessageResponseValidator, sendMultimediaMessageRequestInputValidator, sendReactionMessageRequestInputValidator, editMessageRequestInputValidator, sendEditMessageResponseValidator, sendTextMessageRequestInputValidator, fetchMessageInfosRequestInputValidator, fetchMessageInfosResponseValidator, fetchPinnedMessagesResponderInputValidator, fetchPinnedMessagesResultValidator, searchMessagesResponderInputValidator, searchMessagesResponseValidator, } from './responders/message-responders.js'; import { getInitialReduxStateResponder, initialReduxStateRequestValidator, initialReduxStateValidator, } from './responders/redux-state-responders.js'; import { updateRelationshipsResponder, relationshipErrorsValidator, updateRelationshipInputValidator, } from './responders/relationship-responders.js'; import { reportCreationResponder, reportMultiCreationResponder, errorReportFetchInfosResponder, reportCreationRequestInputValidator, reportCreationResponseValidator, fetchErrorReportInfosRequestInputValidator, fetchErrorReportInfosResponseValidator, reportMultiCreationRequestInputValidator, } from './responders/report-responders.js'; import { userSearchResponder, exactUserSearchResponder, exactUserSearchRequestInputValidator, exactUserSearchResultValidator, userSearchRequestInputValidator, userSearchResultValidator, } from './responders/search-responders.js'; import { siweNonceResponder, siweNonceResponseValidator, } from './responders/siwe-nonce-responders.js'; import { threadDeletionResponder, roleUpdateResponder, memberRemovalResponder, threadLeaveResponder, threadUpdateResponder, threadCreationResponder, threadFetchMediaResponder, threadJoinResponder, toggleMessagePinResponder, roleModificationResponder, roleDeletionResponder, leaveThreadResultValidator, newThreadRequestInputValidator, newThreadResponseValidator, threadDeletionRequestInputValidator, joinThreadRequestInputValidator, leaveThreadRequestInputValidator, threadFetchMediaRequestInputValidator, threadFetchMediaResultValidator, threadJoinResultValidator, changeThreadSettingsResultValidator, removeMembersRequestInputValidator, roleChangeRequestInputValidator, toggleMessagePinRequestInputValidator, toggleMessagePinResultValidator, updateThreadRequestInputValidator, roleDeletionRequestInputValidator, roleDeletionResultValidator, roleModificationRequestInputValidator, roleModificationResultValidator, } from './responders/thread-responders.js'; import { userSubscriptionUpdateResponder, passwordUpdateResponder, sendVerificationEmailResponder, sendPasswordResetEmailResponder, logOutResponder, accountDeletionResponder, accountCreationResponder, logInResponder, siweAuthResponder, oldPasswordUpdateResponder, updateUserSettingsResponder, policyAcknowledgmentResponder, updateUserAvatarResponder, registerRequestInputValidator, registerResponseValidator, logOutResponseValidator, logInRequestInputValidator, 
logInResponseValidator, policyAcknowledgmentRequestInputValidator, accountUpdateInputValidator, resetPasswordRequestInputValidator, siweAuthRequestInputValidator, subscriptionUpdateRequestInputValidator, subscriptionUpdateResponseValidator, updatePasswordRequestInputValidator, updateUserAvatarResponderValidator, updateUserSettingsInputValidator, claimUsernameResponder, claimUsernameResponseValidator, } from './responders/user-responders.js'; import { codeVerificationResponder, codeVerificationRequestInputValidator, } from './responders/verification-responders.js'; import { versionResponder, versionResponseValidator, } from './responders/version-responders.js'; import { uploadMediaMetadataResponder, uploadDeletionResponder, UploadDeletionRequestInputValidator, uploadMediaMetadataInputValidator, } from './uploads/uploads.js'; -const ignoredArgumentValidator = t.irreducible('Ignored argument', () => true); +const ignoredArgumentValidator = t.irreducible( + 'Ignored argument', + () => true, +); const jsonEndpoints: { [id: Endpoint]: JSONResponder } = { create_account: createJSONResponder( accountCreationResponder, registerRequestInputValidator, registerResponseValidator, [], ), create_entry: createJSONResponder( entryCreationResponder, createEntryRequestInputValidator, saveEntryResponseValidator, baseLegalPolicies, ), create_error_report: createJSONResponder( reportCreationResponder, reportCreationRequestInputValidator, reportCreationResponseValidator, [], ), create_message_report: createJSONResponder( messageReportCreationResponder, messageReportCreationRequestInputValidator, messageReportCreationResultValidator, baseLegalPolicies, ), create_multimedia_message: createJSONResponder( multimediaMessageCreationResponder, sendMultimediaMessageRequestInputValidator, sendMessageResponseValidator, baseLegalPolicies, ), create_or_update_public_link: createJSONResponder( createOrUpdatePublicLinkResponder, createOrUpdatePublicLinkInputValidator, inviteLinkValidator, baseLegalPolicies, ), create_reaction_message: createJSONResponder( reactionMessageCreationResponder, sendReactionMessageRequestInputValidator, sendMessageResponseValidator, baseLegalPolicies, ), disable_invite_link: createJSONResponder( disableInviteLinkResponder, disableInviteLinkInputValidator, t.Nil, baseLegalPolicies, ), edit_message: createJSONResponder( editMessageCreationResponder, editMessageRequestInputValidator, sendEditMessageResponseValidator, baseLegalPolicies, ), create_report: createJSONResponder( reportCreationResponder, reportCreationRequestInputValidator, reportCreationResponseValidator, [], ), create_reports: createJSONResponder( reportMultiCreationResponder, reportMultiCreationRequestInputValidator, t.Nil, [], ), create_text_message: createJSONResponder( textMessageCreationResponder, sendTextMessageRequestInputValidator, sendMessageResponseValidator, baseLegalPolicies, ), create_thread: createJSONResponder( threadCreationResponder, newThreadRequestInputValidator, newThreadResponseValidator, baseLegalPolicies, ), delete_account: createJSONResponder( accountDeletionResponder, ignoredArgumentValidator, logOutResponseValidator, [], ), delete_entry: createJSONResponder( entryDeletionResponder, deleteEntryRequestInputValidator, deleteEntryResponseValidator, baseLegalPolicies, ), delete_community_role: createJSONResponder( roleDeletionResponder, roleDeletionRequestInputValidator, roleDeletionResultValidator, baseLegalPolicies, ), delete_thread: createJSONResponder( threadDeletionResponder, threadDeletionRequestInputValidator, 
leaveThreadResultValidator, baseLegalPolicies, ), delete_upload: createJSONResponder( uploadDeletionResponder, UploadDeletionRequestInputValidator, t.Nil, baseLegalPolicies, ), exact_search_user: createJSONResponder( exactUserSearchResponder, exactUserSearchRequestInputValidator, exactUserSearchResultValidator, [], ), fetch_entries: createJSONResponder( entryFetchResponder, entryQueryInputValidator, fetchEntryInfosResponseValidator, baseLegalPolicies, ), fetch_entry_revisions: createJSONResponder( entryRevisionFetchResponder, entryRevisionHistoryFetchInputValidator, fetchEntryRevisionInfosResultValidator, baseLegalPolicies, ), fetch_error_report_infos: createJSONResponder( errorReportFetchInfosResponder, fetchErrorReportInfosRequestInputValidator, fetchErrorReportInfosResponseValidator, baseLegalPolicies, ), fetch_messages: createJSONResponder( messageFetchResponder, fetchMessageInfosRequestInputValidator, fetchMessageInfosResponseValidator, baseLegalPolicies, ), fetch_pinned_messages: createJSONResponder( fetchPinnedMessagesResponder, fetchPinnedMessagesResponderInputValidator, fetchPinnedMessagesResultValidator, baseLegalPolicies, ), fetch_primary_invite_links: createJSONResponder( fetchPrimaryInviteLinksResponder, ignoredArgumentValidator, fetchInviteLinksResponseValidator, baseLegalPolicies, ), fetch_thread_media: createJSONResponder( threadFetchMediaResponder, threadFetchMediaRequestInputValidator, threadFetchMediaResultValidator, baseLegalPolicies, ), get_initial_redux_state: createJSONResponder( getInitialReduxStateResponder, initialReduxStateRequestValidator, initialReduxStateValidator, [], ), get_session_public_keys: createJSONResponder( getSessionPublicKeysResponder, getSessionPublicKeysInputValidator, getSessionPublicKeysResponseValidator, baseLegalPolicies, ), join_thread: createJSONResponder( threadJoinResponder, joinThreadRequestInputValidator, threadJoinResultValidator, baseLegalPolicies, ), leave_thread: createJSONResponder( threadLeaveResponder, leaveThreadRequestInputValidator, leaveThreadResultValidator, baseLegalPolicies, ), log_in: createJSONResponder( logInResponder, logInRequestInputValidator, logInResponseValidator, [], ), log_out: createJSONResponder( logOutResponder, ignoredArgumentValidator, logOutResponseValidator, [], ), modify_community_role: createJSONResponder( roleModificationResponder, roleModificationRequestInputValidator, roleModificationResultValidator, baseLegalPolicies, ), policy_acknowledgment: createJSONResponder( policyAcknowledgmentResponder, policyAcknowledgmentRequestInputValidator, t.Nil, [], ), remove_members: createJSONResponder( memberRemovalResponder, removeMembersRequestInputValidator, changeThreadSettingsResultValidator, baseLegalPolicies, ), restore_entry: createJSONResponder( entryRestorationResponder, restoreEntryRequestInputValidator, restoreEntryResponseValidator, baseLegalPolicies, ), search_messages: createJSONResponder( searchMessagesResponder, searchMessagesResponderInputValidator, searchMessagesResponseValidator, baseLegalPolicies, ), search_users: createJSONResponder( userSearchResponder, userSearchRequestInputValidator, userSearchResultValidator, baseLegalPolicies, ), send_password_reset_email: createJSONResponder( sendPasswordResetEmailResponder, resetPasswordRequestInputValidator, t.Nil, [], ), send_verification_email: createJSONResponder( sendVerificationEmailResponder, ignoredArgumentValidator, t.Nil, [], ), set_thread_unread_status: createJSONResponder( threadSetUnreadStatusResponder, setThreadUnreadStatusValidator, 
setThreadUnreadStatusResultValidator, baseLegalPolicies, ), toggle_message_pin: createJSONResponder( toggleMessagePinResponder, toggleMessagePinRequestInputValidator, toggleMessagePinResultValidator, baseLegalPolicies, ), update_account: createJSONResponder( passwordUpdateResponder, accountUpdateInputValidator, t.Nil, baseLegalPolicies, ), update_activity: createJSONResponder( updateActivityResponder, updateActivityResponderInputValidator, updateActivityResultValidator, baseLegalPolicies, ), update_calendar_query: createJSONResponder( calendarQueryUpdateResponder, newEntryQueryInputValidator, deltaEntryInfosResultValidator, baseLegalPolicies, ), update_user_settings: createJSONResponder( updateUserSettingsResponder, updateUserSettingsInputValidator, t.Nil, baseLegalPolicies, ), update_device_token: createJSONResponder( deviceTokenUpdateResponder, deviceTokenUpdateRequestInputValidator, t.Nil, [], ), update_entry: createJSONResponder( entryUpdateResponder, saveEntryRequestInputValidator, saveEntryResponseValidator, baseLegalPolicies, ), update_password: createJSONResponder( oldPasswordUpdateResponder, updatePasswordRequestInputValidator, logInResponseValidator, baseLegalPolicies, ), update_relationships: createJSONResponder( updateRelationshipsResponder, updateRelationshipInputValidator, relationshipErrorsValidator, baseLegalPolicies, ), update_role: createJSONResponder( roleUpdateResponder, roleChangeRequestInputValidator, changeThreadSettingsResultValidator, baseLegalPolicies, ), update_thread: createJSONResponder( threadUpdateResponder, updateThreadRequestInputValidator, changeThreadSettingsResultValidator, baseLegalPolicies, ), update_user_subscription: createJSONResponder( userSubscriptionUpdateResponder, subscriptionUpdateRequestInputValidator, subscriptionUpdateResponseValidator, baseLegalPolicies, ), verify_code: createJSONResponder( codeVerificationResponder, codeVerificationRequestInputValidator, t.Nil, baseLegalPolicies, ), verify_invite_link: createJSONResponder( inviteLinkVerificationResponder, inviteLinkVerificationRequestInputValidator, inviteLinkVerificationResponseValidator, baseLegalPolicies, ), siwe_nonce: createJSONResponder( siweNonceResponder, ignoredArgumentValidator, siweNonceResponseValidator, [], ), siwe_auth: createJSONResponder( siweAuthResponder, siweAuthRequestInputValidator, logInResponseValidator, [], ), claim_username: createJSONResponder( claimUsernameResponder, ignoredArgumentValidator, claimUsernameResponseValidator, [], ), update_user_avatar: createJSONResponder( updateUserAvatarResponder, updateUserAvatarRequestValidator, updateUserAvatarResponderValidator, baseLegalPolicies, ), upload_media_metadata: createJSONResponder( uploadMediaMetadataResponder, uploadMediaMetadataInputValidator, uploadMultimediaResultValidator, baseLegalPolicies, ), get_olm_session_initialization_data: createJSONResponder( getOlmSessionInitializationDataResponder, ignoredArgumentValidator, getOlmSessionInitializationDataResponseValidator, [], ), version: createJSONResponder( versionResponder, ignoredArgumentValidator, versionResponseValidator, [], ), }; export { jsonEndpoints }; diff --git a/keyserver/src/fetchers/message-fetchers.js b/keyserver/src/fetchers/message-fetchers.js index 037eb2325..7626d98a6 100644 --- a/keyserver/src/fetchers/message-fetchers.js +++ b/keyserver/src/fetchers/message-fetchers.js @@ -1,1036 +1,1036 @@ // @flow import invariant from 'invariant'; import { sortMessageInfoList, shimUnsupportedRawMessageInfos, isInvalidSidebarSource, 
isUnableToBeRenderedIndependently, isInvalidPinSource, } from 'lib/shared/message-utils.js'; import { messageSpecs } from 'lib/shared/messages/message-specs.js'; import { getNotifCollapseKey } from 'lib/shared/notif-utils.js'; import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import { messageTypes, type MessageType, assertMessageType, } from 'lib/types/message-types-enum.js'; import { type RawMessageInfo, type RawComposableMessageInfo, type RawRobotextMessageInfo, type EditMessageContent, type MessageSelectionCriteria, type MessageTruncationStatus, messageTruncationStatus, type FetchMessageInfosResult, defaultMaxMessageAge, type FetchPinnedMessagesRequest, type FetchPinnedMessagesResult, type SearchMessagesResponse, type MessageTruncationStatuses, } from 'lib/types/message-types.js'; import { defaultNumberPerThread } from 'lib/types/message-types.js'; import { threadPermissions } from 'lib/types/thread-permission-types.js'; import { ServerError } from 'lib/utils/errors.js'; import { constructMediaFromMediaMessageContentsAndUploadRows, imagesFromRow, } from './upload-fetchers.js'; import { dbQuery, SQL, mergeOrConditions, mergeAndConditions, } from '../database/database.js'; import { processQueryForSearch } from '../database/search-utils.js'; import type { SQLStatementType } from '../database/types.js'; import type { PushInfo } from '../push/send.js'; import type { Viewer } from '../session/viewer.js'; import { creationString, localIDFromCreationString, } from '../utils/idempotent.js'; export type CollapsableNotifInfo = { collapseKey: ?string, existingMessageInfos: RawMessageInfo[], newMessageInfos: RawMessageInfo[], }; export type FetchCollapsableNotifsResult = { [userID: string]: CollapsableNotifInfo[], }; const visibleExtractString = `$.${threadPermissions.VISIBLE}.value`; // This function doesn't filter RawMessageInfos based on what messageTypes the // client supports, since each user can have multiple clients. The caller must // handle this filtering. 
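// A hedged sketch, not part of the original patch: the caller-side filtering
// described above is typically done once a specific client's platformDetails
// are known, e.g. via shimUnsupportedRawMessageInfos (imported above). The
// variable names here are assumptions for illustration, not a quote of the
// real call site:
//
//   const forThisClient = shimUnsupportedRawMessageInfos(
//     notifInfo.newMessageInfos,
//     platformDetails, // PlatformDetails of the device being notified
//   );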
async function fetchCollapsableNotifs( pushInfo: PushInfo, ): Promise { // First, we need to fetch any notifications that should be collapsed const usersToCollapseKeysToInfo: { [string]: { [string]: CollapsableNotifInfo }, } = {}; const usersToCollapsableNotifInfo: { [string]: Array } = {}; for (const userID in pushInfo) { usersToCollapseKeysToInfo[userID] = {}; usersToCollapsableNotifInfo[userID] = []; for (let i = 0; i < pushInfo[userID].messageInfos.length; i++) { const rawMessageInfo = pushInfo[userID].messageInfos[i]; const messageData = pushInfo[userID].messageDatas[i]; const collapseKey = getNotifCollapseKey(rawMessageInfo, messageData); if (!collapseKey) { const collapsableNotifInfo: CollapsableNotifInfo = { collapseKey, existingMessageInfos: [], newMessageInfos: [rawMessageInfo], }; usersToCollapsableNotifInfo[userID].push(collapsableNotifInfo); continue; } if (!usersToCollapseKeysToInfo[userID][collapseKey]) { usersToCollapseKeysToInfo[userID][collapseKey] = ({ collapseKey, existingMessageInfos: [], newMessageInfos: [], }: CollapsableNotifInfo); } usersToCollapseKeysToInfo[userID][collapseKey].newMessageInfos.push( rawMessageInfo, ); } } const sqlTuples = []; for (const userID in usersToCollapseKeysToInfo) { const collapseKeysToInfo = usersToCollapseKeysToInfo[userID]; for (const collapseKey in collapseKeysToInfo) { sqlTuples.push( SQL`(n.user = ${userID} AND n.collapse_key = ${collapseKey})`, ); } } if (sqlTuples.length === 0) { return usersToCollapsableNotifInfo; } const collapseQuery = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, n.user, n.collapse_key, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM notifications n LEFT JOIN messages m ON m.id = n.message LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = n.user LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = n.user WHERE n.rescinded = 0 AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE AND `; collapseQuery.append(mergeOrConditions(sqlTuples)); collapseQuery.append(SQL`ORDER BY m.time DESC, m.id DESC`); const [collapseResult] = await dbQuery(collapseQuery); const rowsByUser = new Map(); for (const row of collapseResult) { const user = row.user.toString(); const currentRowsForUser = rowsByUser.get(user); if (currentRowsForUser) { currentRowsForUser.push(row); } else { rowsByUser.set(user, [row]); } } const derivedMessages = await fetchDerivedMessages(collapseResult); for (const userRows of rowsByUser.values()) { const messages = parseMessageSQLResult(userRows, derivedMessages); for (const message of messages) { const { rawMessageInfo, rows } = message; const [row] = rows; const info = usersToCollapseKeysToInfo[row.user][row.collapse_key]; info.existingMessageInfos.push(rawMessageInfo); } } for (const userID in usersToCollapseKeysToInfo) { const collapseKeysToInfo = usersToCollapseKeysToInfo[userID]; for (const collapseKey in collapseKeysToInfo) { const info = collapseKeysToInfo[collapseKey]; usersToCollapsableNotifInfo[userID].push({ collapseKey: info.collapseKey, existingMessageInfos: sortMessageInfoList(info.existingMessageInfos), newMessageInfos: sortMessageInfoList(info.newMessageInfos), }); } } return usersToCollapsableNotifInfo; } type MessageSQLResult = $ReadOnlyArray<{ rawMessageInfo: RawMessageInfo, rows: 
$ReadOnlyArray, }>; function parseMessageSQLResult( rows: $ReadOnlyArray, derivedMessages: $ReadOnlyMap< string, RawComposableMessageInfo | RawRobotextMessageInfo, >, viewer?: Viewer, ): MessageSQLResult { const rowsByID = new Map(); for (const row of rows) { const id = row.id.toString(); const currentRowsForID = rowsByID.get(id); if (currentRowsForID) { currentRowsForID.push(row); } else { rowsByID.set(id, [row]); } } const messages = []; for (const messageRows of rowsByID.values()) { const rawMessageInfo = rawMessageInfoFromRows( messageRows, viewer, derivedMessages, ); if (rawMessageInfo) { messages.push({ rawMessageInfo, rows: messageRows }); } } return messages; } function assertSingleRow(rows: $ReadOnlyArray): Object { if (rows.length === 0) { throw new Error('expected single row, but none present!'); } else if (rows.length !== 1) { const messageIDs = rows.map(row => row.id.toString()); console.warn( `expected single row, but there are multiple! ${messageIDs.join(', ')}`, ); } return rows[0]; } function mostRecentRowType(rows: $ReadOnlyArray): MessageType { if (rows.length === 0) { throw new Error('expected row, but none present!'); } return assertMessageType(rows[0].type); } function rawMessageInfoFromRows( rawRows: $ReadOnlyArray, viewer?: Viewer, derivedMessages: $ReadOnlyMap< string, RawComposableMessageInfo | RawRobotextMessageInfo, >, ): ?RawMessageInfo { const rows = rawRows.map(row => ({ ...row, subthread_permissions: JSON.parse(row.subthread_permissions), })); const type = mostRecentRowType(rows); const messageSpec = messageSpecs[type]; const requiresDerivedMessages = messageSpec.parseDerivedMessages !== undefined; if (type === messageTypes.IMAGES || type === messageTypes.MULTIMEDIA) { let media; if (type === messageTypes.MULTIMEDIA) { const mediaMessageContents = JSON.parse(rows[0].content); media = constructMediaFromMediaMessageContentsAndUploadRows( mediaMessageContents, rows, ); } else { media = rows.filter(row => row.uploadID).map(imagesFromRow); } const [row] = rows; const localID = localIDFromCreationString(viewer, row.creation); let rawMessageInfoFromServerDBRowParams = { localID, media }; if (requiresDerivedMessages) { rawMessageInfoFromServerDBRowParams = { ...rawMessageInfoFromServerDBRowParams, derivedMessages, }; } invariant( messageSpec.rawMessageInfoFromServerDBRow, `multimedia message spec should have rawMessageInfoFromServerDBRow`, ); return messageSpec.rawMessageInfoFromServerDBRow( row, rawMessageInfoFromServerDBRowParams, ); } const row = assertSingleRow(rows); const localID = localIDFromCreationString(viewer, row.creation); let rawMessageInfoFromServerDBRowParams = { localID }; if (requiresDerivedMessages) { rawMessageInfoFromServerDBRowParams = { ...rawMessageInfoFromServerDBRowParams, derivedMessages, }; } invariant( messageSpec.rawMessageInfoFromServerDBRow, `message spec ${type} should have rawMessageInfoFromServerDBRow`, ); return messageSpec.rawMessageInfoFromServerDBRow( row, rawMessageInfoFromServerDBRowParams, ); } async function fetchMessageInfos( viewer: Viewer, criteria: MessageSelectionCriteria, numberPerThread: number, ): Promise { const { sqlClause: selectionClause, timeFilterData } = parseMessageSelectionCriteria(viewer, criteria); const truncationStatuses: MessageTruncationStatuses = {}; const viewerID = viewer.id; const query = SQL` WITH thread_window AS ( SELECT m.id, m.thread AS threadID, m.user AS creatorID, m.target_message as targetMessageID, m.content, m.time, m.type, m.creation, stm.permissions AS subthread_permissions, 
ROW_NUMBER() OVER ( PARTITION BY threadID ORDER BY m.time DESC, m.id DESC ) n FROM messages m LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewerID} WHERE JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE AND `; query.append(selectionClause); query.append(SQL` ) SELECT tw.*, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM thread_window tw LEFT JOIN uploads up ON up.container = tw.id WHERE tw.n <= ${numberPerThread} ORDER BY tw.threadID, tw.time DESC, tw.id DESC `); const [result] = await dbQuery(query); const derivedMessages = await fetchDerivedMessages(result, viewer); const messages = await parseMessageSQLResult(result, derivedMessages, viewer); const rawMessageInfos = []; - const threadToMessageCount = new Map(); + const threadToMessageCount = new Map<string, number>(); for (const message of messages) { const { rawMessageInfo } = message; rawMessageInfos.push(rawMessageInfo); const { threadID } = rawMessageInfo; const currentCountValue = threadToMessageCount.get(threadID); const currentCount = currentCountValue ? currentCountValue : 0; threadToMessageCount.set(threadID, currentCount + 1); } for (const [threadID, messageCount] of threadToMessageCount) { // If we matched the exact amount we limited to, we're probably truncating // our result set. By setting TRUNCATED here, we tell the client that the // result set might not be contiguous with what's already in their // MessageStore. More details about TRUNCATED can be found in // lib/types/message-types.js if (messageCount >= numberPerThread) { // We won't set TRUNCATED if a cursor was specified for a given thread, // since then the result is guaranteed to be contiguous with what the // client has if (criteria.threadCursors && criteria.threadCursors[threadID]) { truncationStatuses[threadID] = messageTruncationStatus.UNCHANGED; } else { truncationStatuses[threadID] = messageTruncationStatus.TRUNCATED; } continue; } const hasTimeFilter = hasTimeFilterForThread(timeFilterData, threadID); if (!hasTimeFilter) { // If there is no time filter for a given thread, and there are fewer // messages returned than the max we queried for a given thread, we can // conclude that our result set includes all messages for that thread truncationStatuses[threadID] = messageTruncationStatus.EXHAUSTIVE; } } for (const rawMessageInfo of rawMessageInfos) { if (messageSpecs[rawMessageInfo.type].startsThread) { truncationStatuses[rawMessageInfo.threadID] = messageTruncationStatus.EXHAUSTIVE; } } for (const threadID in criteria.threadCursors) { const truncationStatus = truncationStatuses[threadID]; if (truncationStatus !== null && truncationStatus !== undefined) { continue; } const hasTimeFilter = hasTimeFilterForThread(timeFilterData, threadID); if (!hasTimeFilter) { // If there is no time filter for a given thread, and zero messages were // returned, we can conclude that this thread has zero messages. This is // a case of database corruption that should not be possible, but likely // we have some threads like this on prod (either due to some transient // issues or due to old buggy code) truncationStatuses[threadID] = messageTruncationStatus.EXHAUSTIVE; } else { // If this thread was explicitly queried for, and we got no results, but // we can't conclude that it's EXHAUSTIVE, then we'll set to UNCHANGED.
truncationStatuses[threadID] = messageTruncationStatus.UNCHANGED; } } const shimmedRawMessageInfos = shimUnsupportedRawMessageInfos( rawMessageInfos, viewer.platformDetails, ); return { rawMessageInfos: shimmedRawMessageInfos, truncationStatuses, }; } // ESLint doesn't recognize that invariant always throws // eslint-disable-next-line consistent-return function hasTimeFilterForThread( timeFilterData: TimeFilterData, threadID: string, ) { if (timeFilterData.timeFilter === 'ALL') { return true; } else if (timeFilterData.timeFilter === 'NONE') { return false; } else if (timeFilterData.timeFilter === 'ALL_EXCEPT_EXCLUDED') { return !timeFilterData.excludedFromTimeFilter.has(threadID); } else { invariant( false, `unrecognized timeFilter type ${timeFilterData.timeFilter}`, ); } } type TimeFilterData = | { +timeFilter: 'ALL' | 'NONE' } | { +timeFilter: 'ALL_EXCEPT_EXCLUDED', +excludedFromTimeFilter: $ReadOnlySet, }; type ParsedMessageSelectionCriteria = { +sqlClause: SQLStatementType, +timeFilterData: TimeFilterData, }; function parseMessageSelectionCriteria( viewer: Viewer, criteria: MessageSelectionCriteria, ): ParsedMessageSelectionCriteria { const minMessageTime = Date.now() - defaultMaxMessageAge; const shouldApplyTimeFilter = hasMinCodeVersion(viewer.platformDetails, { native: 130, }); let globalTimeFilter; if (criteria.newerThan) { globalTimeFilter = SQL`m.time > ${criteria.newerThan}`; } else if (!criteria.threadCursors && shouldApplyTimeFilter) { globalTimeFilter = SQL` (m.time > ${minMessageTime} OR m.id = mm.last_message) `; } const threadConditions = []; if ( criteria.joinedThreads === true && shouldApplyTimeFilter && !globalTimeFilter ) { threadConditions.push(SQL` (mm.role > 0 AND (m.time > ${minMessageTime} OR m.id = mm.last_message)) `); } else if (criteria.joinedThreads === true) { threadConditions.push(SQL`mm.role > 0`); } if (criteria.threadCursors) { for (const threadID in criteria.threadCursors) { const cursor = criteria.threadCursors[threadID]; if (cursor) { threadConditions.push( SQL`(m.thread = ${threadID} AND m.id < ${cursor})`, ); } else { threadConditions.push(SQL`m.thread = ${threadID}`); } } } if (threadConditions.length === 0) { throw new ServerError('internal_error'); } const threadClause = mergeOrConditions(threadConditions); let timeFilterData; if (globalTimeFilter) { timeFilterData = { timeFilter: 'ALL' }; } else if (!shouldApplyTimeFilter) { timeFilterData = { timeFilter: 'NONE' }; } else { invariant( criteria.threadCursors, 'ALL_EXCEPT_EXCLUDED should correspond to threadCursors being set', ); const excludedFromTimeFilter = new Set(Object.keys(criteria.threadCursors)); timeFilterData = { timeFilter: 'ALL_EXCEPT_EXCLUDED', excludedFromTimeFilter, }; } const conditions = [globalTimeFilter, threadClause].filter(Boolean); const sqlClause = mergeAndConditions(conditions); return { sqlClause, timeFilterData }; } function messageSelectionCriteriaToInitialTruncationStatuses( criteria: MessageSelectionCriteria, defaultTruncationStatus: MessageTruncationStatus, ) { const truncationStatuses: MessageTruncationStatuses = {}; if (criteria.threadCursors) { for (const threadID in criteria.threadCursors) { truncationStatuses[threadID] = defaultTruncationStatus; } } return truncationStatuses; } async function fetchMessageInfosSince( viewer: Viewer, criteria: MessageSelectionCriteria, maxNumberPerThread: number, ): Promise { const { sqlClause: selectionClause } = parseMessageSelectionCriteria( viewer, criteria, ); const truncationStatuses = 
messageSelectionCriteriaToInitialTruncationStatuses( criteria, messageTruncationStatus.UNCHANGED, ); const viewerID = viewer.id; const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewerID} WHERE JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE AND `; query.append(selectionClause); query.append(SQL` ORDER BY m.thread, m.time DESC, m.id DESC `); const [result] = await dbQuery(query); const derivedMessages = await fetchDerivedMessages(result, viewer); const messages = await parseMessageSQLResult(result, derivedMessages, viewer); const rawMessageInfos = []; let currentThreadID = null; let numMessagesForCurrentThreadID = 0; for (const message of messages) { const { rawMessageInfo } = message; const { threadID } = rawMessageInfo; if (threadID !== currentThreadID) { currentThreadID = threadID; numMessagesForCurrentThreadID = 1; truncationStatuses[threadID] = messageTruncationStatus.UNCHANGED; } else { numMessagesForCurrentThreadID++; } if (numMessagesForCurrentThreadID <= maxNumberPerThread) { if (messageSpecs[rawMessageInfo.type].startsThread) { truncationStatuses[threadID] = messageTruncationStatus.EXHAUSTIVE; } rawMessageInfos.push(rawMessageInfo); } else if (numMessagesForCurrentThreadID === maxNumberPerThread + 1) { truncationStatuses[threadID] = messageTruncationStatus.TRUNCATED; } } const shimmedRawMessageInfos = shimUnsupportedRawMessageInfos( rawMessageInfos, viewer.platformDetails, ); return { rawMessageInfos: shimmedRawMessageInfos, truncationStatuses, }; } function getMessageFetchResultFromRedisMessages( viewer: Viewer, rawMessageInfos: $ReadOnlyArray, ): FetchMessageInfosResult { const truncationStatuses: MessageTruncationStatuses = {}; for (const rawMessageInfo of rawMessageInfos) { truncationStatuses[rawMessageInfo.threadID] = messageTruncationStatus.UNCHANGED; } const shimmedRawMessageInfos = shimUnsupportedRawMessageInfos( rawMessageInfos, viewer.platformDetails, ); return { rawMessageInfos: shimmedRawMessageInfos, truncationStatuses, }; } async function fetchMessageInfoForLocalID( viewer: Viewer, localID: ?string, ): Promise { if (!localID || !viewer.hasSessionInfo) { return null; } const creation = creationString(viewer, localID); const viewerID = viewer.id; const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewerID} WHERE m.user = ${viewerID} AND m.creation = ${creation} AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const derivedMessages = await fetchDerivedMessages(result, viewer); return 
rawMessageInfoFromRows(result, viewer, derivedMessages); } const entryIDExtractString = '$.entryID'; async function fetchMessageInfoForEntryAction( viewer: Viewer, messageType: MessageType, entryID: string, threadID: string, ): Promise { const viewerID = viewer.id; const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message AS targetMessageID, NULL AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} WHERE m.user = ${viewerID} AND m.thread = ${threadID} AND m.type = ${messageType} AND JSON_EXTRACT(m.content, ${entryIDExtractString}) = ${entryID} AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const derivedMessages = await fetchDerivedMessages(result, viewer); return rawMessageInfoFromRows(result, viewer, derivedMessages); } async function fetchMessageRowsByIDs(messageIDs: $ReadOnlyArray) { const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = m.user WHERE m.id IN (${messageIDs}) `; const [result] = await dbQuery(query); return result; } async function fetchPinnedMessageInfos( viewer: Viewer, request: FetchPinnedMessagesRequest, ): Promise { // The only message types that can be pinned are 0, 14, and 15 // (text, images, and multimedia), so we don't need to worry about // an admin pinning a message about creating a secret subchannel. This is // why we don't check subthread permissions (as opposed to other queries). 
const messageRowsQuery = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, NULL AS subthread_permissions, u.id AS uploadID, u.type AS uploadType, u.secret AS uploadSecret, u.extra AS uploadExtra FROM messages m LEFT JOIN uploads u ON u.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewer.id} WHERE m.thread = ${request.threadID} AND m.pinned = 1 AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE ORDER BY m.pin_time DESC `; const [messageRows] = await dbQuery(messageRowsQuery); if (messageRows.length === 0) { return { pinnedMessages: [] }; } const pinnedAndRelatedMessages = await rawMessageInfoForRowsAndRelatedMessages(messageRows, viewer); const shimmedPinnedRawMessageInfos = shimUnsupportedRawMessageInfos( pinnedAndRelatedMessages, viewer.platformDetails, ); return { pinnedMessages: shimmedPinnedRawMessageInfos, }; } async function fetchDerivedMessages( rows: $ReadOnlyArray, viewer?: Viewer, ): Promise< $ReadOnlyMap, > { const requiredIDs = new Set(); for (const row of rows) { // parseDerivedMessages should be defined for SIDEBAR_SOURCE and TOGGLE_PIN const { parseDerivedMessages } = messageSpecs[row.type]; parseDerivedMessages?.(row, requiredIDs); } const messagesByID = new Map< string, RawComposableMessageInfo | RawRobotextMessageInfo, >(); if (requiredIDs.size === 0) { return messagesByID; } const [result, edits] = await Promise.all([ fetchMessageRowsByIDs([...requiredIDs]), fetchLatestEditMessageContentByIDs([...requiredIDs]), ]); const messages = await parseMessageSQLResult(result, new Map(), viewer); for (const message of messages) { let { rawMessageInfo } = message; if (rawMessageInfo.type === messageTypes.SIDEBAR_SOURCE) { invariant( !isInvalidSidebarSource(rawMessageInfo), 'SIDEBAR_SOURCE should not point to a ' + 'SIDEBAR_SOURCE, REACTION, EDIT_MESSAGE or TOGGLE_PIN', ); } if (rawMessageInfo.type === messageTypes.TOGGLE_PIN) { invariant( !isInvalidPinSource(rawMessageInfo), 'TOGGLE_PIN should not point to a non-composable message type', ); } if (rawMessageInfo.id) { const editedContent = edits.get(rawMessageInfo.id); if (editedContent && rawMessageInfo.type === messageTypes.TEXT) { rawMessageInfo = { ...rawMessageInfo, text: editedContent.text, }; } invariant(rawMessageInfo.id, 'rawMessageInfo.id should not be null'); // Flow doesn't refine the types if we don't explicitly invariant on // or check against all of the unexpected message types, and that list // can both get long and generally defeats the purpose of moving the // logic into message specs to have one 'single source of truth'. 
// $FlowFixMe messagesByID.set(rawMessageInfo.id, rawMessageInfo); } } return messagesByID; } async function fetchMessageInfoByID( viewer?: Viewer, messageID: string, ): Promise { const result = await fetchMessageRowsByIDs([messageID]); if (result.length === 0) { return null; } const derivedMessages = await fetchDerivedMessages(result, viewer); return rawMessageInfoFromRows(result, viewer, derivedMessages); } async function fetchThreadMessagesCount(threadID: string): Promise { const query = SQL` SELECT COUNT(*) AS count FROM messages WHERE thread = ${threadID} `; const [result] = await dbQuery(query); return result[0].count; } async function fetchLatestEditMessageContentByIDs( messageIDs: $ReadOnlyArray, ): Promise<$ReadOnlyMap> { const latestEditedMessageQuery = SQL` SELECT m.id, ( SELECT m2.content FROM messages m2 WHERE m.id = m2.target_message AND m.thread = m2.thread AND m2.type = ${messageTypes.EDIT_MESSAGE} ORDER BY time DESC, id DESC LIMIT 1 ) content FROM messages m WHERE m.id IN(${messageIDs}) `; const [result] = await dbQuery(latestEditedMessageQuery); - const latestContentByID = new Map(); + const latestContentByID = new Map<string, EditMessageContent>(); for (const row of result) { if (!row.content) { continue; } const content = JSON.parse(row.content); latestContentByID.set(row.id.toString(), content); } return latestContentByID; } async function fetchLatestEditMessageContentByID( messageID: string, ): Promise { const result = await fetchLatestEditMessageContentByIDs([messageID]); const content = result.get(messageID); return content; } async function fetchRelatedMessages( viewer?: Viewer, messages: $ReadOnlyMap< string, RawComposableMessageInfo | RawRobotextMessageInfo, >, ): Promise<$ReadOnlyArray> { if (messages.size === 0) { return []; } const originalMessageIDs = [...messages.keys()]; const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = m.user WHERE m.target_message IN (${originalMessageIDs}) AND ( m.type = ${messageTypes.SIDEBAR_SOURCE} OR m.type = ${messageTypes.REACTION} OR m.type = ${messageTypes.TOGGLE_PIN} ) UNION SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m2 INNER JOIN messages m ON m.id = ( SELECT m3.id FROM messages m3 WHERE m3.target_message = m2.id AND m3.thread = m2.thread AND m3.type = ${messageTypes.EDIT_MESSAGE} ORDER BY time DESC, id DESC LIMIT 1 ) LEFT JOIN uploads up ON up.container = m2.id LEFT JOIN memberships stm ON m2.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m2.content AND stm.user = m2.user WHERE m2.id IN (${originalMessageIDs}) `; const [resultRows] = await dbQuery(query); if (resultRows.length === 0) { return []; } const SQLResult = await parseMessageSQLResult(resultRows, messages, viewer); return SQLResult.map(item => item.rawMessageInfo); } async function rawMessageInfoForRowsAndRelatedMessages( rows: $ReadOnlyArray, viewer?: Viewer, ): Promise<$ReadOnlyArray> { const parsedResults = await parseMessageSQLResult(rows, new
Map(), viewer); const rawMessageInfoMap = new Map< string, RawComposableMessageInfo | RawRobotextMessageInfo, >(); for (const message of parsedResults) { const { rawMessageInfo } = message; if (isUnableToBeRenderedIndependently(rawMessageInfo)) { continue; } invariant(rawMessageInfo.id, 'rawMessageInfo.id should not be null'); // Flow fails to refine types correctly since // isUnableToBeRenderedIndependently introspects into a message spec // instead of directly checking message types. We use "continue" to avoid // invalid messages, but Flow doesn't recognize this. The // alternative is to check against every message type, but that defeats // the purpose of a 'single source of truth.' // $FlowFixMe rawMessageInfoMap.set(rawMessageInfo.id, rawMessageInfo); } const rawMessageInfos = parsedResults.map(item => item.rawMessageInfo); const rawRelatedMessageInfos = await fetchRelatedMessages( viewer, rawMessageInfoMap, ); return [...rawMessageInfos, ...rawRelatedMessageInfos]; } const searchMessagesPageSize = defaultNumberPerThread + 1; async function searchMessagesInSingleChat( inputQuery: string, threadID: string, viewer?: Viewer, cursor?: string, ): Promise { if (inputQuery === '') { console.warn('received empty search query'); return { messages: [], endReached: true }; } const pattern = processQueryForSearch(inputQuery); if (pattern === '') { return { endReached: true, messages: [], }; } const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM message_search s LEFT JOIN messages m ON m.id = s.original_message_id LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = m.user LEFT JOIN uploads up ON up.container = m.id LEFT JOIN messages m2 ON m2.target_message = m.id AND m2.type = ${messageTypes.SIDEBAR_SOURCE} AND m2.thread = ${threadID} WHERE MATCH(s.processed_content) AGAINST(${pattern} IN BOOLEAN MODE) AND (m.thread = ${threadID} OR m2.id IS NOT NULL) `; if (cursor) { query.append(SQL`AND m.id < ${cursor} `); } query.append(SQL` ORDER BY m.time DESC, m.id DESC LIMIT ${searchMessagesPageSize} `); const [results] = await dbQuery(query); if (results.length === 0) { return { messages: [], endReached: true }; } const endReached = results.length < searchMessagesPageSize; const resultsPage = endReached ? 
results : results.slice(0, -1); const rawMessageInfos = await rawMessageInfoForRowsAndRelatedMessages( resultsPage, viewer, ); return { messages: shimUnsupportedRawMessageInfos( rawMessageInfos, viewer?.platformDetails, ), endReached: endReached, }; } export { fetchCollapsableNotifs, fetchMessageInfos, fetchMessageInfosSince, getMessageFetchResultFromRedisMessages, fetchMessageInfoForLocalID, fetchMessageInfoForEntryAction, fetchMessageInfoByID, fetchThreadMessagesCount, fetchLatestEditMessageContentByID, fetchPinnedMessageInfos, searchMessagesInSingleChat, }; diff --git a/keyserver/src/fetchers/thread-permission-fetchers.js b/keyserver/src/fetchers/thread-permission-fetchers.js index 7cf766696..007fa75f1 100644 --- a/keyserver/src/fetchers/thread-permission-fetchers.js +++ b/keyserver/src/fetchers/thread-permission-fetchers.js @@ -1,366 +1,368 @@ // @flow import genesis from 'lib/facts/genesis.js'; import { permissionLookup, makePermissionsBlob, getRoleForPermissions, } from 'lib/permissions/thread-permissions.js'; import { relationshipBlockedInEitherDirection } from 'lib/shared/relationship-utils.js'; import { threadFrozenDueToBlock, permissionsDisabledByBlock, } from 'lib/shared/thread-utils.js'; import { userRelationshipStatus } from 'lib/types/relationship-types.js'; import type { ThreadPermission, ThreadPermissionsBlob, ThreadRolePermissionsBlob, } from 'lib/types/thread-permission-types.js'; import type { ThreadType } from 'lib/types/thread-types-enum.js'; import { fetchThreadInfos } from './thread-fetchers.js'; import { fetchKnownUserInfos } from './user-fetchers.js'; import { dbQuery, SQL } from '../database/database.js'; import type { Viewer } from '../session/viewer.js'; // Note that it's risky to verify permissions by inspecting the blob directly. // There are other factors that can override permissions in the permissions // blob, such as when one user blocks another. It's always better to go through // checkThreads and friends, or by looking at the ThreadInfo through // threadHasPermission. 
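// A hedged sketch, not part of the original patch, of the preferred pattern
// described in the note above: go through checkThreadPermission (defined
// below) rather than reading the membership blob, so that overrides such as
// user blocks (see checkThreadsFrozen) are taken into account. The helper
// name is hypothetical, and threadPermissions would need to be imported from
// 'lib/types/thread-permission-types.js':
//
//   async function canViewerSeeThread(
//     viewer: Viewer,
//     threadID: string,
//   ): Promise<boolean> {
//     return await checkThreadPermission(
//       viewer,
//       threadID,
//       threadPermissions.VISIBLE,
//     );
//   }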
async function fetchThreadPermissionsBlob( viewer: Viewer, threadID: string, ): Promise { const viewerID = viewer.id; const query = SQL` SELECT permissions FROM memberships WHERE thread = ${threadID} AND user = ${viewerID} `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const row = result[0]; return JSON.parse(row.permissions); } function checkThreadPermission( viewer: Viewer, threadID: string, permission: ThreadPermission, ): Promise { return checkThread(viewer, threadID, [{ check: 'permission', permission }]); } function viewerIsMember(viewer: Viewer, threadID: string): Promise { return checkThread(viewer, threadID, [{ check: 'is_member' }]); } type Check = | { +check: 'is_member' } | { +check: 'permission', +permission: ThreadPermission }; function isThreadValid( permissions: ?ThreadPermissionsBlob, role: number, checks: $ReadOnlyArray, ): boolean { for (const check of checks) { if (check.check === 'is_member') { if (role <= 0) { return false; } } else if (check.check === 'permission') { if (!permissionLookup(permissions, check.permission)) { return false; } } } return true; } async function checkThreads( viewer: Viewer, threadIDs: $ReadOnlyArray, checks: $ReadOnlyArray, ): Promise> { if (viewer.isScriptViewer) { // script viewers are all-powerful return new Set(threadIDs); } const threadRows = await getValidThreads(viewer, threadIDs, checks); return new Set(threadRows.map(row => row.threadID)); } type PartialMembershipRow = { +threadID: string, +role: number, +permissions: ThreadPermissionsBlob, }; async function getValidThreads( viewer: Viewer, threadIDs: $ReadOnlyArray, checks: $ReadOnlyArray, ): Promise { const query = SQL` SELECT thread AS threadID, permissions, role FROM memberships WHERE thread IN (${threadIDs}) AND user = ${viewer.userID} `; const permissionsToCheck = []; for (const check of checks) { if (check.check === 'permission') { permissionsToCheck.push(check.permission); } } const [[result], disabledThreadIDs] = await Promise.all([ dbQuery(query), checkThreadsFrozen(viewer, permissionsToCheck, threadIDs), ]); return result .map(row => ({ ...row, threadID: row.threadID.toString(), permissions: JSON.parse(row.permissions), })) .filter( row => isThreadValid(row.permissions, row.role, checks) && !disabledThreadIDs.has(row.threadID), ); } async function checkThreadsFrozen( viewer: Viewer, permissionsToCheck: $ReadOnlyArray, threadIDs: $ReadOnlyArray, -) { - const threadIDsWithDisabledPermissions = new Set(); +): Promise<$ReadOnlySet> { + const threadIDsWithDisabledPermissions = new Set(); const permissionMightBeDisabled = permissionsToCheck.some(permission => permissionsDisabledByBlock.has(permission), ); if (!permissionMightBeDisabled) { return threadIDsWithDisabledPermissions; } const [{ threadInfos }, userInfos] = await Promise.all([ fetchThreadInfos(viewer, { threadIDs: new Set(threadIDs) }), fetchKnownUserInfos(viewer), ]); for (const threadID in threadInfos) { const blockedThread = threadFrozenDueToBlock( threadInfos[threadID], viewer.id, userInfos, ); if (blockedThread) { threadIDsWithDisabledPermissions.add(threadID); } } return threadIDsWithDisabledPermissions; } async function checkIfThreadIsBlocked( viewer: Viewer, threadID: string, permission: ThreadPermission, ): Promise { const disabledThreadIDs = await checkThreadsFrozen( viewer, [permission], [threadID], ); return disabledThreadIDs.has(threadID); } async function checkThread( viewer: Viewer, threadID: string, checks: $ReadOnlyArray, ): Promise { const validThreads = await 
checkThreads(viewer, [threadID], checks); return validThreads.has(threadID); } // We pass this into getRoleForPermissions in order to check if a hypothetical // permissions blob would block membership by returning a non-positive result. // It doesn't matter what value we pass in, as long as it's positive. const arbitraryPositiveRole = '1'; +type ContainingStatus = 'member' | 'non-member' | 'no-containing-thread'; + type CandidateMembers = { +[key: string]: ?$ReadOnlyArray, }; type ValidateCandidateMembersParams = { +threadType: ThreadType, +parentThreadID: ?string, +containingThreadID: ?string, +defaultRolePermissions: ThreadRolePermissionsBlob, }; type ValidateCandidateMembersOptions = { +requireRelationship?: boolean }; async function validateCandidateMembers( viewer: Viewer, candidates: CandidateMembers, params: ValidateCandidateMembersParams, options?: ValidateCandidateMembersOptions, ): Promise { const requireRelationship = options?.requireRelationship ?? true; - const allCandidatesSet = new Set(); + const allCandidatesSet = new Set(); for (const key in candidates) { const candidateGroup = candidates[key]; if (!candidateGroup) { continue; } for (const candidate of candidateGroup) { allCandidatesSet.add(candidate); } } const allCandidates = [...allCandidatesSet]; const fetchMembersPromise = fetchKnownUserInfos(viewer, allCandidates); const parentPermissionsPromise = (async () => { const parentPermissions = {}; if (!params.parentThreadID || allCandidates.length === 0) { return parentPermissions; } const parentPermissionsQuery = SQL` SELECT user, permissions FROM memberships WHERE thread = ${params.parentThreadID} AND user IN (${allCandidates}) `; const [result] = await dbQuery(parentPermissionsQuery); for (const row of result) { parentPermissions[row.user.toString()] = JSON.parse(row.permissions); } return parentPermissions; })(); const memberOfContainingThreadPromise: Promise< - Map, + Map, > = (async () => { - const results = new Map(); + const results = new Map(); if (allCandidates.length === 0) { return results; } if (!params.containingThreadID) { for (const userID of allCandidates) { results.set(userID, 'no-containing-thread'); } return results; } const memberOfContainingThreadQuery = SQL` SELECT user, role AS containing_role FROM memberships WHERE thread = ${params.containingThreadID} AND user IN (${allCandidates}) `; const [result] = await dbQuery(memberOfContainingThreadQuery); for (const row of result) { results.set( row.user.toString(), row.containing_role > 0 ? 
'member' : 'non-member', ); } return results; })(); const [fetchedMembers, parentPermissions, memberOfContainingThread] = await Promise.all([ fetchMembersPromise, parentPermissionsPromise, memberOfContainingThreadPromise, ]); - const ignoreMembers = new Set(); + const ignoreMembers = new Set(); for (const memberID of allCandidates) { const member = fetchedMembers[memberID]; if (!member && requireRelationship) { ignoreMembers.add(memberID); continue; } const relationshipStatus = member?.relationshipStatus; const memberRelationshipHasBlock = !!( relationshipStatus && relationshipBlockedInEitherDirection(relationshipStatus) ); if (memberRelationshipHasBlock) { ignoreMembers.add(memberID); continue; } const permissionsFromParent = parentPermissions[memberID]; if (memberOfContainingThread.get(memberID) === 'non-member') { ignoreMembers.add(memberID); continue; } const isParentThreadGenesis = params.parentThreadID === genesis.id; if ( (memberOfContainingThread.get(memberID) === 'no-containing-thread' || isParentThreadGenesis) && relationshipStatus !== userRelationshipStatus.FRIEND && requireRelationship ) { ignoreMembers.add(memberID); continue; } const permissions = makePermissionsBlob( params.defaultRolePermissions, permissionsFromParent, '-1', params.threadType, ); if (!permissions) { ignoreMembers.add(memberID); continue; } const targetRole = getRoleForPermissions( arbitraryPositiveRole, permissions, ); if (Number(targetRole) <= 0) { ignoreMembers.add(memberID); continue; } } if (ignoreMembers.size === 0) { return candidates; } const result: { [string]: ?$ReadOnlyArray } = {}; for (const key in candidates) { const candidateGroup = candidates[key]; if (!candidateGroup) { result[key] = candidateGroup; continue; } const resultForKey = []; for (const candidate of candidateGroup) { if (!ignoreMembers.has(candidate)) { resultForKey.push(candidate); } } result[key] = resultForKey; } return result; } export { fetchThreadPermissionsBlob, checkThreadPermission, viewerIsMember, checkThreads, getValidThreads, checkThread, checkIfThreadIsBlocked, validateCandidateMembers, }; diff --git a/keyserver/src/push/providers.js b/keyserver/src/push/providers.js index 98ceaf9b3..41a3c52b9 100644 --- a/keyserver/src/push/providers.js +++ b/keyserver/src/push/providers.js @@ -1,186 +1,214 @@ // @flow import apn from '@parse/node-apn'; import type { Provider as APNProvider } from '@parse/node-apn'; import fcmAdmin from 'firebase-admin'; import type { FirebaseApp } from 'firebase-admin'; import invariant from 'invariant'; import webpush from 'web-push'; import type { PlatformDetails } from 'lib/types/device-types'; import { getCommConfig } from 'lib/utils/comm-config.js'; type APNPushProfile = 'apn_config' | 'comm_apn_config'; function getAPNPushProfileForCodeVersion( platformDetails: PlatformDetails, ): APNPushProfile { if (platformDetails.platform === 'macos') { return 'comm_apn_config'; } return platformDetails.codeVersion && platformDetails.codeVersion >= 87 ? 'comm_apn_config' : 'apn_config'; } type FCMPushProfile = 'fcm_config' | 'comm_fcm_config'; function getFCMPushProfileForCodeVersion(codeVersion: ?number): FCMPushProfile { return codeVersion && codeVersion >= 87 ? 
'comm_fcm_config' : 'fcm_config'; } -const cachedAPNProviders = new Map(); +type APNConfig = { + +token: { + +key: string, + +keyId: string, + +teamId: string, + }, + +production: boolean, +}; + +const cachedAPNProviders = new Map(); async function getAPNProvider(profile: APNPushProfile): Promise { const provider = cachedAPNProviders.get(profile); if (provider !== undefined) { return provider; } try { - const apnConfig = await getCommConfig({ folder: 'secrets', name: profile }); + const apnConfig = await getCommConfig({ + folder: 'secrets', + name: profile, + }); invariant(apnConfig, `APN config missing for ${profile}`); if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, new apn.Provider(apnConfig)); } } catch { if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, null); } } return cachedAPNProviders.get(profile); } -const cachedFCMProviders = new Map(); +type FCMConfig = { + +type: string, + +project_id: string, + +private_key_id: string, + +private_key: string, + +client_email: string, + +client_id: string, + +auth_uri: string, + +token_uri: string, + +auth_provider_x509_cert_url: string, + +client_x509_cert_url: string, +}; + +const cachedFCMProviders = new Map(); async function getFCMProvider(profile: FCMPushProfile): Promise { const provider = cachedFCMProviders.get(profile); if (provider !== undefined) { return provider; } try { - const fcmConfig = await getCommConfig({ folder: 'secrets', name: profile }); + const fcmConfig = await getCommConfig({ + folder: 'secrets', + name: profile, + }); invariant(fcmConfig, `FCM config missed for ${profile}`); if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set( profile, fcmAdmin.initializeApp( { credential: fcmAdmin.credential.cert(fcmConfig), }, profile, ), ); } } catch { if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set(profile, null); } } return cachedFCMProviders.get(profile); } function endFirebase() { fcmAdmin.apps?.forEach(app => app?.delete()); } function endAPNs() { for (const provider of cachedAPNProviders.values()) { provider?.shutdown(); } } function getAPNsNotificationTopic(platformDetails: PlatformDetails): string { if (platformDetails.platform === 'macos') { return 'app.comm.macos'; } return platformDetails.codeVersion && platformDetails.codeVersion >= 87 ? 
'app.comm' : 'org.squadcal.app'; } type WebPushConfig = { +publicKey: string, +privateKey: string }; let cachedWebPushConfig: ?WebPushConfig = null; async function getWebPushConfig(): Promise { if (cachedWebPushConfig) { return cachedWebPushConfig; } cachedWebPushConfig = await getCommConfig({ folder: 'secrets', name: 'web_push_config', }); if (cachedWebPushConfig) { webpush.setVapidDetails( 'mailto:support@comm.app', cachedWebPushConfig.publicKey, cachedWebPushConfig.privateKey, ); } return cachedWebPushConfig; } async function ensureWebPushInitialized() { if (cachedWebPushConfig) { return; } await getWebPushConfig(); } type WNSConfig = { +tenantID: string, +appID: string, +secret: string }; type WNSAccessToken = { +token: string, +expires: number }; let wnsAccessToken: ?WNSAccessToken = null; async function getWNSToken(): Promise { const expiryWindowInMs = 10_000; if ( wnsAccessToken && wnsAccessToken.expires >= Date.now() - expiryWindowInMs ) { return wnsAccessToken.token; } const config = await getCommConfig({ folder: 'secrets', name: 'wns_config', }); if (!config) { return null; } const params = new URLSearchParams(); params.append('grant_type', 'client_credentials'); params.append('client_id', config.appID); params.append('client_secret', config.secret); params.append('scope', 'https://wns.windows.com/.default'); try { const response = await fetch( `https://login.microsoftonline.com/${config.tenantID}/oauth2/v2.0/token`, { method: 'POST', body: params }, ); if (!response.ok) { console.error('Failure when getting the WNS token: ', response); return null; } const responseJson = await response.json(); wnsAccessToken = { token: responseJson['access_token'], expires: Date.now() + responseJson['expires_in'] * 1000, }; } catch (err) { console.error('Failure when getting the WNS token: ', err); return null; } return wnsAccessToken?.token; } export { getAPNPushProfileForCodeVersion, getFCMPushProfileForCodeVersion, getAPNProvider, getFCMProvider, endFirebase, endAPNs, getAPNsNotificationTopic, getWebPushConfig, ensureWebPushInitialized, getWNSToken, }; diff --git a/keyserver/src/push/rescind.js b/keyserver/src/push/rescind.js index d3edb89a2..a2b8855ea 100644 --- a/keyserver/src/push/rescind.js +++ b/keyserver/src/push/rescind.js @@ -1,379 +1,379 @@ // @flow import apn from '@parse/node-apn'; import type { ResponseFailure } from '@parse/node-apn'; import type { FirebaseError } from 'firebase-admin'; import invariant from 'invariant'; import type { PlatformDetails } from 'lib/types/device-types.js'; import { threadSubscriptions } from 'lib/types/subscription-types.js'; import { threadPermissions } from 'lib/types/thread-permission-types.js'; import { promiseAll } from 'lib/utils/promises.js'; import { tID } from 'lib/utils/validation-utils.js'; import { prepareEncryptedAndroidNotificationRescinds, prepareEncryptedIOSNotificationRescind, } from './crypto.js'; import { getAPNsNotificationTopic } from './providers.js'; import type { NotificationTargetDevice, TargetedAndroidNotification, TargetedAPNsNotification, } from './types.js'; import { apnPush, fcmPush, type APNPushResult, type FCMPushResult, } from './utils.js'; import createIDs from '../creators/id-creator.js'; import { dbQuery, SQL } from '../database/database.js'; import type { SQLStatementType } from '../database/types.js'; import { validateOutput } from '../utils/validation-utils.js'; type ParsedDelivery = { +platform: 'ios' | 'macos' | 'android', +codeVersion: ?number, +stateVersion: ?number, +notificationID: string, +deviceTokens: 
$ReadOnlyArray, }; type RescindDelivery = { source: 'rescind', rescindedID: string, errors?: | $ReadOnlyArray | $ReadOnlyArray, }; async function rescindPushNotifs( notifCondition: SQLStatementType, inputCountCondition?: SQLStatementType, ) { const notificationExtractString = `$.${threadSubscriptions.home}`; const visPermissionExtractString = `$.${threadPermissions.VISIBLE}.value`; const fetchQuery = SQL` SELECT n.id, n.user, n.thread, n.message, n.delivery, n.collapse_key, COUNT( `; fetchQuery.append(inputCountCondition ? inputCountCondition : SQL`m.thread`); fetchQuery.append(SQL` ) AS unread_count FROM notifications n LEFT JOIN memberships m ON m.user = n.user AND m.last_message > m.last_read_message AND m.role > 0 AND JSON_EXTRACT(subscription, ${notificationExtractString}) AND JSON_EXTRACT(permissions, ${visPermissionExtractString}) WHERE n.rescinded = 0 AND `); fetchQuery.append(notifCondition); fetchQuery.append(SQL` GROUP BY n.id, m.user`); const [fetchResult] = await dbQuery(fetchQuery); - const allDeviceTokens = new Set(); + const allDeviceTokens = new Set(); const parsedDeliveries: { [string]: $ReadOnlyArray } = {}; for (const row of fetchResult) { const rawDelivery = JSON.parse(row.delivery); const deliveries = Array.isArray(rawDelivery) ? rawDelivery : [rawDelivery]; const id = row.id.toString(); const rowParsedDeliveries = []; for (const delivery of deliveries) { if ( delivery.iosID || delivery.deviceType === 'ios' || delivery.deviceType === 'macos' ) { const deviceTokens = delivery.iosDeviceTokens ?? delivery.deviceTokens; rowParsedDeliveries.push({ notificationID: delivery.iosID, codeVersion: delivery.codeVersion, stateVersion: delivery.stateVersion, platform: delivery.deviceType ?? 'ios', deviceTokens, }); deviceTokens.forEach(deviceToken => allDeviceTokens.add(deviceToken)); } else if (delivery.androidID || delivery.deviceType === 'android') { const deviceTokens = delivery.androidDeviceTokens ?? delivery.deviceTokens; rowParsedDeliveries.push({ notificationID: row.collapse_key ? 
row.collapse_key : id, codeVersion: delivery.codeVersion, stateVersion: delivery.stateVersion, platform: 'android', deviceTokens, }); deviceTokens.forEach(deviceToken => allDeviceTokens.add(deviceToken)); } } parsedDeliveries[id] = rowParsedDeliveries; } const deviceTokenToCookieID = await getDeviceTokenToCookieID(allDeviceTokens); const deliveryPromises: { [string]: Promise | Promise, } = {}; const notifInfo = {}; const rescindedIDs = []; for (const row of fetchResult) { const id = row.id.toString(); const threadID = row.thread.toString(); notifInfo[id] = { userID: row.user.toString(), threadID, messageID: row.message.toString(), }; for (const delivery of parsedDeliveries[id]) { let platformDetails: PlatformDetails = { platform: delivery.platform }; if (delivery.codeVersion) { platformDetails = { ...platformDetails, codeVersion: delivery.codeVersion, }; } if (delivery.stateVersion) { platformDetails = { ...platformDetails, stateVersion: delivery.stateVersion, }; } if (delivery.platform === 'ios') { const devices = delivery.deviceTokens.map(deviceToken => ({ deviceToken, cookieID: deviceTokenToCookieID[deviceToken], })); const deliveryPromise = (async () => { const targetedNotifications = await prepareIOSNotification( delivery.notificationID, row.unread_count, threadID, platformDetails, devices, ); return await apnPush({ targetedNotifications, platformDetails: { platform: 'ios', codeVersion: delivery.codeVersion, }, }); })(); deliveryPromises[id] = deliveryPromise; } else if (delivery.platform === 'android') { const devices = delivery.deviceTokens.map(deviceToken => ({ deviceToken, cookieID: deviceTokenToCookieID[deviceToken], })); const deliveryPromise = (async () => { const targetedNotifications = await prepareAndroidNotification( delivery.notificationID, row.unread_count, threadID, platformDetails, devices, ); return await fcmPush({ targetedNotifications, codeVersion: delivery.codeVersion, }); })(); deliveryPromises[id] = deliveryPromise; } } rescindedIDs.push(id); } const numRescinds = Object.keys(deliveryPromises).length; const dbIDsPromise: Promise> = (async () => { if (numRescinds === 0) { return undefined; } return await createIDs('notifications', numRescinds); })(); const rescindPromise: Promise = (async () => { if (rescindedIDs.length === 0) { return undefined; } const rescindQuery = SQL` UPDATE notifications SET rescinded = 1 WHERE id IN (${rescindedIDs}) `; return await dbQuery(rescindQuery); })(); const [deliveryResults, dbIDs] = await Promise.all([ promiseAll(deliveryPromises), dbIDsPromise, rescindPromise, ]); const newNotifRows = []; if (numRescinds > 0) { invariant(dbIDs, 'dbIDs should be set'); for (const rescindedID in deliveryResults) { const delivery: RescindDelivery = { source: 'rescind', rescindedID, }; const { errors } = deliveryResults[rescindedID]; if (errors) { delivery.errors = errors; } const dbID = dbIDs.shift(); const { userID, threadID, messageID } = notifInfo[rescindedID]; newNotifRows.push([ dbID, userID, threadID, messageID, null, JSON.stringify([delivery]), 1, ]); } } if (newNotifRows.length > 0) { const insertQuery = SQL` INSERT INTO notifications (id, user, thread, message, collapse_key, delivery, rescinded) VALUES ${newNotifRows} `; await dbQuery(insertQuery); } } async function getDeviceTokenToCookieID( deviceTokens: Set, ): Promise<{ +[string]: string }> { if (deviceTokens.size === 0) { return {}; } const deviceTokenToCookieID = {}; const fetchCookiesQuery = SQL` SELECT id, device_token FROM cookies WHERE device_token IN (${[...deviceTokens]}) `; 
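// Editor's note, illustrative only (not part of this patch): this helper maps
// each device token back to the cookie (session) that registered it, so the
// rescind path can pair every token with the cookieID needed for per-session
// notification encryption. A hypothetical sketch of the expected shape, with
// made-up IDs:
//
//   await getDeviceTokenToCookieID(new Set(['tokenA', 'tokenB']));
//   // -> { tokenA: '101', tokenB: '102' }, where '101' and '102' are the
//   // cookies.id rows owning those device tokens.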
const [fetchResult] = await dbQuery(fetchCookiesQuery); for (const row of fetchResult) { deviceTokenToCookieID[row.device_token.toString()] = row.id.toString(); } return deviceTokenToCookieID; } async function conditionallyEncryptNotification( notification: T, codeVersion: ?number, devices: $ReadOnlyArray, encryptCallback: ( devices: $ReadOnlyArray, notification: T, codeVersion?: ?number, ) => Promise< $ReadOnlyArray<{ +notification: T, +cookieID: string, +deviceToken: string, +encryptionOrder?: number, }>, >, ): Promise<$ReadOnlyArray<{ +deviceToken: string, +notification: T }>> { const shouldBeEncrypted = codeVersion && codeVersion >= 233; if (!shouldBeEncrypted) { return devices.map(({ deviceToken }) => ({ notification, deviceToken, })); } const notifications = await encryptCallback( devices, notification, codeVersion, ); return notifications.map(({ deviceToken, notification: notif }) => ({ deviceToken, notification: notif, })); } async function prepareIOSNotification( iosID: string, unreadCount: number, threadID: string, platformDetails: PlatformDetails, devices: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { threadID = validateOutput(platformDetails, tID, threadID); const { codeVersion } = platformDetails; const notification = new apn.Notification(); notification.topic = getAPNsNotificationTopic({ platform: 'ios', codeVersion, }); if (codeVersion && codeVersion > 198) { notification.mutableContent = true; notification.pushType = 'alert'; notification.badge = unreadCount; } else { notification.priority = 5; notification.contentAvailable = true; notification.pushType = 'background'; } notification.payload = codeVersion && codeVersion > 135 ? { backgroundNotifType: 'CLEAR', notificationId: iosID, setUnreadStatus: true, threadID, } : { managedAps: { action: 'CLEAR', notificationId: iosID, }, }; return await conditionallyEncryptNotification( notification, codeVersion, devices, prepareEncryptedIOSNotificationRescind, ); } async function prepareAndroidNotification( notifID: string, unreadCount: number, threadID: string, platformDetails: PlatformDetails, devices: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { threadID = validateOutput(platformDetails, tID, threadID); const { codeVersion } = platformDetails; const notification = { data: { badge: unreadCount.toString(), rescind: 'true', rescindID: notifID, setUnreadStatus: 'true', threadID, }, }; return await conditionallyEncryptNotification( notification, codeVersion, devices, prepareEncryptedAndroidNotificationRescinds, ); } export { rescindPushNotifs }; diff --git a/keyserver/src/push/send.js b/keyserver/src/push/send.js index 31c5460fd..085aff171 100644 --- a/keyserver/src/push/send.js +++ b/keyserver/src/push/send.js @@ -1,1733 +1,1736 @@ // @flow import apn from '@parse/node-apn'; import type { ResponseFailure } from '@parse/node-apn'; import invariant from 'invariant'; import _cloneDeep from 'lodash/fp/cloneDeep.js'; import _flow from 'lodash/fp/flow.js'; import _groupBy from 'lodash/fp/groupBy.js'; import _mapValues from 'lodash/fp/mapValues.js'; import _pickBy from 'lodash/fp/pickBy.js'; import type { QueryResults } from 'mysql'; import t from 'tcomb'; import uuidv4 from 'uuid/v4.js'; import { oldValidUsernameRegex } from 'lib/shared/account-utils.js'; import { isUserMentioned } from 'lib/shared/mention-utils.js'; import { createMessageInfo, sortMessageInfoList, shimUnsupportedRawMessageInfos, } from 'lib/shared/message-utils.js'; import { messageSpecs } from 'lib/shared/messages/message-specs.js'; import { notifTextsForMessageInfo } from 
'lib/shared/notif-utils.js'; import { isStaff } from 'lib/shared/staff-utils.js'; import { rawThreadInfoFromServerThreadInfo, threadInfoFromRawThreadInfo, } from 'lib/shared/thread-utils.js'; import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import type { Platform, PlatformDetails } from 'lib/types/device-types.js'; import { messageTypes } from 'lib/types/message-types-enum.js'; import { type RawMessageInfo, type MessageData, } from 'lib/types/message-types.js'; import { rawMessageInfoValidator } from 'lib/types/message-types.js'; import type { WNSNotification, ResolvedNotifTexts, } from 'lib/types/notif-types.js'; import { resolvedNotifTextsValidator } from 'lib/types/notif-types.js'; import type { ServerThreadInfo, ThreadInfo } from 'lib/types/thread-types.js'; import { updateTypes } from 'lib/types/update-types-enum.js'; import { type GlobalUserInfo } from 'lib/types/user-types.js'; import { isDev } from 'lib/utils/dev-utils.js'; import { values } from 'lib/utils/objects.js'; import { tID, tPlatformDetails, tShape } from 'lib/utils/validation-utils.js'; import { prepareEncryptedIOSNotifications, prepareEncryptedAndroidNotifications, prepareEncryptedWebNotifications, } from './crypto.js'; import { getAPNsNotificationTopic } from './providers.js'; import { rescindPushNotifs } from './rescind.js'; import type { NotificationTargetDevice, TargetedAPNsNotification, TargetedAndroidNotification, TargetedWebNotification, TargetedWNSNotification, } from './types.js'; import { apnPush, fcmPush, getUnreadCounts, apnMaxNotificationPayloadByteSize, fcmMaxNotificationPayloadByteSize, wnsMaxNotificationPayloadByteSize, webPush, wnsPush, type WebPushError, type WNSPushError, } from './utils.js'; import createIDs from '../creators/id-creator.js'; import { createUpdates } from '../creators/update-creator.js'; import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; import type { CollapsableNotifInfo } from '../fetchers/message-fetchers.js'; import { fetchCollapsableNotifs } from '../fetchers/message-fetchers.js'; import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; import { fetchUserInfos } from '../fetchers/user-fetchers.js'; import type { Viewer } from '../session/viewer.js'; import { getENSNames } from '../utils/ens-cache.js'; import { validateOutput } from '../utils/validation-utils.js'; export type Device = { +platform: Platform, +deviceToken: string, +cookieID: string, +codeVersion: ?number, +stateVersion: ?number, }; export type PushUserInfo = { +devices: Device[], // messageInfos and messageDatas have the same key +messageInfos: RawMessageInfo[], +messageDatas: MessageData[], }; type Delivery = PushDelivery | { collapsedInto: string }; type NotificationRow = { +dbID: string, +userID: string, +threadID?: ?string, +messageID?: ?string, +collapseKey?: ?string, +deliveries: Delivery[], }; export type PushInfo = { [userID: string]: PushUserInfo }; async function sendPushNotifs(pushInfo: PushInfo) { if (Object.keys(pushInfo).length === 0) { return; } const [ unreadCounts, { usersToCollapsableNotifInfo, serverThreadInfos, userInfos }, dbIDs, ] = await Promise.all([ getUnreadCounts(Object.keys(pushInfo)), fetchInfos(pushInfo), createDBIDs(pushInfo), ]); const preparePromises: Array>> = []; const notifications: Map = new Map(); for (const userID in usersToCollapsableNotifInfo) { const threadInfos = _flow( _mapValues((serverThreadInfo: ServerThreadInfo) => { const rawThreadInfo = rawThreadInfoFromServerThreadInfo( serverThreadInfo, userID, ); if 
(!rawThreadInfo) { return null; } return threadInfoFromRawThreadInfo(rawThreadInfo, userID, userInfos); }), _pickBy(threadInfo => threadInfo), )(serverThreadInfos); for (const notifInfo of usersToCollapsableNotifInfo[userID]) { preparePromises.push( preparePushNotif({ notifInfo, userID, pushUserInfo: pushInfo[userID], unreadCount: unreadCounts[userID], threadInfos, userInfos, dbIDs, rowsToSave: notifications, }), ); } } const prepareResults = await Promise.all(preparePromises); const flattenedPrepareResults = prepareResults.filter(Boolean).flat(); const deliveryResults = await deliverPushNotifsInEncryptionOrder( flattenedPrepareResults, ); const cleanUpPromise = (async () => { if (dbIDs.length === 0) { return; } const query = SQL`DELETE FROM ids WHERE id IN (${dbIDs})`; await dbQuery(query); })(); await Promise.all([ cleanUpPromise, saveNotifResults(deliveryResults, notifications, true), ]); } type PreparePushResult = { +platform: Platform, +notificationInfo: NotificationInfo, +notification: | TargetedAPNsNotification | TargetedAndroidNotification | TargetedWebNotification | TargetedWNSNotification, }; async function preparePushNotif(input: { notifInfo: CollapsableNotifInfo, userID: string, pushUserInfo: PushUserInfo, unreadCount: number, threadInfos: { +[threadID: string]: ThreadInfo }, userInfos: { +[userID: string]: GlobalUserInfo }, dbIDs: string[], // mutable rowsToSave: Map, // mutable }): Promise> { const { notifInfo, userID, pushUserInfo, unreadCount, threadInfos, userInfos, dbIDs, rowsToSave, } = input; const hydrateMessageInfo = (rawMessageInfo: RawMessageInfo) => createMessageInfo(rawMessageInfo, userID, userInfos, threadInfos); const newMessageInfos = []; const newRawMessageInfos = []; for (const newRawMessageInfo of notifInfo.newMessageInfos) { const newMessageInfo = hydrateMessageInfo(newRawMessageInfo); if (newMessageInfo) { newMessageInfos.push(newMessageInfo); newRawMessageInfos.push(newRawMessageInfo); } } if (newMessageInfos.length === 0) { return null; } const existingMessageInfos = notifInfo.existingMessageInfos .map(hydrateMessageInfo) .filter(Boolean); const allMessageInfos = sortMessageInfoList([ ...newMessageInfos, ...existingMessageInfos, ]); const [firstNewMessageInfo, ...remainingNewMessageInfos] = newMessageInfos; const { threadID } = firstNewMessageInfo; const threadInfo = threadInfos[threadID]; const parentThreadInfo = threadInfo.parentThreadID ? threadInfos[threadInfo.parentThreadID] : null; const updateBadge = threadInfo.currentUser.subscription.home; const displayBanner = threadInfo.currentUser.subscription.pushNotifs; const username = userInfos[userID] && userInfos[userID].username; const userWasMentioned = username && threadInfo.currentUser.role && oldValidUsernameRegex.test(username) && newMessageInfos.some(newMessageInfo => { const unwrappedMessageInfo = newMessageInfo.type === messageTypes.SIDEBAR_SOURCE ? 
newMessageInfo.sourceMessage : newMessageInfo; return ( unwrappedMessageInfo.type === messageTypes.TEXT && isUserMentioned(username, unwrappedMessageInfo.text) ); }); if (!updateBadge && !displayBanner && !userWasMentioned) { return null; } const badgeOnly = !displayBanner && !userWasMentioned; const notifTargetUserInfo = { id: userID, username }; const notifTexts = await notifTextsForMessageInfo( allMessageInfos, threadInfo, parentThreadInfo, notifTargetUserInfo, getENSNames, ); if (!notifTexts) { return null; } const dbID = dbIDs.shift(); invariant(dbID, 'should have sufficient DB IDs'); const byPlatform = getDevicesByPlatform(pushUserInfo.devices); const firstMessageID = firstNewMessageInfo.id; invariant(firstMessageID, 'RawMessageInfo.id should be set on server'); const notificationInfo = { source: 'new_message', dbID, userID, threadID, messageID: firstMessageID, collapseKey: notifInfo.collapseKey, }; const preparePromises: Array>> = []; const iosVersionsToTokens = byPlatform.get('ios'); if (iosVersionsToTokens) { for (const [versionKey, devices] of iosVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const platformDetails: PlatformDetails = { platform: 'ios', codeVersion, stateVersion, }; const shimmedNewRawMessageInfos = shimUnsupportedRawMessageInfos( newRawMessageInfos, platformDetails, ); const preparePromise: Promise<$ReadOnlyArray> = (async () => { const targetedNotifications = await prepareAPNsNotification( { notifTexts, newRawMessageInfos: shimmedNewRawMessageInfos, threadID: threadInfo.id, collapseKey: notifInfo.collapseKey, badgeOnly, unreadCount, platformDetails, }, devices, ); return targetedNotifications.map(notification => ({ notification, platform: 'ios', notificationInfo: { ...notificationInfo, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const androidVersionsToTokens = byPlatform.get('android'); if (androidVersionsToTokens) { for (const [versionKey, devices] of androidVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const platformDetails = { platform: 'android', codeVersion, stateVersion, }; const shimmedNewRawMessageInfos = shimUnsupportedRawMessageInfos( newRawMessageInfos, platformDetails, ); const preparePromise: Promise<$ReadOnlyArray> = (async () => { const targetedNotifications = await prepareAndroidNotification( { notifTexts, newRawMessageInfos: shimmedNewRawMessageInfos, threadID: threadInfo.id, collapseKey: notifInfo.collapseKey, badgeOnly, unreadCount, platformDetails, dbID, }, devices, ); return targetedNotifications.map(notification => ({ notification, platform: 'android', notificationInfo: { ...notificationInfo, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const webVersionsToTokens = byPlatform.get('web'); if (webVersionsToTokens) { for (const [versionKey, devices] of webVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const platformDetails = { platform: 'web', codeVersion, stateVersion, }; const preparePromise: Promise<$ReadOnlyArray> = (async () => { const targetedNotifications = await prepareWebNotification( userID, { notifTexts, threadID: threadInfo.id, unreadCount, platformDetails, }, devices, ); return targetedNotifications.map(notification => ({ notification, platform: 'web', notificationInfo: { ...notificationInfo, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const macosVersionsToTokens = byPlatform.get('macos'); 
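// Editor's note, illustrative only (not part of this patch): byPlatform is
// produced by getDevicesByPlatform below, which groups devices by platform
// and then by a "<codeVersion>|<stateVersion>" key from versionKeyToString.
// For macOS (and Windows) the code version is normalized to -1, so a
// hypothetical macOS device with stateVersion 43 would land here under the
// key '-1|43':
//
//   const macosDevices = macosVersionsToTokens?.get('-1|43') ?? [];
//   // -> [{ cookieID: '...', deviceToken: '...' }]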
if (macosVersionsToTokens) { for (const [versionKey, devices] of macosVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const platformDetails = { platform: 'macos', codeVersion, stateVersion, }; const shimmedNewRawMessageInfos = shimUnsupportedRawMessageInfos( newRawMessageInfos, platformDetails, ); const preparePromise: Promise<$ReadOnlyArray> = (async () => { const targetedNotifications = await prepareAPNsNotification( { notifTexts, newRawMessageInfos: shimmedNewRawMessageInfos, threadID: threadInfo.id, collapseKey: notifInfo.collapseKey, badgeOnly, unreadCount, platformDetails, }, devices, ); return targetedNotifications.map(notification => ({ notification, platform: 'macos', notificationInfo: { ...notificationInfo, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const windowsVersionsToTokens = byPlatform.get('windows'); if (windowsVersionsToTokens) { for (const [versionKey, devices] of windowsVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const platformDetails = { platform: 'windows', codeVersion, stateVersion, }; const preparePromise: Promise<$ReadOnlyArray> = (async () => { const notification = await prepareWNSNotification({ notifTexts, threadID: threadInfo.id, unreadCount, platformDetails, }); return devices.map(({ deviceToken }) => ({ notification: ({ deviceToken, notification, }: TargetedWNSNotification), platform: 'windows', notificationInfo: { ...notificationInfo, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } for (const newMessageInfo of remainingNewMessageInfos) { const newDBID = dbIDs.shift(); invariant(newDBID, 'should have sufficient DB IDs'); const messageID = newMessageInfo.id; invariant(messageID, 'RawMessageInfo.id should be set on server'); rowsToSave.set(newDBID, { dbID: newDBID, userID, threadID: newMessageInfo.threadID, messageID, collapseKey: notifInfo.collapseKey, deliveries: [{ collapsedInto: dbID }], }); } const prepareResults = await Promise.all(preparePromises); return prepareResults.flat(); } // For better readability we don't differentiate between // encrypted and unencrypted notifs and order them together function compareEncryptionOrder( pushNotif1: PreparePushResult, pushNotif2: PreparePushResult, ): number { const order1 = pushNotif1.notification.encryptionOrder ?? 0; const order2 = pushNotif2.notification.encryptionOrder ?? 
0; return order1 - order2; } async function deliverPushNotifsInEncryptionOrder( preparedPushNotifs: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const deliveryPromises: Array>> = []; const groupedByDevice = _groupBy( preparedPushNotif => preparedPushNotif.deviceToken, )(preparedPushNotifs); for (const preparedPushNotifsForDevice of values(groupedByDevice)) { const orderedPushNotifsForDevice = preparedPushNotifsForDevice.sort( compareEncryptionOrder, ); const deviceDeliveryPromise = (async () => { const deliveries = []; for (const preparedPushNotif of orderedPushNotifsForDevice) { const { platform, notification, notificationInfo } = preparedPushNotif; let delivery: PushResult; if (platform === 'ios' || platform === 'macos') { delivery = await sendAPNsNotification( platform, [notification], notificationInfo, ); } else if (platform === 'android') { delivery = await sendAndroidNotification( [notification], notificationInfo, ); } else if (platform === 'web') { delivery = await sendWebNotifications( [notification], notificationInfo, ); } else if (platform === 'windows') { delivery = await sendWNSNotification( [notification], notificationInfo, ); } if (delivery) { deliveries.push(delivery); } } return deliveries; })(); deliveryPromises.push(deviceDeliveryPromise); } const deliveryResults = await Promise.all(deliveryPromises); return deliveryResults.flat(); } async function sendRescindNotifs(rescindInfo: PushInfo) { if (Object.keys(rescindInfo).length === 0) { return; } const usersToCollapsableNotifInfo = await fetchCollapsableNotifs(rescindInfo); const promises = []; for (const userID in usersToCollapsableNotifInfo) { for (const notifInfo of usersToCollapsableNotifInfo[userID]) { for (const existingMessageInfo of notifInfo.existingMessageInfos) { const rescindCondition = SQL` n.user = ${userID} AND n.thread = ${existingMessageInfo.threadID} AND n.message = ${existingMessageInfo.id} `; promises.push(rescindPushNotifs(rescindCondition)); } } } await Promise.all(promises); } // The results in deliveryResults will be combined with the rows // in rowsToSave and then written to the notifications table async function saveNotifResults( deliveryResults: $ReadOnlyArray, inputRowsToSave: Map, rescindable: boolean, ) { const rowsToSave = new Map(inputRowsToSave); const allInvalidTokens = []; for (const deliveryResult of deliveryResults) { const { info, delivery, invalidTokens } = deliveryResult; const { dbID, userID } = info; const curNotifRow = rowsToSave.get(dbID); if (curNotifRow) { curNotifRow.deliveries.push(delivery); } else { // Ternary expressions for Flow const threadID = info.threadID ? info.threadID : null; const messageID = info.messageID ? info.messageID : null; const collapseKey = info.collapseKey ? 
info.collapseKey : null; rowsToSave.set(dbID, { dbID, userID, threadID, messageID, collapseKey, deliveries: [delivery], }); } if (invalidTokens) { allInvalidTokens.push({ userID, tokens: invalidTokens, }); } } const notificationRows = []; for (const notification of rowsToSave.values()) { notificationRows.push([ notification.dbID, notification.userID, notification.threadID, notification.messageID, notification.collapseKey, JSON.stringify(notification.deliveries), Number(!rescindable), ]); } const dbPromises: Array> = []; if (allInvalidTokens.length > 0) { dbPromises.push(removeInvalidTokens(allInvalidTokens)); } if (notificationRows.length > 0) { const query = SQL` INSERT INTO notifications (id, user, thread, message, collapse_key, delivery, rescinded) VALUES ${notificationRows} `; dbPromises.push(dbQuery(query)); } if (dbPromises.length > 0) { await Promise.all(dbPromises); } } async function fetchInfos(pushInfo: PushInfo) { const usersToCollapsableNotifInfo = await fetchCollapsableNotifs(pushInfo); - const threadIDs = new Set(); - const threadWithChangedNamesToMessages = new Map(); + const threadIDs = new Set(); + const threadWithChangedNamesToMessages = new Map>(); const addThreadIDsFromMessageInfos = (rawMessageInfo: RawMessageInfo) => { const threadID = rawMessageInfo.threadID; threadIDs.add(threadID); const messageSpec = messageSpecs[rawMessageInfo.type]; if (messageSpec.threadIDs) { for (const id of messageSpec.threadIDs(rawMessageInfo)) { threadIDs.add(id); } } if ( rawMessageInfo.type === messageTypes.CHANGE_SETTINGS && rawMessageInfo.field === 'name' ) { const messages = threadWithChangedNamesToMessages.get(threadID); if (messages) { messages.push(rawMessageInfo.id); } else { threadWithChangedNamesToMessages.set(threadID, [rawMessageInfo.id]); } } }; for (const userID in usersToCollapsableNotifInfo) { for (const notifInfo of usersToCollapsableNotifInfo[userID]) { for (const rawMessageInfo of notifInfo.existingMessageInfos) { addThreadIDsFromMessageInfos(rawMessageInfo); } for (const rawMessageInfo of notifInfo.newMessageInfos) { addThreadIDsFromMessageInfos(rawMessageInfo); } } } // These threadInfos won't have currentUser set const threadPromise = fetchServerThreadInfos({ threadIDs }); const oldNamesPromise: Promise = (async () => { if (threadWithChangedNamesToMessages.size === 0) { return undefined; } const typesThatAffectName = [ messageTypes.CHANGE_SETTINGS, messageTypes.CREATE_THREAD, ]; const oldNameQuery = SQL` SELECT IF( JSON_TYPE(JSON_EXTRACT(m.content, "$.name")) = 'NULL', "", JSON_UNQUOTE(JSON_EXTRACT(m.content, "$.name")) ) AS name, m.thread FROM ( SELECT MAX(id) AS id FROM messages WHERE type IN (${typesThatAffectName}) AND JSON_EXTRACT(content, "$.name") IS NOT NULL AND`; const threadClauses = []; for (const [threadID, messages] of threadWithChangedNamesToMessages) { threadClauses.push( SQL`(thread = ${threadID} AND id NOT IN (${messages}))`, ); } oldNameQuery.append(mergeOrConditions(threadClauses)); oldNameQuery.append(SQL` GROUP BY thread ) x LEFT JOIN messages m ON m.id = x.id `); return await dbQuery(oldNameQuery); })(); const [threadResult, oldNames] = await Promise.all([ threadPromise, oldNamesPromise, ]); const serverThreadInfos = threadResult.threadInfos; if (oldNames) { const [result] = oldNames; for (const row of result) { const threadID = row.thread.toString(); serverThreadInfos[threadID].name = row.name; } } const userInfos = await fetchNotifUserInfos( serverThreadInfos, usersToCollapsableNotifInfo, ); return { usersToCollapsableNotifInfo, 
serverThreadInfos, userInfos }; } async function fetchNotifUserInfos( serverThreadInfos: { +[threadID: string]: ServerThreadInfo }, usersToCollapsableNotifInfo: { +[userID: string]: CollapsableNotifInfo[] }, ) { - const missingUserIDs = new Set(); + const missingUserIDs = new Set(); for (const threadID in serverThreadInfos) { const serverThreadInfo = serverThreadInfos[threadID]; for (const member of serverThreadInfo.members) { missingUserIDs.add(member.id); } } const addUserIDsFromMessageInfos = (rawMessageInfo: RawMessageInfo) => { missingUserIDs.add(rawMessageInfo.creatorID); const userIDs = messageSpecs[rawMessageInfo.type].userIDs?.(rawMessageInfo) ?? []; for (const userID of userIDs) { missingUserIDs.add(userID); } }; for (const userID in usersToCollapsableNotifInfo) { missingUserIDs.add(userID); for (const notifInfo of usersToCollapsableNotifInfo[userID]) { for (const rawMessageInfo of notifInfo.existingMessageInfos) { addUserIDsFromMessageInfos(rawMessageInfo); } for (const rawMessageInfo of notifInfo.newMessageInfos) { addUserIDsFromMessageInfos(rawMessageInfo); } } } return await fetchUserInfos([...missingUserIDs]); } async function createDBIDs(pushInfo: PushInfo): Promise { let numIDsNeeded = 0; for (const userID in pushInfo) { numIDsNeeded += pushInfo[userID].messageInfos.length; } return await createIDs('notifications', numIDsNeeded); } type VersionKey = { codeVersion: number, stateVersion: number }; const versionKeyRegex: RegExp = new RegExp(/^-?\d+\|-?\d+$/); function versionKeyToString(versionKey: VersionKey): string { return `${versionKey.codeVersion}|${versionKey.stateVersion}`; } function stringToVersionKey(versionKeyString: string): VersionKey { invariant( versionKeyRegex.test(versionKeyString), 'should pass correct version key string', ); const [codeVersion, stateVersion] = versionKeyString.split('|').map(Number); return { codeVersion, stateVersion }; } function getDevicesByPlatform( devices: $ReadOnlyArray, ): Map>> { - const byPlatform = new Map(); + const byPlatform = new Map< + Platform, + Map>, + >(); for (const device of devices) { let innerMap = byPlatform.get(device.platform); if (!innerMap) { - innerMap = new Map(); + innerMap = new Map>(); byPlatform.set(device.platform, innerMap); } const codeVersion: number = device.codeVersion !== null && device.codeVersion !== undefined && device.platform !== 'windows' && device.platform !== 'macos' ? device.codeVersion : -1; const stateVersion: number = device.stateVersion ?? 
-1; const versionKey = versionKeyToString({ codeVersion, stateVersion, }); let innerMostArrayTmp: ?Array = innerMap.get(versionKey); if (!innerMostArrayTmp) { innerMostArrayTmp = []; innerMap.set(versionKey, innerMostArrayTmp); } const innerMostArray = innerMostArrayTmp; innerMostArray.push({ cookieID: device.cookieID, deviceToken: device.deviceToken, }); } return byPlatform; } type APNsNotifInputData = { +notifTexts: ResolvedNotifTexts, +newRawMessageInfos: RawMessageInfo[], +threadID: string, +collapseKey: ?string, +badgeOnly: boolean, +unreadCount: number, +platformDetails: PlatformDetails, }; const apnsNotifInputDataValidator = tShape({ notifTexts: resolvedNotifTextsValidator, newRawMessageInfos: t.list(rawMessageInfoValidator), threadID: tID, collapseKey: t.maybe(t.String), badgeOnly: t.Boolean, unreadCount: t.Number, platformDetails: tPlatformDetails, }); async function prepareAPNsNotification( inputData: APNsNotifInputData, devices: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const convertedData = validateOutput( inputData.platformDetails, apnsNotifInputDataValidator, inputData, ); const { notifTexts, newRawMessageInfos, threadID, collapseKey, badgeOnly, unreadCount, platformDetails, } = convertedData; const canDecryptNonCollapsibleTextNotifs = platformDetails.codeVersion && platformDetails.codeVersion > 222; const isNonCollapsibleTextNotification = newRawMessageInfos.every( newRawMessageInfo => newRawMessageInfo.type === messageTypes.TEXT, ) && !collapseKey; const canDecryptAllNotifTypes = platformDetails.codeVersion && platformDetails.codeVersion >= 267; const shouldBeEncrypted = platformDetails.platform === 'ios' && (canDecryptAllNotifTypes || (isNonCollapsibleTextNotification && canDecryptNonCollapsibleTextNotifs)); const uniqueID = uuidv4(); const notification = new apn.Notification(); notification.topic = getAPNsNotificationTopic(platformDetails); const { merged, ...rest } = notifTexts; // We don't include alert's body on macos because we // handle displaying the notification ourselves and // we don't want macOS to display it automatically. if (!badgeOnly && platformDetails.platform !== 'macos') { notification.body = merged; notification.sound = 'default'; } notification.payload = { ...notification.payload, ...rest, }; notification.badge = unreadCount; notification.threadId = threadID; notification.id = uniqueID; notification.pushType = 'alert'; notification.payload.id = uniqueID; notification.payload.threadID = threadID; if (platformDetails.codeVersion && platformDetails.codeVersion > 198) { notification.mutableContent = true; } if (collapseKey && canDecryptAllNotifTypes) { notification.payload.collapseID = collapseKey; } else if (collapseKey) { notification.collapseId = collapseKey; } const messageInfos = JSON.stringify(newRawMessageInfos); // We make a copy before checking notification's length, because calling // length compiles the notification and makes it immutable. Further // changes to its properties won't be reflected in the final plaintext // data that is sent. const copyWithMessageInfos = _cloneDeep(notification); copyWithMessageInfos.payload = { ...copyWithMessageInfos.payload, messageInfos, }; const notificationSizeValidator = notif => notif.length() <= apnMaxNotificationPayloadByteSize; if (!shouldBeEncrypted) { const notificationToSend = notificationSizeValidator( _cloneDeep(copyWithMessageInfos), ) ? 
copyWithMessageInfos : notification; return devices.map(({ deviceToken }) => ({ notification: notificationToSend, deviceToken, })); } const notifsWithMessageInfos = await prepareEncryptedIOSNotifications( devices, copyWithMessageInfos, platformDetails.codeVersion, notificationSizeValidator, ); const devicesWithExcessiveSize = notifsWithMessageInfos .filter(({ payloadSizeExceeded }) => payloadSizeExceeded) .map(({ deviceToken, cookieID }) => ({ deviceToken, cookieID })); if (devicesWithExcessiveSize.length === 0) { return notifsWithMessageInfos.map( ({ notification: notif, deviceToken, encryptedPayloadHash, encryptionOrder, }) => ({ notification: notif, deviceToken, encryptedPayloadHash, encryptionOrder, }), ); } const notifsWithoutMessageInfos = await prepareEncryptedIOSNotifications( devicesWithExcessiveSize, notification, platformDetails.codeVersion, ); const targetedNotifsWithMessageInfos = notifsWithMessageInfos .filter(({ payloadSizeExceeded }) => !payloadSizeExceeded) .map( ({ notification: notif, deviceToken, encryptedPayloadHash, encryptionOrder, }) => ({ notification: notif, deviceToken, encryptedPayloadHash, encryptionOrder, }), ); const targetedNotifsWithoutMessageInfos = notifsWithoutMessageInfos.map( ({ notification: notif, deviceToken, encryptedPayloadHash, encryptionOrder, }) => ({ notification: notif, deviceToken, encryptedPayloadHash, encryptionOrder, }), ); return [ ...targetedNotifsWithMessageInfos, ...targetedNotifsWithoutMessageInfos, ]; } type AndroidNotifInputData = { ...APNsNotifInputData, +dbID: string, }; const androidNotifInputDataValidator = tShape({ ...apnsNotifInputDataValidator.meta.props, dbID: t.String, }); async function prepareAndroidNotification( inputData: AndroidNotifInputData, devices: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const convertedData = validateOutput( inputData.platformDetails, androidNotifInputDataValidator, inputData, ); const { notifTexts, newRawMessageInfos, threadID, collapseKey, badgeOnly, unreadCount, platformDetails: { codeVersion }, dbID, } = convertedData; const canDecryptNonCollapsibleTextNotifs = codeVersion && codeVersion > 228; const isNonCollapsibleTextNotif = newRawMessageInfos.every( newRawMessageInfo => newRawMessageInfo.type === messageTypes.TEXT, ) && !collapseKey; const canDecryptAllNotifTypes = codeVersion && codeVersion >= 267; const shouldBeEncrypted = canDecryptAllNotifTypes || (canDecryptNonCollapsibleTextNotifs && isNonCollapsibleTextNotif); const { merged, ...rest } = notifTexts; const notification = { data: { badge: unreadCount.toString(), ...rest, threadID, }, }; let notifID; if (collapseKey && canDecryptAllNotifTypes) { notifID = dbID; notification.data = { ...notification.data, collapseKey, }; } else if (collapseKey) { notifID = collapseKey; } else { notifID = dbID; } // The reason we only include `badgeOnly` for newer clients is because older // clients don't know how to parse it. The reason we only include `id` for // newer clients is that if the older clients see that field, they assume // the notif has a full payload, and then crash when trying to parse it. // By skipping `id` we allow old clients to still handle in-app notifs and // badge updating. if (!badgeOnly || (codeVersion && codeVersion >= 69)) { notification.data = { ...notification.data, id: notifID, badgeOnly: badgeOnly ? 
'1' : '0', }; } const messageInfos = JSON.stringify(newRawMessageInfos); const copyWithMessageInfos = { ...notification, data: { ...notification.data, messageInfos }, }; if (!shouldBeEncrypted) { const notificationToSend = Buffer.byteLength(JSON.stringify(copyWithMessageInfos)) <= fcmMaxNotificationPayloadByteSize ? copyWithMessageInfos : notification; return devices.map(({ deviceToken }) => ({ notification: notificationToSend, deviceToken, })); } const notificationsSizeValidator = notif => { const serializedNotif = JSON.stringify(notif); return ( !serializedNotif || Buffer.byteLength(serializedNotif) <= fcmMaxNotificationPayloadByteSize ); }; const notifsWithMessageInfos = await prepareEncryptedAndroidNotifications( devices, copyWithMessageInfos, notificationsSizeValidator, ); const devicesWithExcessiveSize = notifsWithMessageInfos .filter(({ payloadSizeExceeded }) => payloadSizeExceeded) .map(({ cookieID, deviceToken }) => ({ cookieID, deviceToken })); if (devicesWithExcessiveSize.length === 0) { return notifsWithMessageInfos.map( ({ notification: notif, deviceToken, encryptionOrder }) => ({ notification: notif, deviceToken, encryptionOrder, }), ); } const notifsWithoutMessageInfos = await prepareEncryptedAndroidNotifications( devicesWithExcessiveSize, notification, ); const targetedNotifsWithMessageInfos = notifsWithMessageInfos .filter(({ payloadSizeExceeded }) => !payloadSizeExceeded) .map(({ notification: notif, deviceToken, encryptionOrder }) => ({ notification: notif, deviceToken, encryptionOrder, })); const targetedNotifsWithoutMessageInfos = notifsWithoutMessageInfos.map( ({ notification: notif, deviceToken, encryptionOrder }) => ({ notification: notif, deviceToken, encryptionOrder, }), ); return [ ...targetedNotifsWithMessageInfos, ...targetedNotifsWithoutMessageInfos, ]; } type WebNotifInputData = { +notifTexts: ResolvedNotifTexts, +threadID: string, +unreadCount: number, +platformDetails: PlatformDetails, }; const webNotifInputDataValidator = tShape({ notifTexts: resolvedNotifTextsValidator, threadID: tID, unreadCount: t.Number, platformDetails: tPlatformDetails, }); async function prepareWebNotification( userID: string, inputData: WebNotifInputData, devices: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const convertedData = validateOutput( inputData.platformDetails, webNotifInputDataValidator, inputData, ); const { notifTexts, threadID, unreadCount } = convertedData; const id = uuidv4(); const { merged, ...rest } = notifTexts; const notification = { ...rest, unreadCount, id, threadID, }; const isStaffOrDev = isStaff(userID) || isDev; const shouldBeEncrypted = hasMinCodeVersion(convertedData.platformDetails, { web: 43, }) && isStaffOrDev; if (!shouldBeEncrypted) { return devices.map(({ deviceToken }) => ({ deviceToken, notification })); } return prepareEncryptedWebNotifications(devices, notification); } type WNSNotifInputData = { +notifTexts: ResolvedNotifTexts, +threadID: string, +unreadCount: number, +platformDetails: PlatformDetails, }; const wnsNotifInputDataValidator = tShape({ notifTexts: resolvedNotifTextsValidator, threadID: tID, unreadCount: t.Number, platformDetails: tPlatformDetails, }); async function prepareWNSNotification( inputData: WNSNotifInputData, ): Promise { const convertedData = validateOutput( inputData.platformDetails, wnsNotifInputDataValidator, inputData, ); const { notifTexts, threadID, unreadCount } = convertedData; const { merged, ...rest } = notifTexts; const notification = { ...rest, unreadCount, threadID, }; if ( 
Buffer.byteLength(JSON.stringify(notification)) > wnsMaxNotificationPayloadByteSize ) { console.warn('WNS notification exceeds size limit'); } return notification; } type NotificationInfo = | { +source: 'new_message', +dbID: string, +userID: string, +threadID: string, +messageID: string, +collapseKey: ?string, +codeVersion: number, +stateVersion: number, } | { +source: 'mark_as_unread' | 'mark_as_read' | 'activity_update', +dbID: string, +userID: string, +codeVersion: number, +stateVersion: number, }; type APNsDelivery = { +source: $PropertyType, +deviceType: 'ios' | 'macos', +iosID: string, +deviceTokens: $ReadOnlyArray, +codeVersion: number, +stateVersion: number, +errors?: $ReadOnlyArray, +encryptedPayloadHashes?: $ReadOnlyArray, +deviceTokensToPayloadHash?: { +[deviceToken: string]: string, }, }; type APNsResult = { info: NotificationInfo, delivery: APNsDelivery, invalidTokens?: $ReadOnlyArray, }; async function sendAPNsNotification( platform: 'ios' | 'macos', targetedNotifications: $ReadOnlyArray, notificationInfo: NotificationInfo, ): Promise { const { source, codeVersion, stateVersion } = notificationInfo; const response = await apnPush({ targetedNotifications, platformDetails: { platform, codeVersion }, }); invariant( new Set(targetedNotifications.map(({ notification }) => notification.id)) .size === 1, 'Encrypted versions of the same notification must share id value', ); const iosID = targetedNotifications[0].notification.id; const deviceTokens = targetedNotifications.map( ({ deviceToken }) => deviceToken, ); let delivery: APNsDelivery = { source, deviceType: platform, iosID, deviceTokens, codeVersion, stateVersion, }; if (response.errors) { delivery = { ...delivery, errors: response.errors, }; } const deviceTokensToPayloadHash: { [string]: string } = {}; for (const targetedNotification of targetedNotifications) { if (targetedNotification.encryptedPayloadHash) { deviceTokensToPayloadHash[targetedNotification.deviceToken] = targetedNotification.encryptedPayloadHash; } } if (Object.keys(deviceTokensToPayloadHash).length !== 0) { delivery = { ...delivery, deviceTokensToPayloadHash, }; } const result: APNsResult = { info: notificationInfo, delivery, }; if (response.invalidTokens) { result.invalidTokens = response.invalidTokens; } return result; } type PushResult = AndroidResult | APNsResult | WebResult | WNSResult; type PushDelivery = AndroidDelivery | APNsDelivery | WebDelivery | WNSDelivery; type AndroidDelivery = { source: $PropertyType, deviceType: 'android', androidIDs: $ReadOnlyArray, deviceTokens: $ReadOnlyArray, codeVersion: number, stateVersion: number, errors?: $ReadOnlyArray, }; type AndroidResult = { info: NotificationInfo, delivery: AndroidDelivery, invalidTokens?: $ReadOnlyArray, }; async function sendAndroidNotification( targetedNotifications: $ReadOnlyArray, notificationInfo: NotificationInfo, ): Promise { const collapseKey = notificationInfo.collapseKey ? notificationInfo.collapseKey : null; // for Flow... const { source, codeVersion, stateVersion } = notificationInfo; const response = await fcmPush({ targetedNotifications, collapseKey, codeVersion, }); const deviceTokens = targetedNotifications.map( ({ deviceToken }) => deviceToken, ); const androidIDs = response.fcmIDs ? 
response.fcmIDs : []; const delivery: AndroidDelivery = { source, deviceType: 'android', androidIDs, deviceTokens, codeVersion, stateVersion, }; if (response.errors) { delivery.errors = response.errors; } const result: AndroidResult = { info: notificationInfo, delivery, }; if (response.invalidTokens) { result.invalidTokens = response.invalidTokens; } return result; } type WebDelivery = { +source: $PropertyType, +deviceType: 'web', +deviceTokens: $ReadOnlyArray, +codeVersion?: number, +stateVersion: number, +errors?: $ReadOnlyArray, }; type WebResult = { +info: NotificationInfo, +delivery: WebDelivery, +invalidTokens?: $ReadOnlyArray, }; async function sendWebNotifications( targetedNotifications: $ReadOnlyArray, notificationInfo: NotificationInfo, ): Promise { const { source, codeVersion, stateVersion } = notificationInfo; const response = await webPush(targetedNotifications); const deviceTokens = targetedNotifications.map( ({ deviceToken }) => deviceToken, ); const delivery: WebDelivery = { source, deviceType: 'web', deviceTokens, codeVersion, errors: response.errors, stateVersion, }; const result: WebResult = { info: notificationInfo, delivery, invalidTokens: response.invalidTokens, }; return result; } type WNSDelivery = { +source: $PropertyType, +deviceType: 'windows', +wnsIDs: $ReadOnlyArray, +deviceTokens: $ReadOnlyArray, +codeVersion?: number, +stateVersion: number, +errors?: $ReadOnlyArray, }; type WNSResult = { +info: NotificationInfo, +delivery: WNSDelivery, +invalidTokens?: $ReadOnlyArray, }; async function sendWNSNotification( targetedNotifications: $ReadOnlyArray, notificationInfo: NotificationInfo, ): Promise { const { source, codeVersion, stateVersion } = notificationInfo; const response = await wnsPush(targetedNotifications); const deviceTokens = targetedNotifications.map( ({ deviceToken }) => deviceToken, ); const wnsIDs = response.wnsIDs ?? 
[]; const delivery: WNSDelivery = { source, deviceType: 'windows', wnsIDs, deviceTokens, codeVersion, errors: response.errors, stateVersion, }; const result: WNSResult = { info: notificationInfo, delivery, invalidTokens: response.invalidTokens, }; return result; } type InvalidToken = { +userID: string, +tokens: $ReadOnlyArray, }; async function removeInvalidTokens( invalidTokens: $ReadOnlyArray, ): Promise { const sqlTuples = invalidTokens.map( invalidTokenUser => SQL`( user = ${invalidTokenUser.userID} AND device_token IN (${invalidTokenUser.tokens}) )`, ); const sqlCondition = mergeOrConditions(sqlTuples); const selectQuery = SQL` SELECT id, user, device_token FROM cookies WHERE `; selectQuery.append(sqlCondition); const [result] = await dbQuery(selectQuery); - const userCookiePairsToInvalidDeviceTokens = new Map(); + const userCookiePairsToInvalidDeviceTokens = new Map<string, Set<string>>(); for (const row of result) { const userCookiePair = `${row.user}|${row.id}`; const existing = userCookiePairsToInvalidDeviceTokens.get(userCookiePair); if (existing) { existing.add(row.device_token); } else { userCookiePairsToInvalidDeviceTokens.set( userCookiePair, new Set([row.device_token]), ); } } const time = Date.now(); const promises: Array> = []; for (const entry of userCookiePairsToInvalidDeviceTokens) { const [userCookiePair, deviceTokens] = entry; const [userID, cookieID] = userCookiePair.split('|'); const updateDatas = [...deviceTokens].map(deviceToken => ({ type: updateTypes.BAD_DEVICE_TOKEN, userID, time, deviceToken, targetCookie: cookieID, })); promises.push(createUpdates(updateDatas)); } const updateQuery = SQL` UPDATE cookies SET device_token = NULL WHERE `; updateQuery.append(sqlCondition); promises.push(dbQuery(updateQuery)); await Promise.all(promises); } async function updateBadgeCount( viewer: Viewer, source: 'mark_as_unread' | 'mark_as_read' | 'activity_update', ) { const { userID } = viewer; const deviceTokenQuery = SQL` SELECT platform, device_token, versions, id FROM cookies WHERE user = ${userID} AND device_token IS NOT NULL `; if (viewer.data.cookieID) { deviceTokenQuery.append(SQL`AND id != ${viewer.cookieID} `); } const [unreadCounts, [deviceTokenResult], [dbID]] = await Promise.all([ getUnreadCounts([userID]), dbQuery(deviceTokenQuery), createIDs('notifications', 1), ]); const unreadCount = unreadCounts[userID]; const devices = deviceTokenResult.map(row => { const versions = JSON.parse(row.versions); return { platform: row.platform, cookieID: row.id, deviceToken: row.device_token, codeVersion: versions?.codeVersion, stateVersion: versions?.stateVersion, }; }); const byPlatform = getDevicesByPlatform(devices); const preparePromises: Array>> = []; const iosVersionsToTokens = byPlatform.get('ios'); if (iosVersionsToTokens) { for (const [versionKey, deviceInfos] of iosVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const notification = new apn.Notification(); notification.topic = getAPNsNotificationTopic({ platform: 'ios', codeVersion, stateVersion, }); notification.badge = unreadCount; notification.pushType = 'alert'; const preparePromise: Promise = (async () => { let targetedNotifications: $ReadOnlyArray; if (codeVersion > 222) { const notificationsArray = await prepareEncryptedIOSNotifications( deviceInfos, notification, codeVersion, ); targetedNotifications = notificationsArray.map( ({ notification: notif, deviceToken, encryptionOrder }) => ({ notification: notif, deviceToken, encryptionOrder, }), ); } else { targetedNotifications =
deviceInfos.map(({ deviceToken }) => ({ notification, deviceToken, })); } return targetedNotifications.map(targetedNotification => ({ notification: targetedNotification, platform: 'ios', notificationInfo: { source, dbID, userID, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const androidVersionsToTokens = byPlatform.get('android'); if (androidVersionsToTokens) { for (const [versionKey, deviceInfos] of androidVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const notificationData = codeVersion < 69 ? { badge: unreadCount.toString() } : { badge: unreadCount.toString(), badgeOnly: '1' }; const notification = { data: notificationData }; const preparePromise: Promise = (async () => { let targetedNotifications: $ReadOnlyArray; if (codeVersion > 222) { const notificationsArray = await prepareEncryptedAndroidNotifications( deviceInfos, notification, ); targetedNotifications = notificationsArray.map( ({ notification: notif, deviceToken, encryptionOrder }) => ({ notification: notif, deviceToken, encryptionOrder, }), ); } else { targetedNotifications = deviceInfos.map(({ deviceToken }) => ({ deviceToken, notification, })); } return targetedNotifications.map(targetedNotification => ({ notification: targetedNotification, platform: 'android', notificationInfo: { source, dbID, userID, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const macosVersionsToTokens = byPlatform.get('macos'); if (macosVersionsToTokens) { for (const [versionKey, deviceInfos] of macosVersionsToTokens) { const { codeVersion, stateVersion } = stringToVersionKey(versionKey); const notification = new apn.Notification(); notification.topic = getAPNsNotificationTopic({ platform: 'macos', codeVersion, stateVersion, }); notification.badge = unreadCount; notification.pushType = 'alert'; const preparePromise: Promise = (async () => { return deviceInfos.map(({ deviceToken }) => ({ notification: ({ deviceToken, notification, }: TargetedAPNsNotification), platform: 'macos', notificationInfo: { source, dbID, userID, codeVersion, stateVersion, }, })); })(); preparePromises.push(preparePromise); } } const prepareResults = await Promise.all(preparePromises); const flattenedPrepareResults = prepareResults.filter(Boolean).flat(); const deliveryResults = await deliverPushNotifsInEncryptionOrder( flattenedPrepareResults, ); await saveNotifResults(deliveryResults, new Map(), false); } export { sendPushNotifs, sendRescindNotifs, updateBadgeCount }; diff --git a/keyserver/src/responders/comm-landing-responders.js b/keyserver/src/responders/comm-landing-responders.js index 23469271a..cf4ad1fcf 100644 --- a/keyserver/src/responders/comm-landing-responders.js +++ b/keyserver/src/responders/comm-landing-responders.js @@ -1,34 +1,34 @@ // @flow import type { $Response, $Request } from 'express'; import { type EmailSubscriptionRequest } from 'lib/types/account-types.js'; import { ServerError } from 'lib/utils/errors.js'; import { tShape, tEmail } from 'lib/utils/validation-utils.js'; import { sendEmailSubscriptionRequestToAshoat } from '../emails/subscribe-email-updates.js'; import { checkInputValidator } from '../utils/validation-utils.js'; -const emailSubscriptionInputValidator = tShape({ +const emailSubscriptionInputValidator = tShape({ email: tEmail, }); async function emailSubscriptionResponder( req: $Request, res: $Response, ): Promise { try { if (!req.body || typeof req.body !== 'object') { throw new ServerError('invalid_parameters'); } 
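// The checkInputValidator call just below runs the tShape/tEmail validator
// declared above against the request body. As a rough standalone sketch of
// what that kind of check does, using only plain tcomb primitives — the
// names emailValidator and subscriptionValidator are illustrative and not
// taken from the codebase:
import t from 'tcomb';

// Refinement of t.String that only accepts something email-shaped.
const emailValidator = t.refinement(
  t.String,
  s => /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(s),
  'Email',
);

// Structural check mirroring EmailSubscriptionRequest: one required email.
const subscriptionValidator = t.interface({ email: emailValidator });

function checkSubscriptionInput(body) {
  // .is() returns a boolean rather than throwing, so the caller can map a
  // failure onto its own error type (e.g. ServerError('invalid_parameters')).
  if (!subscriptionValidator.is(body)) {
    throw new Error('invalid_parameters');
  }
  return body;
}

// checkSubscriptionInput({ email: 'user@example.com' }); // returns the body
// checkSubscriptionInput({ email: 'not-an-email' });     // throws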
const input: any = req.body; checkInputValidator(emailSubscriptionInputValidator, input); const subscriptionRequest: EmailSubscriptionRequest = input; await sendEmailSubscriptionRequestToAshoat(subscriptionRequest); res.json({ success: true }); } catch { res.json({ success: false }); } } export { emailSubscriptionResponder }; diff --git a/keyserver/src/responders/user-responders.js b/keyserver/src/responders/user-responders.js index 84ab7eaa6..5b5239439 100644 --- a/keyserver/src/responders/user-responders.js +++ b/keyserver/src/responders/user-responders.js @@ -1,772 +1,778 @@ // @flow import type { Utility as OlmUtility } from '@commapp/olm'; import invariant from 'invariant'; import { ErrorTypes, SiweMessage } from 'siwe'; -import t, { type TInterface, type TUnion } from 'tcomb'; +import t, { type TInterface, type TUnion, type TEnums } from 'tcomb'; import bcrypt from 'twin-bcrypt'; import { baseLegalPolicies, policies, policyTypeValidator, } from 'lib/facts/policies.js'; import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import type { ResetPasswordRequest, LogOutResponse, RegisterResponse, RegisterRequest, LogInResponse, LogInRequest, UpdatePasswordRequest, UpdateUserSettingsRequest, PolicyAcknowledgmentRequest, ClaimUsernameResponse, } from 'lib/types/account-types.js'; import { userSettingsTypes, notificationTypeValues, logInActionSources, } from 'lib/types/account-types.js'; import { type ClientAvatar, clientAvatarValidator, type UpdateUserAvatarResponse, type UpdateUserAvatarRequest, } from 'lib/types/avatar-types.js'; import type { ReservedUsernameMessage, IdentityKeysBlob, SignedIdentityKeysBlob, } from 'lib/types/crypto-types.js'; +import type { DeviceType } from 'lib/types/device-types'; import { type CalendarQuery, rawEntryInfoValidator, type FetchEntryInfosBase, } from 'lib/types/entry-types.js'; import { defaultNumberPerThread, rawMessageInfoValidator, messageTruncationStatusesValidator, } from 'lib/types/message-types.js'; import type { SIWEAuthRequest, SIWEMessage, SIWESocialProof, } from 'lib/types/siwe-types.js'; import { type SubscriptionUpdateRequest, type SubscriptionUpdateResponse, threadSubscriptionValidator, } from 'lib/types/subscription-types.js'; import { rawThreadInfoValidator } from 'lib/types/thread-types.js'; import { createUpdatesResultValidator } from 'lib/types/update-types.js'; import { type PasswordUpdate, loggedOutUserInfoValidator, loggedInUserInfoValidator, userInfoValidator, } from 'lib/types/user-types.js'; import { identityKeysBlobValidator, signedIdentityKeysBlobValidator, } from 'lib/utils/crypto-utils.js'; import { ServerError } from 'lib/utils/errors.js'; import { values } from 'lib/utils/objects.js'; import { getPublicKeyFromSIWEStatement, isValidSIWEMessage, isValidSIWEStatementWithPublicKey, primaryIdentityPublicKeyRegex, } from 'lib/utils/siwe-utils.js'; import { tShape, tPlatformDetails, tPassword, tEmail, tOldValidUsername, tRegex, tID, } from 'lib/utils/validation-utils.js'; import { entryQueryInputValidator, newEntryQueryInputValidator, normalizeCalendarQuery, verifyCalendarQueryThreadIDs, } from './entry-responders.js'; import { createAccount, processSIWEAccountCreation, } from '../creators/account-creator.js'; import { createOlmSession } from '../creators/olm-session-creator.js'; import { dbQuery, SQL } from '../database/database.js'; import { deleteAccount } from '../deleters/account-deleters.js'; import { deleteCookie } from '../deleters/cookie-deleters.js'; import { checkAndInvalidateSIWENonceEntry } from 
'../deleters/siwe-nonce-deleters.js'; import { fetchEntryInfos } from '../fetchers/entry-fetchers.js'; import { fetchMessageInfos } from '../fetchers/message-fetchers.js'; import { fetchNotAcknowledgedPolicies } from '../fetchers/policy-acknowledgment-fetchers.js'; import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; import { fetchKnownUserInfos, fetchLoggedInUserInfo, fetchUserIDForEthereumAddress, fetchUsername, } from '../fetchers/user-fetchers.js'; import { createNewAnonymousCookie, createNewUserCookie, setNewSession, } from '../session/cookies.js'; import type { Viewer } from '../session/viewer.js'; import { accountUpdater, checkAndSendVerificationEmail, checkAndSendPasswordResetEmail, updatePassword, updateUserSettings, updateUserAvatar, } from '../updaters/account-updaters.js'; import { fetchOlmAccount } from '../updaters/olm-account-updater.js'; import { userSubscriptionUpdater } from '../updaters/user-subscription-updaters.js'; import { viewerAcknowledgmentUpdater } from '../updaters/viewer-acknowledgment-updater.js'; import { getOlmUtility } from '../utils/olm-utils.js'; export const subscriptionUpdateRequestInputValidator: TInterface = tShape({ threadID: tID, updatedFields: tShape({ pushNotifs: t.maybe(t.Boolean), home: t.maybe(t.Boolean), }), }); export const subscriptionUpdateResponseValidator: TInterface = tShape({ threadSubscription: threadSubscriptionValidator, }); async function userSubscriptionUpdateResponder( viewer: Viewer, request: SubscriptionUpdateRequest, ): Promise { const threadSubscription = await userSubscriptionUpdater(viewer, request); return { threadSubscription, }; } export const accountUpdateInputValidator: TInterface = tShape({ updatedFields: tShape({ email: t.maybe(tEmail), password: t.maybe(tPassword), }), currentPassword: tPassword, }); async function passwordUpdateResponder( viewer: Viewer, request: PasswordUpdate, ): Promise { await accountUpdater(viewer, request); } async function sendVerificationEmailResponder(viewer: Viewer): Promise { await checkAndSendVerificationEmail(viewer); } export const resetPasswordRequestInputValidator: TInterface = tShape({ usernameOrEmail: t.union([tEmail, tOldValidUsername]), }); async function sendPasswordResetEmailResponder( viewer: Viewer, request: ResetPasswordRequest, ): Promise { await checkAndSendPasswordResetEmail(request); } export const logOutResponseValidator: TInterface = tShape({ currentUserInfo: loggedOutUserInfoValidator, }); async function logOutResponder(viewer: Viewer): Promise { if (viewer.loggedIn) { const [anonymousViewerData] = await Promise.all([ createNewAnonymousCookie({ platformDetails: viewer.platformDetails, deviceToken: viewer.deviceToken, }), deleteCookie(viewer.cookieID), ]); viewer.setNewCookie(anonymousViewerData); } return { currentUserInfo: { anonymous: true, }, }; } async function accountDeletionResponder( viewer: Viewer, ): Promise { const result = await deleteAccount(viewer); invariant(result, 'deleteAccount should return result if handed request'); return result; } -const deviceTokenUpdateRequestInputValidator = tShape({ - deviceType: t.maybe(t.enums.of(['ios', 'android'])), - deviceToken: t.String, -}); +type OldDeviceTokenUpdateRequest = { + +deviceType?: ?DeviceType, + +deviceToken: string, +}; +const deviceTokenUpdateRequestInputValidator = + tShape({ + deviceType: t.maybe(t.enums.of(['ios', 'android'])), + deviceToken: t.String, + }); export const registerRequestInputValidator: TInterface = tShape({ username: t.String, email: t.maybe(tEmail), password: 
tPassword, calendarQuery: t.maybe(newEntryQueryInputValidator), deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, // We include `primaryIdentityPublicKey` to avoid breaking // old clients, but we no longer do anything with it. primaryIdentityPublicKey: t.maybe(tRegex(primaryIdentityPublicKeyRegex)), signedIdentityKeysBlob: t.maybe(signedIdentityKeysBlobValidator), initialNotificationsEncryptedMessage: t.maybe(t.String), }); export const registerResponseValidator: TInterface = tShape({ id: t.String, rawMessageInfos: t.list(rawMessageInfoValidator), currentUserInfo: loggedInUserInfoValidator, cookieChange: tShape({ threadInfos: t.dict(tID, rawThreadInfoValidator), userInfos: t.list(userInfoValidator), }), }); async function accountCreationResponder( viewer: Viewer, request: RegisterRequest, ): Promise { const { signedIdentityKeysBlob } = request; if (signedIdentityKeysBlob) { const identityKeys: IdentityKeysBlob = JSON.parse( signedIdentityKeysBlob.payload, ); if (!identityKeysBlobValidator.is(identityKeys)) { throw new ServerError('invalid_identity_keys_blob'); } const olmUtil: OlmUtility = getOlmUtility(); try { olmUtil.ed25519_verify( identityKeys.primaryIdentityPublicKeys.ed25519, signedIdentityKeysBlob.payload, signedIdentityKeysBlob.signature, ); } catch (e) { throw new ServerError('invalid_signature'); } } return await createAccount(viewer, request); } type ProcessSuccessfulLoginParams = { +viewer: Viewer, +input: any, +userID: string, +calendarQuery: ?CalendarQuery, +socialProof?: ?SIWESocialProof, +signedIdentityKeysBlob?: ?SignedIdentityKeysBlob, +initialNotificationsEncryptedMessage?: string, }; async function processSuccessfulLogin( params: ProcessSuccessfulLoginParams, ): Promise { const { viewer, input, userID, calendarQuery, socialProof, signedIdentityKeysBlob, initialNotificationsEncryptedMessage, } = params; const request: LogInRequest = input; const newServerTime = Date.now(); const deviceToken = request.deviceTokenUpdateRequest ? 
request.deviceTokenUpdateRequest.deviceToken : viewer.deviceToken; const [userViewerData, notAcknowledgedPolicies] = await Promise.all([ createNewUserCookie(userID, { platformDetails: request.platformDetails, deviceToken, socialProof, signedIdentityKeysBlob, }), fetchNotAcknowledgedPolicies(userID, baseLegalPolicies), deleteCookie(viewer.cookieID), ]); viewer.setNewCookie(userViewerData); if ( notAcknowledgedPolicies.length && hasMinCodeVersion(viewer.platformDetails, { native: 181 }) ) { const currentUserInfo = await fetchLoggedInUserInfo(viewer); return { notAcknowledgedPolicies, currentUserInfo: currentUserInfo, rawMessageInfos: [], truncationStatuses: {}, userInfos: [], rawEntryInfos: [], serverTime: 0, cookieChange: { threadInfos: {}, userInfos: [], }, }; } if (calendarQuery) { await setNewSession(viewer, calendarQuery, newServerTime); } const olmSessionPromise = (async () => { if ( userViewerData.cookieID && initialNotificationsEncryptedMessage && signedIdentityKeysBlob ) { await createOlmSession( initialNotificationsEncryptedMessage, 'notifications', userViewerData.cookieID, ); } })(); const threadCursors: { [string]: null } = {}; for (const watchedThreadID of request.watchedIDs) { threadCursors[watchedThreadID] = null; } const messageSelectionCriteria = { threadCursors, joinedThreads: true }; const entriesPromise: Promise = (async () => { if (!calendarQuery) { return undefined; } return await fetchEntryInfos(viewer, [calendarQuery]); })(); const [ threadsResult, messagesResult, entriesResult, userInfos, currentUserInfo, ] = await Promise.all([ fetchThreadInfos(viewer), fetchMessageInfos(viewer, messageSelectionCriteria, defaultNumberPerThread), entriesPromise, fetchKnownUserInfos(viewer), fetchLoggedInUserInfo(viewer), olmSessionPromise, ]); const rawEntryInfos = entriesResult ? entriesResult.rawEntryInfos : null; const response: LogInResponse = { currentUserInfo, rawMessageInfos: messagesResult.rawMessageInfos, truncationStatuses: messagesResult.truncationStatuses, serverTime: newServerTime, userInfos: values(userInfos), cookieChange: { threadInfos: threadsResult.threadInfos, userInfos: [], }, }; if (rawEntryInfos) { return { ...response, rawEntryInfos, }; } return response; } export const logInRequestInputValidator: TInterface = tShape({ username: t.maybe(t.String), usernameOrEmail: t.maybe(t.union([tEmail, tOldValidUsername])), password: tPassword, watchedIDs: t.list(tID), calendarQuery: t.maybe(entryQueryInputValidator), deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, source: t.maybe(t.enums.of(values(logInActionSources))), // We include `primaryIdentityPublicKey` to avoid breaking // old clients, but we no longer do anything with it. 
primaryIdentityPublicKey: t.maybe(tRegex(primaryIdentityPublicKeyRegex)), signedIdentityKeysBlob: t.maybe(signedIdentityKeysBlobValidator), initialNotificationsEncryptedMessage: t.maybe(t.String), }); export const logInResponseValidator: TInterface = tShape({ currentUserInfo: loggedInUserInfoValidator, rawMessageInfos: t.list(rawMessageInfoValidator), truncationStatuses: messageTruncationStatusesValidator, userInfos: t.list(userInfoValidator), rawEntryInfos: t.maybe(t.list(rawEntryInfoValidator)), serverTime: t.Number, cookieChange: tShape({ threadInfos: t.dict(tID, rawThreadInfoValidator), userInfos: t.list(userInfoValidator), }), - notAcknowledgedPolicies: t.maybe(t.list(policyTypeValidator)), + notAcknowledgedPolicies: t.maybe(t.list(policyTypeValidator)), }); async function logInResponder( viewer: Viewer, request: LogInRequest, ): Promise { let identityKeys: ?IdentityKeysBlob; const { signedIdentityKeysBlob, initialNotificationsEncryptedMessage } = request; if (signedIdentityKeysBlob) { identityKeys = JSON.parse(signedIdentityKeysBlob.payload); const olmUtil: OlmUtility = getOlmUtility(); try { olmUtil.ed25519_verify( identityKeys.primaryIdentityPublicKeys.ed25519, signedIdentityKeysBlob.payload, signedIdentityKeysBlob.signature, ); } catch (e) { throw new ServerError('invalid_signature'); } } const calendarQuery = request.calendarQuery ? normalizeCalendarQuery(request.calendarQuery) : null; const verifyCalendarQueryThreadIDsPromise = (async () => { if (calendarQuery) { await verifyCalendarQueryThreadIDs(calendarQuery); } })(); const username = request.username ?? request.usernameOrEmail; if (!username) { if (hasMinCodeVersion(viewer.platformDetails, { native: 150 })) { throw new ServerError('invalid_credentials'); } else { throw new ServerError('invalid_parameters'); } } const userQuery = SQL` SELECT id, hash, username FROM users WHERE LCASE(username) = LCASE(${username}) `; const userQueryPromise = dbQuery(userQuery); const [[userResult]] = await Promise.all([ userQueryPromise, verifyCalendarQueryThreadIDsPromise, ]); if (userResult.length === 0) { if (hasMinCodeVersion(viewer.platformDetails, { native: 150 })) { throw new ServerError('invalid_credentials'); } else { throw new ServerError('invalid_parameters'); } } const userRow = userResult[0]; if (!userRow.hash || !bcrypt.compareSync(request.password, userRow.hash)) { throw new ServerError('invalid_credentials'); } const id = userRow.id.toString(); return await processSuccessfulLogin({ viewer, input: request, userID: id, calendarQuery, signedIdentityKeysBlob, initialNotificationsEncryptedMessage, }); } export const siweAuthRequestInputValidator: TInterface = tShape({ signature: t.String, message: t.String, calendarQuery: entryQueryInputValidator, deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, watchedIDs: t.list(tID), signedIdentityKeysBlob: t.maybe(signedIdentityKeysBlobValidator), initialNotificationsEncryptedMessage: t.maybe(t.String), doNotRegister: t.maybe(t.Boolean), }); async function siweAuthResponder( viewer: Viewer, request: SIWEAuthRequest, ): Promise { const { message, signature, deviceTokenUpdateRequest, platformDetails, signedIdentityKeysBlob, initialNotificationsEncryptedMessage, doNotRegister, } = request; const calendarQuery = normalizeCalendarQuery(request.calendarQuery); // 1. Ensure that `message` is a well formed Comm SIWE Auth message. 
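// The step 1 check below re-parses the plaintext that the client signed. For
// reference, this is roughly what a well-formed SIWE message contains when
// built with the siwe package; every field value here is a placeholder, and
// the Comm-specific checks inside isValidSIWEMessage are not reproduced.
import { SiweMessage, generateNonce } from 'siwe';

const plaintext = new SiweMessage({
  domain: 'example.org',
  address: '0x0000000000000000000000000000000000000000',
  // Comm additionally expects the statement to carry the client's primary
  // identity public key (see step 4 below).
  statement: 'Example statement',
  uri: 'https://example.org',
  version: '1',
  chainId: 1,
  nonce: generateNonce(),
  issuedAt: new Date().toISOString(),
}).prepareMessage();

// On the server side, the same class re-parses the received plaintext so the
// nonce, address, and statement can be inspected before signature checks.
const parsed = new SiweMessage(plaintext);
console.log(parsed.nonce, parsed.address);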
const siweMessage: SIWEMessage = new SiweMessage(message); if (!isValidSIWEMessage(siweMessage)) { throw new ServerError('invalid_parameters'); } // 2. Ensure that the `nonce` exists in the `siwe_nonces` table // AND hasn't expired. If those conditions are met, delete the entry to // ensure that the same `nonce` can't be re-used in a future request. const wasNonceCheckedAndInvalidated = await checkAndInvalidateSIWENonceEntry( siweMessage.nonce, ); if (!wasNonceCheckedAndInvalidated) { throw new ServerError('invalid_parameters'); } // 3. Validate SIWEMessage signature and handle possible errors. try { await siweMessage.validate(signature); } catch (error) { if (error === ErrorTypes.EXPIRED_MESSAGE) { // Thrown when the `expirationTime` is present and in the past. throw new ServerError('expired_message'); } else if (error === ErrorTypes.INVALID_SIGNATURE) { // Thrown when the `validate()` function can't verify the message. throw new ServerError('invalid_signature'); } else if (error === ErrorTypes.MALFORMED_SESSION) { // Thrown when some required field is missing. throw new ServerError('malformed_session'); } else { throw new ServerError('unknown_error'); } } // 4. Pull `primaryIdentityPublicKey` out from SIWEMessage `statement`. // We expect it to be included for BOTH native and web clients. const { statement } = siweMessage; const primaryIdentityPublicKey = statement && isValidSIWEStatementWithPublicKey(statement) ? getPublicKeyFromSIWEStatement(statement) : null; if (!primaryIdentityPublicKey) { throw new ServerError('invalid_siwe_statement_public_key'); } // 5. Verify `signedIdentityKeysBlob.payload` with included `signature` // if `signedIdentityKeysBlob` was included in the `SIWEAuthRequest`. let identityKeys: ?IdentityKeysBlob; if (signedIdentityKeysBlob) { identityKeys = JSON.parse(signedIdentityKeysBlob.payload); if (!identityKeysBlobValidator.is(identityKeys)) { throw new ServerError('invalid_identity_keys_blob'); } const olmUtil: OlmUtility = getOlmUtility(); try { olmUtil.ed25519_verify( identityKeys.primaryIdentityPublicKeys.ed25519, signedIdentityKeysBlob.payload, signedIdentityKeysBlob.signature, ); } catch (e) { throw new ServerError('invalid_signature'); } } // 6. Ensure that `primaryIdentityPublicKeys.ed25519` matches SIWE // statement `primaryIdentityPublicKey` if `identityKeys` exists. if ( identityKeys && identityKeys.primaryIdentityPublicKeys.ed25519 !== primaryIdentityPublicKey ) { throw new ServerError('primary_public_key_mismatch'); } // 7. Construct `SIWESocialProof` object with the stringified // SIWEMessage and the corresponding signature. const socialProof: SIWESocialProof = { siweMessage: siweMessage.toMessage(), siweMessageSignature: signature, }; // 8. Create account with call to `processSIWEAccountCreation(...)` // if address does not correspond to an existing user. let userID = await fetchUserIDForEthereumAddress(siweMessage.address); if (!userID && doNotRegister) { throw new ServerError('account_does_not_exist'); } else if (!userID) { const siweAccountCreationRequest = { address: siweMessage.address, calendarQuery, deviceTokenUpdateRequest, platformDetails, socialProof, }; userID = await processSIWEAccountCreation( viewer, siweAccountCreationRequest, ); } // 9. Complete login with call to `processSuccessfulLogin(...)`. 
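// Step 2 above delegates to checkAndInvalidateSIWENonceEntry. One plausible
// shape for that kind of helper, sketched with the keyserver's dbQuery/SQL
// tag: a single DELETE makes the check and the invalidation atomic, since
// affectedRows > 0 means the nonce existed and was fresh, and the same nonce
// can never be accepted twice. The creation_time column name and the
// 30-minute window are assumptions, not taken from the real implementation.
import { dbQuery, SQL } from '../database/database.js';

async function checkAndInvalidateNonceSketch(nonce) {
  const thirtyMinutesAgo = Date.now() - 30 * 60 * 1000;
  const query = SQL`
    DELETE FROM siwe_nonces
    WHERE nonce = ${nonce} AND creation_time > ${thirtyMinutesAgo}
  `;
  const [result] = await dbQuery(query);
  return result.affectedRows > 0;
}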
return await processSuccessfulLogin({ viewer, input: request, userID, calendarQuery, socialProof, signedIdentityKeysBlob, initialNotificationsEncryptedMessage, }); } export const updatePasswordRequestInputValidator: TInterface = tShape({ code: t.String, password: tPassword, watchedIDs: t.list(tID), calendarQuery: t.maybe(entryQueryInputValidator), deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, }); async function oldPasswordUpdateResponder( viewer: Viewer, request: UpdatePasswordRequest, ): Promise { if (request.calendarQuery) { request.calendarQuery = normalizeCalendarQuery(request.calendarQuery); } return await updatePassword(viewer, request); } export const updateUserSettingsInputValidator: TInterface = tShape({ name: t.irreducible( userSettingsTypes.DEFAULT_NOTIFICATIONS, x => x === userSettingsTypes.DEFAULT_NOTIFICATIONS, ), data: t.enums.of(notificationTypeValues), }); async function updateUserSettingsResponder( viewer: Viewer, request: UpdateUserSettingsRequest, ): Promise { await updateUserSettings(viewer, request); } export const policyAcknowledgmentRequestInputValidator: TInterface = tShape({ policy: t.maybe(t.enums.of(policies)), }); async function policyAcknowledgmentResponder( viewer: Viewer, request: PolicyAcknowledgmentRequest, ): Promise { await viewerAcknowledgmentUpdater(viewer, request.policy); } export const updateUserAvatarResponseValidator: TInterface = tShape({ updates: createUpdatesResultValidator, }); export const updateUserAvatarResponderValidator: TUnion< ?ClientAvatar | UpdateUserAvatarResponse, > = t.union([ t.maybe(clientAvatarValidator), updateUserAvatarResponseValidator, ]); async function updateUserAvatarResponder( viewer: Viewer, request: UpdateUserAvatarRequest, ): Promise { return await updateUserAvatar(viewer, request); } export const claimUsernameResponseValidator: TInterface = tShape({ message: t.String, signature: t.String, }); async function claimUsernameResponder( viewer: Viewer, ): Promise { const [username, accountInfo] = await Promise.all([ fetchUsername(viewer.userID), fetchOlmAccount('content'), ]); if (!username) { throw new ServerError('invalid_credentials'); } const issuedAt = new Date().toISOString(); const reservedUsernameMessage: ReservedUsernameMessage = { statement: 'This user is the owner of the following username and user ID', payload: { username, userID: viewer.userID, }, issuedAt, }; const message = JSON.stringify(reservedUsernameMessage); const signature = accountInfo.account.sign(message); return { message, signature }; } export { userSubscriptionUpdateResponder, passwordUpdateResponder, sendVerificationEmailResponder, sendPasswordResetEmailResponder, logOutResponder, accountDeletionResponder, accountCreationResponder, logInResponder, siweAuthResponder, oldPasswordUpdateResponder, updateUserSettingsResponder, policyAcknowledgmentResponder, updateUserAvatarResponder, claimUsernameResponder, }; diff --git a/keyserver/src/scripts/merge-users.js b/keyserver/src/scripts/merge-users.js index 83beaea78..093abae0e 100644 --- a/keyserver/src/scripts/merge-users.js +++ b/keyserver/src/scripts/merge-users.js @@ -1,196 +1,196 @@ // @flow import type { Shape } from 'lib/types/core.js'; import type { ServerThreadInfo } from 'lib/types/thread-types.js'; import { updateTypes } from 'lib/types/update-types-enum.js'; import { type UpdateData } from 'lib/types/update-types.js'; import { endScript } from './utils.js'; import { createUpdates } from '../creators/update-creator.js'; import { 
dbQuery, SQL } from '../database/database.js'; import type { SQLStatementType } from '../database/types.js'; import { deleteAccount } from '../deleters/account-deleters.js'; import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; import { createScriptViewer } from '../session/scripts.js'; import { changeRole, commitMembershipChangeset, type MembershipRow, } from '../updaters/thread-permission-updaters.js'; import RelationshipChangeset from '../utils/relationship-changeset.js'; async function main() { try { await mergeUsers('7147', '15972', { username: true, password: true }); endScript(); } catch (e) { endScript(); console.warn(e); } } type ReplaceUserInfo = Shape<{ +username: boolean, +password: boolean, }>; async function mergeUsers( fromUserID: string, toUserID: string, replaceUserInfo?: ReplaceUserInfo, ) { let updateUserRowQuery: ?SQLStatementType = null; let updateDatas: UpdateData[] = []; if (replaceUserInfo) { const replaceUserResult = await replaceUser( fromUserID, toUserID, replaceUserInfo, ); ({ sql: updateUserRowQuery, updateDatas } = replaceUserResult); } - const usersGettingUpdate = new Set(); - const usersNeedingUpdate = new Set(); + const usersGettingUpdate = new Set(); + const usersNeedingUpdate = new Set(); const needUserInfoUpdate = replaceUserInfo && replaceUserInfo.username; const setGettingUpdate = (threadInfo: ServerThreadInfo) => { if (!needUserInfoUpdate) { return; } for (const { id } of threadInfo.members) { usersGettingUpdate.add(id); usersNeedingUpdate.delete(id); } }; const setNeedingUpdate = (threadInfo: ServerThreadInfo) => { if (!needUserInfoUpdate) { return; } for (const { id } of threadInfo.members) { if (!usersGettingUpdate.has(id)) { usersNeedingUpdate.add(id); } } }; const newThreadRolePairs = []; const { threadInfos } = await fetchServerThreadInfos(); for (const threadID in threadInfos) { const threadInfo = threadInfos[threadID]; const fromUserExistingMember = threadInfo.members.find( memberInfo => memberInfo.id === fromUserID, ); if (!fromUserExistingMember) { setNeedingUpdate(threadInfo); continue; } const { role } = fromUserExistingMember; if (!role) { // Only transfer explicit memberships setNeedingUpdate(threadInfo); continue; } const toUserExistingMember = threadInfo.members.find( memberInfo => memberInfo.id === toUserID, ); if (!toUserExistingMember || !toUserExistingMember.role) { setGettingUpdate(threadInfo); newThreadRolePairs.push([threadID, role]); } else { setNeedingUpdate(threadInfo); } } const fromViewer = createScriptViewer(fromUserID); await deleteAccount(fromViewer); if (updateUserRowQuery) { await dbQuery(updateUserRowQuery); } const time = Date.now(); for (const userID of usersNeedingUpdate) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID, time, updatedUserID: toUserID, }); } await createUpdates(updateDatas); const changesets = await Promise.all( newThreadRolePairs.map(([threadID, role]) => changeRole(threadID, [toUserID], role), ), ); const membershipRows: Array = []; const relationshipChangeset = new RelationshipChangeset(); for (const currentChangeset of changesets) { const { membershipRows: currentMembershipRows, relationshipChangeset: currentRelationshipChangeset, } = currentChangeset; membershipRows.push(...currentMembershipRows); relationshipChangeset.addAll(currentRelationshipChangeset); } if (membershipRows.length > 0 || relationshipChangeset.getRowCount() > 0) { const toViewer = createScriptViewer(toUserID); const changeset = { membershipRows, relationshipChangeset }; await 
commitMembershipChangeset(toViewer, changeset); } } type ReplaceUserResult = { sql: ?SQLStatementType, updateDatas: UpdateData[], }; async function replaceUser( fromUserID: string, toUserID: string, replaceUserInfo: ReplaceUserInfo, ): Promise { if (Object.keys(replaceUserInfo).length === 0) { return { sql: null, updateDatas: [], }; } const fromUserQuery = SQL` SELECT username, hash FROM users WHERE id = ${fromUserID} `; const [fromUserResult] = await dbQuery(fromUserQuery); const [firstResult] = fromUserResult; if (!firstResult) { throw new Error(`couldn't fetch fromUserID ${fromUserID}`); } const changedFields: { [string]: string } = {}; if (replaceUserInfo.username) { changedFields.username = firstResult.username; } if (replaceUserInfo.password) { changedFields.hash = firstResult.hash; } const updateUserRowQuery = SQL` UPDATE users SET ${changedFields} WHERE id = ${toUserID} `; const updateDatas: UpdateData[] = []; if (replaceUserInfo.username) { updateDatas.push({ type: updateTypes.UPDATE_CURRENT_USER, userID: toUserID, time: Date.now(), }); } return { sql: updateUserRowQuery, updateDatas, }; } main(); diff --git a/keyserver/src/scripts/validate-role-permissions.js b/keyserver/src/scripts/validate-role-permissions.js index 010d54cc9..4eed4b770 100644 --- a/keyserver/src/scripts/validate-role-permissions.js +++ b/keyserver/src/scripts/validate-role-permissions.js @@ -1,167 +1,170 @@ // @flow import { specialRoles } from 'lib/permissions/special-roles.js'; import { getRolePermissionBlobs, getUniversalCommunityRootPermissionsBlob, } from 'lib/permissions/thread-permissions.js'; import { configurableCommunityPermissions, userSurfacedPermissions, + type UserSurfacedPermission, } from 'lib/types/thread-permission-types.js'; import { threadTypes } from 'lib/types/thread-types-enum.js'; import { deepDiff, values } from 'lib/utils/objects.js'; import { main } from './utils.js'; import { SQL, dbQuery } from '../database/database.js'; async function validateRolePermissions() { // Get all roles for existing communities since custom roles are at a // community-level rather than a thread-level. const fetchRolesQuery = SQL` SELECT r.id, r.name, r.permissions, r.thread, r.special_role = ${specialRoles.DEFAULT_ROLE} AS is_default, t.type FROM roles r INNER JOIN threads t ON t.id = r.thread WHERE t.type IN (${[ threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT, threadTypes.COMMUNITY_ROOT, ]}) `; const [results] = await dbQuery(fetchRolesQuery); for (const result of results) { const roleID = result.id.toString(); const roleName = result.name; const existingRolePermissions = JSON.parse(result.permissions); const roleIsDefaultRole = Boolean(result.is_default); const threadID = result.thread.toString(); const threadType = result.type; const universalCommunityPermissions = getUniversalCommunityRootPermissionsBlob(threadType); // Get the 'expected permissions' set for the role. If the role is // default (Members) or Admins, these permission blobs can be retrieved // by calling getRolePermissionBlobs with the threadType. Otherwise, the // role is a custom role and the expected permissions are the universal // community permissions assuming the role has not been edited. // The case of a role being edited is handled below. 
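// The validation below diffs the expected and existing permission blobs in
// both directions. A toy illustration of why a single direction can miss
// discrepancies — oneWayDiff is a deliberately simplified stand-in, not the
// deepDiff implementation from lib/utils/objects.js, and the permission keys
// are just example values:
function oneWayDiff(a, b) {
  // Reports only keys of `a` whose values differ from `b`; keys that exist
  // solely in `b` are never visited, so they never show up.
  const diff = {};
  for (const key of Object.keys(a)) {
    if (a[key] !== b[key]) {
      diff[key] = a[key];
    }
  }
  return diff;
}

const expected = { know_of: true, visible: true };
const existing = { know_of: true, edit_entries: true };

console.log(oneWayDiff(expected, existing)); // { visible: true }
console.log(oneWayDiff(existing, expected)); // { edit_entries: true }
// Only the union of both directions surfaces every discrepancy, which is why
// the script below calls deepDiff twice with the arguments swapped.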
const expectedPermissionBlobs = getRolePermissionBlobs(threadType); let baseExpectedPermissionBlob; if (roleIsDefaultRole) { baseExpectedPermissionBlob = expectedPermissionBlobs.Members; } else if (roleName === 'Admins') { baseExpectedPermissionBlob = expectedPermissionBlobs.Admins; } else if (roleName) { baseExpectedPermissionBlob = universalCommunityPermissions; } else { baseExpectedPermissionBlob = {}; } console.log('===================================='); // Ideally, this should never happen, but we'll skip over this just in case. if (!baseExpectedPermissionBlob) { console.log( `Skipping role ${roleName} with ID (${roleID}) in thread ${threadID}`, ); continue; } // Deep diff seems to compare objects one-way (so deepDiff(a, b) !== // deepDiff(b, a)). This means that if a key is present in one object but // missing from the other, a single one-way diff may not include it. As a // result, we need to compare both ways to ensure that we're not missing any // permission discrepancies. const expectedPermissionsToExistingPermissions = deepDiff( baseExpectedPermissionBlob, existingRolePermissions, ); const existingPermissionsToExpectedPermissions = deepDiff( existingRolePermissions, baseExpectedPermissionBlob, ); console.log( `Validating: Role Name (${roleName}) | Role ID (${roleID}) | ` + `Thread Type (${threadType}) | Thread ID (${threadID})\n`, ); console.log( `deepDiff(baseExpectedPermissionBlob, existingRolePermissions) = ${JSON.stringify( expectedPermissionsToExistingPermissions, null, 2, )}\n`, ); console.log( `deepDiff(existingRolePermissions, baseExpectedPermissionBlob) = ${JSON.stringify( existingPermissionsToExpectedPermissions, null, 2, )}\n`, ); // Now, we want to see if the permission discrepancies are due to the user // editing the role. To do this, we need to identify any permission // discrepancies that could be linked to a specific user-surfaced // permission. This could be useful in manually parsing through the // script results to 'write off' discrepancies as user role edits. - const userSurfacedExpectedPermissionsToExistingPermissions = new Set(); - const userSurfacedExistingPermissionsToExpectedPermissions = new Set(); + const userSurfacedExpectedPermissionsToExistingPermissions = + new Set<UserSurfacedPermission>(); + const userSurfacedExistingPermissionsToExpectedPermissions = + new Set<UserSurfacedPermission>(); for (const permission of values(userSurfacedPermissions)) { const permissionSet = Array.from( configurableCommunityPermissions[permission], ); for (const p of permissionSet) { if (expectedPermissionsToExistingPermissions[p] === true) { userSurfacedExpectedPermissionsToExistingPermissions.add(permission); } if (existingPermissionsToExpectedPermissions[p] === true) { userSurfacedExistingPermissionsToExpectedPermissions.add(permission); } } } const expectedPermissionsToExistingPermissionsValues = values( expectedPermissionsToExistingPermissions, ); const existingPermissionsToExpectedPermissionsValues = values( existingPermissionsToExpectedPermissions, ); if ( expectedPermissionsToExistingPermissionsValues.length > 0 || existingPermissionsToExpectedPermissionsValues.length > 0 ) { console.log( `Potential permission discrepancies for role ${roleName} that ` + `could be linked back to user surfaced permissions (i.e.
not an ` + `actual discrepancy, but rather a user edited a role): \n`, ); if (expectedPermissionsToExistingPermissionsValues.length > 0) { console.log( `userSurfacedExpectedPermissionsToExistingPermissions = ${JSON.stringify( [...userSurfacedExpectedPermissionsToExistingPermissions], null, 2, )}\n`, ); } if (existingPermissionsToExpectedPermissionsValues.length > 0) { console.log( `userSurfacedExistingPermissionsToExpectedPermissions = ${JSON.stringify( [...userSurfacedExistingPermissionsToExpectedPermissions], null, 2, )}`, ); } } } console.log('===================================='); } main([validateRolePermissions]); diff --git a/keyserver/src/socket/session-utils.js b/keyserver/src/socket/session-utils.js index 8fe71f856..12fc3d42e 100644 --- a/keyserver/src/socket/session-utils.js +++ b/keyserver/src/socket/session-utils.js @@ -1,559 +1,559 @@ // @flow import invariant from 'invariant'; import t from 'tcomb'; import type { TUnion } from 'tcomb'; import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import type { UpdateActivityResult, ActivityUpdate, } from 'lib/types/activity-types.js'; import type { IdentityKeysBlob } from 'lib/types/crypto-types.js'; import { isDeviceType } from 'lib/types/device-types.js'; import type { CalendarQuery, DeltaEntryInfosResponse, } from 'lib/types/entry-types.js'; import { reportTypes, type ThreadInconsistencyReportCreationRequest, type EntryInconsistencyReportCreationRequest, } from 'lib/types/report-types.js'; import { serverRequestTypes, type ThreadInconsistencyClientResponse, type EntryInconsistencyClientResponse, type ClientResponse, type ServerServerRequest, type ServerCheckStateServerRequest, } from 'lib/types/request-types.js'; import { sessionCheckFrequency } from 'lib/types/session-types.js'; import { signedIdentityKeysBlobValidator } from 'lib/utils/crypto-utils.js'; import { hash, values } from 'lib/utils/objects.js'; import { promiseAll } from 'lib/utils/promises.js'; import { tShape, tPlatform, tPlatformDetails, } from 'lib/utils/validation-utils.js'; import { createOlmSession } from '../creators/olm-session-creator.js'; import { saveOneTimeKeys } from '../creators/one-time-keys-creator.js'; import createReport from '../creators/report-creator.js'; import { fetchEntriesForSession } from '../fetchers/entry-fetchers.js'; import { checkIfSessionHasEnoughOneTimeKeys } from '../fetchers/key-fetchers.js'; import { activityUpdatesInputValidator } from '../responders/activity-responders.js'; import { handleAsyncPromise } from '../responders/handlers.js'; import { threadInconsistencyReportValidatorShape, entryInconsistencyReportValidatorShape, } from '../responders/report-responders.js'; import { setNewSession, setCookiePlatform, setCookiePlatformDetails, setCookieSignedIdentityKeysBlob, } from '../session/cookies.js'; import type { Viewer } from '../session/viewer.js'; import { serverStateSyncSpecs } from '../shared/state-sync/state-sync-specs.js'; import { activityUpdater } from '../updaters/activity-updaters.js'; import { compareNewCalendarQuery } from '../updaters/entry-updaters.js'; import type { SessionUpdate } from '../updaters/session-updaters.js'; import { getOlmUtility } from '../utils/olm-utils.js'; const clientResponseInputValidator: TUnion = t.union([ tShape({ type: t.irreducible( 'serverRequestTypes.PLATFORM', x => x === serverRequestTypes.PLATFORM, ), platform: tPlatform, }), tShape({ ...threadInconsistencyReportValidatorShape, type: t.irreducible( 'serverRequestTypes.THREAD_INCONSISTENCY', x => x === 
serverRequestTypes.THREAD_INCONSISTENCY, ), }), tShape({ ...entryInconsistencyReportValidatorShape, type: t.irreducible( 'serverRequestTypes.ENTRY_INCONSISTENCY', x => x === serverRequestTypes.ENTRY_INCONSISTENCY, ), }), tShape({ type: t.irreducible( 'serverRequestTypes.PLATFORM_DETAILS', x => x === serverRequestTypes.PLATFORM_DETAILS, ), platformDetails: tPlatformDetails, }), tShape({ type: t.irreducible( 'serverRequestTypes.CHECK_STATE', x => x === serverRequestTypes.CHECK_STATE, ), hashResults: t.dict(t.String, t.Boolean), }), tShape({ type: t.irreducible( 'serverRequestTypes.INITIAL_ACTIVITY_UPDATES', x => x === serverRequestTypes.INITIAL_ACTIVITY_UPDATES, ), activityUpdates: activityUpdatesInputValidator, }), tShape({ type: t.irreducible( 'serverRequestTypes.MORE_ONE_TIME_KEYS', x => x === serverRequestTypes.MORE_ONE_TIME_KEYS, ), keys: t.list(t.String), }), tShape({ type: t.irreducible( 'serverRequestTypes.SIGNED_IDENTITY_KEYS_BLOB', x => x === serverRequestTypes.SIGNED_IDENTITY_KEYS_BLOB, ), signedIdentityKeysBlob: signedIdentityKeysBlobValidator, }), tShape({ type: t.irreducible( 'serverRequestTypes.INITIAL_NOTIFICATIONS_ENCRYPTED_MESSAGE', x => x === serverRequestTypes.INITIAL_NOTIFICATIONS_ENCRYPTED_MESSAGE, ), initialNotificationsEncryptedMessage: t.String, }), ]); type StateCheckStatus = | { status: 'state_validated' } | { status: 'state_invalid', invalidKeys: $ReadOnlyArray } | { status: 'state_check' }; type ProcessClientResponsesResult = { serverRequests: ServerServerRequest[], stateCheckStatus: ?StateCheckStatus, activityUpdateResult: ?UpdateActivityResult, }; async function processClientResponses( viewer: Viewer, clientResponses: $ReadOnlyArray, ): Promise { let viewerMissingPlatform = !viewer.platform; const { platformDetails } = viewer; let viewerMissingPlatformDetails = !platformDetails || (isDeviceType(viewer.platform) && (platformDetails.codeVersion === null || platformDetails.codeVersion === undefined || platformDetails.stateVersion === null || platformDetails.stateVersion === undefined)); const promises = []; let activityUpdates: Array = []; let stateCheckStatus = null; const clientSentPlatformDetails = clientResponses.some( response => response.type === serverRequestTypes.PLATFORM_DETAILS, ); for (const clientResponse of clientResponses) { if ( clientResponse.type === serverRequestTypes.PLATFORM && !clientSentPlatformDetails ) { promises.push(setCookiePlatform(viewer, clientResponse.platform)); viewerMissingPlatform = false; if (!isDeviceType(clientResponse.platform)) { viewerMissingPlatformDetails = false; } } else if ( clientResponse.type === serverRequestTypes.THREAD_INCONSISTENCY ) { promises.push(recordThreadInconsistency(viewer, clientResponse)); } else if (clientResponse.type === serverRequestTypes.ENTRY_INCONSISTENCY) { promises.push(recordEntryInconsistency(viewer, clientResponse)); } else if (clientResponse.type === serverRequestTypes.PLATFORM_DETAILS) { promises.push( setCookiePlatformDetails(viewer, clientResponse.platformDetails), ); viewerMissingPlatform = false; viewerMissingPlatformDetails = false; } else if ( clientResponse.type === serverRequestTypes.INITIAL_ACTIVITY_UPDATES ) { activityUpdates = [...activityUpdates, ...clientResponse.activityUpdates]; } else if (clientResponse.type === serverRequestTypes.CHECK_STATE) { const invalidKeys = []; for (const key in clientResponse.hashResults) { const result = clientResponse.hashResults[key]; if (!result) { invalidKeys.push(key); } } stateCheckStatus = invalidKeys.length > 0 ? 
{ status: 'state_invalid', invalidKeys } : { status: 'state_validated' }; } else if (clientResponse.type === serverRequestTypes.MORE_ONE_TIME_KEYS) { invariant(clientResponse.keys, 'keys expected in client response'); handleAsyncPromise(saveOneTimeKeys(viewer, clientResponse.keys)); } else if ( clientResponse.type === serverRequestTypes.SIGNED_IDENTITY_KEYS_BLOB ) { invariant( clientResponse.signedIdentityKeysBlob, 'signedIdentityKeysBlob expected in client response', ); const { signedIdentityKeysBlob } = clientResponse; const identityKeys: IdentityKeysBlob = JSON.parse( signedIdentityKeysBlob.payload, ); const olmUtil = getOlmUtility(); try { olmUtil.ed25519_verify( identityKeys.primaryIdentityPublicKeys.ed25519, signedIdentityKeysBlob.payload, signedIdentityKeysBlob.signature, ); handleAsyncPromise( setCookieSignedIdentityKeysBlob( viewer.cookieID, signedIdentityKeysBlob, ), ); } catch (e) { continue; } } else if ( clientResponse.type === serverRequestTypes.INITIAL_NOTIFICATIONS_ENCRYPTED_MESSAGE ) { invariant( t.String.is(clientResponse.initialNotificationsEncryptedMessage), 'initialNotificationsEncryptedMessage expected in client response', ); const { initialNotificationsEncryptedMessage } = clientResponse; try { await createOlmSession( initialNotificationsEncryptedMessage, 'notifications', viewer.cookieID, ); } catch (e) { continue; } } } const activityUpdatePromise: Promise = (async () => { if (activityUpdates.length === 0) { return undefined; } return await activityUpdater(viewer, { updates: activityUpdates }); })(); const serverRequests: Array = []; const checkOneTimeKeysPromise = (async () => { if (!viewer.loggedIn) { return; } const enoughOneTimeKeys = await checkIfSessionHasEnoughOneTimeKeys( viewer.session, ); if (!enoughOneTimeKeys) { serverRequests.push({ type: serverRequestTypes.MORE_ONE_TIME_KEYS }); } })(); const { activityUpdateResult } = await promiseAll({ all: Promise.all(promises), activityUpdateResult: activityUpdatePromise, checkOneTimeKeysPromise, }); if ( !stateCheckStatus && viewer.loggedIn && viewer.sessionLastValidated + sessionCheckFrequency < Date.now() ) { stateCheckStatus = { status: 'state_check' }; } if (viewerMissingPlatform) { serverRequests.push({ type: serverRequestTypes.PLATFORM }); } if (viewerMissingPlatformDetails) { serverRequests.push({ type: serverRequestTypes.PLATFORM_DETAILS }); } return { serverRequests, stateCheckStatus, activityUpdateResult }; } async function recordThreadInconsistency( viewer: Viewer, response: ThreadInconsistencyClientResponse, ): Promise { const { type, ...rest } = response; const reportCreationRequest = ({ ...rest, type: reportTypes.THREAD_INCONSISTENCY, }: ThreadInconsistencyReportCreationRequest); await createReport(viewer, reportCreationRequest); } async function recordEntryInconsistency( viewer: Viewer, response: EntryInconsistencyClientResponse, ): Promise { const { type, ...rest } = response; const reportCreationRequest = ({ ...rest, type: reportTypes.ENTRY_INCONSISTENCY, }: EntryInconsistencyReportCreationRequest); await createReport(viewer, reportCreationRequest); } type SessionInitializationResult = | { sessionContinued: false } | { sessionContinued: true, deltaEntryInfoResult: DeltaEntryInfosResponse, sessionUpdate: SessionUpdate, }; async function initializeSession( viewer: Viewer, calendarQuery: CalendarQuery, oldLastUpdate: number, ): Promise { if (!viewer.loggedIn) { return { sessionContinued: false }; } if (!viewer.hasSessionInfo) { // If the viewer has no session info but is logged in, that is indicative 
// of an expired / invalidated session and we should generate a new one await setNewSession(viewer, calendarQuery, oldLastUpdate); return { sessionContinued: false }; } if (oldLastUpdate < viewer.sessionLastUpdated) { // If the client has an older last_update than the server is tracking for // that client, then the client either had some issue persisting its store, // or the user restored the client app from a backup. Either way, we should // invalidate the existing session, since the server has assumed that the // checkpoint is further along than it is on the client, and might not still // have all of the updates necessary to do an incremental update await setNewSession(viewer, calendarQuery, oldLastUpdate); return { sessionContinued: false }; } let comparisonResult = null; try { comparisonResult = compareNewCalendarQuery(viewer, calendarQuery); } catch (e) { if (e.message !== 'unknown_error') { throw e; } } if (comparisonResult) { const { difference, oldCalendarQuery } = comparisonResult; const sessionUpdate = { ...comparisonResult.sessionUpdate, lastUpdate: oldLastUpdate, }; const deltaEntryInfoResult = await fetchEntriesForSession( viewer, difference, oldCalendarQuery, ); return { sessionContinued: true, deltaEntryInfoResult, sessionUpdate }; } else { await setNewSession(viewer, calendarQuery, oldLastUpdate); return { sessionContinued: false }; } } type StateCheckResult = { sessionUpdate?: SessionUpdate, checkStateRequest?: ServerCheckStateServerRequest, }; async function checkState( viewer: Viewer, status: StateCheckStatus, ): Promise { if (status.status === 'state_validated') { return { sessionUpdate: { lastValidated: Date.now() } }; } else if (status.status === 'state_check') { const promises = Object.fromEntries( values(serverStateSyncSpecs).map(spec => [ spec.hashKey, (async () => { if ( !hasMinCodeVersion(viewer.platformDetails, { native: 267, web: 32, }) ) { const data = await spec.fetch(viewer); return hash(data); } const infosHash = await spec.fetchServerInfosHash(viewer); return infosHash; })(), ]), ); const hashesToCheck = await promiseAll(promises); const checkStateRequest = { type: serverRequestTypes.CHECK_STATE, hashesToCheck, }; return { checkStateRequest }; } const invalidKeys = new Set(status.invalidKeys); const shouldFetchAll = Object.fromEntries( values(serverStateSyncSpecs).map(spec => [ spec.hashKey, invalidKeys.has(spec.hashKey), ]), ); const idsToFetch = Object.fromEntries( values(serverStateSyncSpecs) .filter(spec => spec.innerHashSpec?.hashKey) - .map(spec => [spec.innerHashSpec?.hashKey, new Set()]), + .map(spec => [spec.innerHashSpec?.hashKey, new Set()]), ); for (const key of invalidKeys) { const [innerHashKey, id] = key.split('|'); if (innerHashKey && id) { idsToFetch[innerHashKey]?.add(id); } } const fetchPromises: { [string]: Promise } = {}; for (const spec of values(serverStateSyncSpecs)) { if (shouldFetchAll[spec.hashKey]) { fetchPromises[spec.hashKey] = spec.fetch(viewer); } else if (idsToFetch[spec.innerHashSpec?.hashKey]?.size > 0) { fetchPromises[spec.hashKey] = spec.fetch( viewer, idsToFetch[spec.innerHashSpec?.hashKey], ); } } const fetchedData = await promiseAll(fetchPromises); const specPerHashKey = Object.fromEntries( values(serverStateSyncSpecs).map(spec => [spec.hashKey, spec]), ); const specPerInnerHashKey = Object.fromEntries( values(serverStateSyncSpecs) .filter(spec => spec.innerHashSpec?.hashKey) .map(spec => [spec.innerHashSpec?.hashKey, spec]), ); const hashesToCheck: { [string]: number } = {}, failUnmentioned: { [string]: boolean } = 
{}, stateChanges: { [string]: mixed } = {}; for (const key of invalidKeys) { const spec = specPerHashKey[key]; const innerHashKey = spec?.innerHashSpec?.hashKey; const isTopLevelKey = !!spec; if (isTopLevelKey && innerHashKey) { // Instead of returning all the infos, we want to narrow down and figure // out which infos don't match first const infos = fetchedData[key]; // We have a type error here because in fact the relationship between // Infos and Info is not guaranteed to be like this. In particular, // currentUserStateSyncSpec does not match this pattern. But this code // doesn't fire for it because no innerHashSpec is defined const iterableInfos: { +[string]: mixed } = (infos: any); for (const infoID in iterableInfos) { let hashValue; if ( hasMinCodeVersion(viewer.platformDetails, { native: 267, web: 32, }) ) { // We have a type error here because Flow has no way to determine that // spec and infos are matched up hashValue = spec.getServerInfoHash((iterableInfos[infoID]: any)); } else { hashValue = hash(iterableInfos[infoID]); } hashesToCheck[`${innerHashKey}|${infoID}`] = hashValue; } failUnmentioned[key] = true; } else if (isTopLevelKey) { stateChanges[key] = fetchedData[key]; } else { const [keyPrefix, id] = key.split('|'); const innerSpec = specPerInnerHashKey[keyPrefix]; const innerHashSpec = innerSpec?.innerHashSpec; if (!innerHashSpec || !id) { continue; } const infos = fetchedData[innerSpec.hashKey]; // We have a type error here because in fact the relationship between // Infos and Info is not guaranteed to be like this. In particular, // currentUserStateSyncSpec does not match this pattern. But this code // doesn't fire for it because no innerHashSpec is defined const iterableInfos: { +[string]: mixed } = (infos: any); const info = iterableInfos[id]; // We have a type error here because Flow wants us to type iterableInfos // in this file, but we don't have access to the parameterization of // innerHashSpec here if (!info || innerHashSpec.additionalDeleteCondition?.((info: any))) { if (!stateChanges[innerHashSpec.deleteKey]) { stateChanges[innerHashSpec.deleteKey] = [id]; } else { // We have a type error here because in fact stateChanges values // aren't always arrays. In particular, currentUserStateSyncSpec does // not match this pattern. But this code doesn't fire for it because // no innerHashSpec is defined const curDeleteKeyChanges: Array = (stateChanges[ innerHashSpec.deleteKey ]: any); curDeleteKeyChanges.push(id); } continue; } if (!stateChanges[innerHashSpec.rawInfosKey]) { stateChanges[innerHashSpec.rawInfosKey] = [info]; } else { // We have a type error here because in fact stateChanges values aren't // always arrays. In particular, currentUserStateSyncSpec does not match // this pattern. But this code doesn't fire for it because no // innerHashSpec is defined const curRawInfosKeyChanges: Array = (stateChanges[ innerHashSpec.rawInfosKey ]: any); curRawInfosKeyChanges.push(info); } } } // We have a type error here because the keys that get set on some of these // collections aren't statically typed when they're set. 
Rather, they are set // as arbitrary strings const checkStateRequest: ServerCheckStateServerRequest = ({ type: serverRequestTypes.CHECK_STATE, hashesToCheck, failUnmentioned, stateChanges, }: any); if (Object.keys(hashesToCheck).length === 0) { return { checkStateRequest, sessionUpdate: { lastValidated: Date.now() } }; } else { return { checkStateRequest }; } } export { clientResponseInputValidator, processClientResponses, initializeSession, checkState, }; diff --git a/keyserver/src/socket/socket.js b/keyserver/src/socket/socket.js index eebef70e8..55433cda2 100644 --- a/keyserver/src/socket/socket.js +++ b/keyserver/src/socket/socket.js @@ -1,886 +1,886 @@ // @flow import type { $Request } from 'express'; import invariant from 'invariant'; import _debounce from 'lodash/debounce.js'; import t from 'tcomb'; import type { TUnion } from 'tcomb'; import WebSocket from 'ws'; import { baseLegalPolicies } from 'lib/facts/policies.js'; import { mostRecentMessageTimestamp } from 'lib/shared/message-utils.js'; import { isStaff } from 'lib/shared/staff-utils.js'; import { serverRequestSocketTimeout, serverResponseTimeout, } from 'lib/shared/timeouts.js'; import { mostRecentUpdateTimestamp } from 'lib/shared/update-utils.js'; import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import type { Shape } from 'lib/types/core.js'; import { endpointIsSocketSafe } from 'lib/types/endpoints.js'; import type { RawEntryInfo } from 'lib/types/entry-types.js'; import { defaultNumberPerThread } from 'lib/types/message-types.js'; import { redisMessageTypes, type RedisMessage } from 'lib/types/redis-types.js'; import { serverRequestTypes } from 'lib/types/request-types.js'; import { sessionCheckFrequency, stateCheckInactivityActivationInterval, } from 'lib/types/session-types.js'; import { type ClientSocketMessage, type InitialClientSocketMessage, type ResponsesClientSocketMessage, type ServerStateSyncFullSocketPayload, type ServerServerSocketMessage, type ErrorServerSocketMessage, type AuthErrorServerSocketMessage, type PingClientSocketMessage, type AckUpdatesClientSocketMessage, type APIRequestClientSocketMessage, clientSocketMessageTypes, stateSyncPayloadTypes, serverSocketMessageTypes, serverServerSocketMessageValidator, } from 'lib/types/socket-types.js'; import type { RawThreadInfos } from 'lib/types/thread-types.js'; import type { UserInfo, CurrentUserInfo } from 'lib/types/user-types.js'; import { ServerError } from 'lib/utils/errors.js'; import { values } from 'lib/utils/objects.js'; import { promiseAll } from 'lib/utils/promises.js'; import SequentialPromiseResolver from 'lib/utils/sequential-promise-resolver.js'; import sleep from 'lib/utils/sleep.js'; import { tShape, tCookie } from 'lib/utils/validation-utils.js'; import { RedisSubscriber } from './redis.js'; import { clientResponseInputValidator, processClientResponses, initializeSession, checkState, } from './session-utils.js'; import { fetchUpdateInfosWithRawUpdateInfos } from '../creators/update-creator.js'; import { deleteActivityForViewerSession } from '../deleters/activity-deleters.js'; import { deleteCookie } from '../deleters/cookie-deleters.js'; import { deleteUpdatesBeforeTimeTargetingSession } from '../deleters/update-deleters.js'; import { jsonEndpoints } from '../endpoints.js'; import { fetchMessageInfosSince, getMessageFetchResultFromRedisMessages, } from '../fetchers/message-fetchers.js'; import { fetchUpdateInfos } from '../fetchers/update-fetchers.js'; import { newEntryQueryInputValidator, verifyCalendarQueryThreadIDs, } 
from '../responders/entry-responders.js'; import { handleAsyncPromise } from '../responders/handlers.js'; import { fetchViewerForSocket, updateCookie, isCookieMissingSignedIdentityKeysBlob, isCookieMissingOlmNotificationsSession, createNewAnonymousCookie, } from '../session/cookies.js'; import { Viewer } from '../session/viewer.js'; import type { AnonymousViewerData } from '../session/viewer.js'; import { serverStateSyncSpecs } from '../shared/state-sync/state-sync-specs.js'; import { commitSessionUpdate } from '../updaters/session-updaters.js'; import { compressMessage } from '../utils/compress.js'; import { assertSecureRequest } from '../utils/security-utils.js'; import { checkInputValidator, checkClientSupported, policiesValidator, validateOutput, } from '../utils/validation-utils.js'; const clientSocketMessageInputValidator: TUnion = t.union([ - tShape({ + tShape({ type: t.irreducible( 'clientSocketMessageTypes.INITIAL', x => x === clientSocketMessageTypes.INITIAL, ), id: t.Number, payload: tShape({ sessionIdentification: tShape({ cookie: t.maybe(tCookie), sessionID: t.maybe(t.String), }), sessionState: tShape({ calendarQuery: newEntryQueryInputValidator, messagesCurrentAsOf: t.Number, updatesCurrentAsOf: t.Number, watchedIDs: t.list(t.String), }), clientResponses: t.list(clientResponseInputValidator), }), }), - tShape({ + tShape({ type: t.irreducible( 'clientSocketMessageTypes.RESPONSES', x => x === clientSocketMessageTypes.RESPONSES, ), id: t.Number, payload: tShape({ clientResponses: t.list(clientResponseInputValidator), }), }), - tShape({ + tShape({ type: t.irreducible( 'clientSocketMessageTypes.PING', x => x === clientSocketMessageTypes.PING, ), id: t.Number, }), - tShape({ + tShape({ type: t.irreducible( 'clientSocketMessageTypes.ACK_UPDATES', x => x === clientSocketMessageTypes.ACK_UPDATES, ), id: t.Number, payload: tShape({ currentAsOf: t.Number, }), }), - tShape({ + tShape({ type: t.irreducible( 'clientSocketMessageTypes.API_REQUEST', x => x === clientSocketMessageTypes.API_REQUEST, ), id: t.Number, payload: tShape({ endpoint: t.String, input: t.maybe(t.Object), }), }), ]); function onConnection(ws: WebSocket, req: $Request) { assertSecureRequest(req); new Socket(ws, req); } type StateCheckConditions = { activityRecentlyOccurred: boolean, stateCheckOngoing: boolean, }; const minVersionsForCompression = { native: 265, web: 30, }; class Socket { ws: WebSocket; httpRequest: $Request; viewer: ?Viewer; redis: ?RedisSubscriber; redisPromiseResolver: SequentialPromiseResolver; stateCheckConditions: StateCheckConditions = { activityRecentlyOccurred: true, stateCheckOngoing: false, }; stateCheckTimeoutID: ?TimeoutID; constructor(ws: WebSocket, httpRequest: $Request) { this.ws = ws; this.httpRequest = httpRequest; ws.on('message', this.onMessage); ws.on('close', this.onClose); this.resetTimeout(); this.redisPromiseResolver = new SequentialPromiseResolver(this.sendMessage); } onMessage = async ( messageString: string | Buffer | ArrayBuffer | Array, ) => { invariant(typeof messageString === 'string', 'message should be string'); let clientSocketMessage: ?ClientSocketMessage; try { this.resetTimeout(); const messageObject = JSON.parse(messageString); clientSocketMessage = checkInputValidator( clientSocketMessageInputValidator, messageObject, ); if (clientSocketMessage.type === clientSocketMessageTypes.INITIAL) { if (this.viewer) { // This indicates that the user sent multiple INITIAL messages. 
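// To make the INITIAL handshake concrete, a minimal sketch of the single
// INITIAL message a client is expected to send right after the socket opens.
// The shape follows clientSocketMessageInputValidator above; the specific
// values here are hypothetical, and the inner calendarQuery fields are an
// assumption. A second INITIAL on the same socket hits the
// 'socket_already_initialized' error thrown just below.
const exampleInitialMessage = {
  type: clientSocketMessageTypes.INITIAL,
  id: 1,
  payload: {
    // Identifies the session; a cookie and/or sessionID may be provided
    sessionIdentification: { cookie: null, sessionID: null },
    // The client's current sync checkpoints
    sessionState: {
      calendarQuery: { startDate: '2023-01-01', endDate: '2023-02-01', filters: [] },
      messagesCurrentAsOf: 0,
      updatesCurrentAsOf: 0,
      watchedIDs: [],
    },
    clientResponses: [],
  },
};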
throw new ServerError('socket_already_initialized'); } this.viewer = await fetchViewerForSocket( this.httpRequest, clientSocketMessage, ); } const { viewer } = this; if (!viewer) { // This indicates a non-INITIAL message was sent by the client before // the INITIAL message. throw new ServerError('socket_uninitialized'); } if (viewer.sessionChanged) { // This indicates that the cookie was invalid, and we've assigned a new // anonymous one. throw new ServerError('socket_deauthorized'); } if (!viewer.loggedIn) { // This indicates that the specified cookie was an anonymous one. throw new ServerError('not_logged_in'); } await checkClientSupported( viewer, clientSocketMessageInputValidator, clientSocketMessage, ); await policiesValidator(viewer, baseLegalPolicies); const serverResponses = await this.handleClientSocketMessage( clientSocketMessage, ); if (!this.redis) { this.redis = new RedisSubscriber( { userID: viewer.userID, sessionID: viewer.session }, this.onRedisMessage, ); } if (viewer.sessionChanged) { // This indicates that something has caused the session to change, which // shouldn't happen from inside a WebSocket since we can't handle cookie // invalidation. throw new ServerError('session_mutated_from_socket'); } if (clientSocketMessage.type !== clientSocketMessageTypes.PING) { handleAsyncPromise(updateCookie(viewer)); } for (const response of serverResponses) { // Normally it's an anti-pattern to await in sequence like this. But in // this case, we have a requirement that this array of serverResponses // is delivered in order. See here: // https://github.com/CommE2E/comm/blob/101eb34481deb49c609bfd2c785f375886e52666/keyserver/src/socket/socket.js#L566-L568 await this.sendMessage(response); } if (clientSocketMessage.type === clientSocketMessageTypes.INITIAL) { this.onSuccessfulConnection(); } } catch (error) { console.warn(error); if (!(error instanceof ServerError)) { const errorMessage: ErrorServerSocketMessage = { type: serverSocketMessageTypes.ERROR, message: error.message, }; const responseTo = clientSocketMessage ? clientSocketMessage.id : null; if (responseTo !== null) { errorMessage.responseTo = responseTo; } this.markActivityOccurred(); await this.sendMessage(errorMessage); return; } invariant(clientSocketMessage, 'should be set'); const responseTo = clientSocketMessage.id; if (error.message === 'socket_deauthorized') { invariant(this.viewer, 'should be set'); const authErrorMessage: AuthErrorServerSocketMessage = { type: serverSocketMessageTypes.AUTH_ERROR, responseTo, message: error.message, sessionChange: { cookie: this.viewer.cookiePairString, currentUserInfo: { anonymous: true, }, }, }; await this.sendMessage(authErrorMessage); this.ws.close(4100, error.message); return; } else if (error.message === 'client_version_unsupported') { const { viewer } = this; invariant(viewer, 'should be set'); const anonymousViewerDataPromise: Promise = createNewAnonymousCookie({ platformDetails: error.platformDetails, deviceToken: viewer.deviceToken, }); const deleteCookiePromise = deleteCookie(viewer.cookieID); const [anonymousViewerData] = await Promise.all([ anonymousViewerDataPromise, deleteCookiePromise, ]); // It is normally not safe to pass the result of // createNewAnonymousCookie to the Viewer constructor. That is because // createNewAnonymousCookie leaves several fields of // AnonymousViewerData unset, and consequently Viewer will throw when // access is attempted. 
It is only safe here because we can guarantee // that only cookiePairString and cookieID are accessed on anonViewer // below. const anonViewer = new Viewer(anonymousViewerData); const authErrorMessage: AuthErrorServerSocketMessage = { type: serverSocketMessageTypes.AUTH_ERROR, responseTo, message: error.message, sessionChange: { cookie: anonViewer.cookiePairString, currentUserInfo: { anonymous: true, }, }, }; await this.sendMessage(authErrorMessage); this.ws.close(4101, error.message); return; } if (error.payload) { await this.sendMessage({ type: serverSocketMessageTypes.ERROR, responseTo, message: error.message, payload: error.payload, }); } else { await this.sendMessage({ type: serverSocketMessageTypes.ERROR, responseTo, message: error.message, }); } if (error.message === 'not_logged_in') { this.ws.close(4102, error.message); } else if (error.message === 'session_mutated_from_socket') { this.ws.close(4103, error.message); } else { this.markActivityOccurred(); } } }; onClose = async () => { this.clearStateCheckTimeout(); this.resetTimeout.cancel(); this.debouncedAfterActivity.cancel(); if (this.viewer && this.viewer.hasSessionInfo) { await deleteActivityForViewerSession(this.viewer); } if (this.redis) { this.redis.quit(); this.redis = null; } }; sendMessage = async (message: ServerServerSocketMessage) => { invariant( this.ws.readyState > 0, "shouldn't send message until connection established", ); if (this.ws.readyState !== 1) { return; } const { viewer } = this; const validatedMessage = validateOutput( viewer?.platformDetails, serverServerSocketMessageValidator, message, ); const stringMessage = JSON.stringify(validatedMessage); if ( !viewer?.platformDetails || !hasMinCodeVersion(viewer.platformDetails, minVersionsForCompression) || !isStaff(viewer.id) ) { this.ws.send(stringMessage); return; } const compressionResult = await compressMessage(stringMessage); if (this.ws.readyState !== 1) { return; } if (!compressionResult.compressed) { this.ws.send(stringMessage); return; } const compressedMessage = { type: serverSocketMessageTypes.COMPRESSED_MESSAGE, payload: compressionResult.result, }; const validatedCompressedMessage = validateOutput( viewer?.platformDetails, serverServerSocketMessageValidator, compressedMessage, ); const stringCompressedMessage = JSON.stringify(validatedCompressedMessage); this.ws.send(stringCompressedMessage); }; async handleClientSocketMessage( message: ClientSocketMessage, ): Promise { const resultPromise: Promise = (async () => { if (message.type === clientSocketMessageTypes.INITIAL) { this.markActivityOccurred(); return await this.handleInitialClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.RESPONSES) { this.markActivityOccurred(); return await this.handleResponsesClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.PING) { return this.handlePingClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.ACK_UPDATES) { this.markActivityOccurred(); return await this.handleAckUpdatesClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.API_REQUEST) { this.markActivityOccurred(); return await this.handleAPIRequestClientSocketMessage(message); } return []; })(); const timeoutPromise: Promise = (async () => { await sleep(serverResponseTimeout); throw new ServerError('socket_response_timeout'); })(); return await Promise.race([resultPromise, timeoutPromise]); } async handleInitialClientSocketMessage( message: InitialClientSocketMessage, ): Promise { 
const { viewer } = this; invariant(viewer, 'should be set'); const responses: Array = []; const { sessionState, clientResponses } = message.payload; const { calendarQuery, updatesCurrentAsOf: oldUpdatesCurrentAsOf, messagesCurrentAsOf: oldMessagesCurrentAsOf, watchedIDs, } = sessionState; await verifyCalendarQueryThreadIDs(calendarQuery); const sessionInitializationResult = await initializeSession( viewer, calendarQuery, oldUpdatesCurrentAsOf, ); const threadCursors: { [string]: null } = {}; for (const watchedThreadID of watchedIDs) { threadCursors[watchedThreadID] = null; } const messageSelectionCriteria = { threadCursors, joinedThreads: true, newerThan: oldMessagesCurrentAsOf, }; const [fetchMessagesResult, { serverRequests, activityUpdateResult }] = await Promise.all([ fetchMessageInfosSince( viewer, messageSelectionCriteria, defaultNumberPerThread, ), processClientResponses(viewer, clientResponses), ]); const messagesResult = { rawMessageInfos: fetchMessagesResult.rawMessageInfos, truncationStatuses: fetchMessagesResult.truncationStatuses, currentAsOf: mostRecentMessageTimestamp( fetchMessagesResult.rawMessageInfos, oldMessagesCurrentAsOf, ), }; const isCookieMissingSignedIdentityKeysBlobPromise = isCookieMissingSignedIdentityKeysBlob(viewer.cookieID); const isCookieMissingOlmNotificationsSessionPromise = isCookieMissingOlmNotificationsSession(viewer); if (!sessionInitializationResult.sessionContinued) { const promises: { +[string]: Promise } = Object.fromEntries( values(serverStateSyncSpecs).map(spec => [ spec.hashKey, spec.fetchFullSocketSyncPayload(viewer, [calendarQuery]), ]), ); // We have a type error here because Flow doesn't know spec.hashKey const castPromises: { +threadInfos: Promise, +currentUserInfo: Promise, +entryInfos: Promise<$ReadOnlyArray>, +userInfos: Promise<$ReadOnlyArray>, } = (promises: any); const results = await promiseAll(castPromises); const payload: ServerStateSyncFullSocketPayload = { type: stateSyncPayloadTypes.FULL, messagesResult, threadInfos: results.threadInfos, currentUserInfo: results.currentUserInfo, rawEntryInfos: results.entryInfos, userInfos: results.userInfos, updatesCurrentAsOf: oldUpdatesCurrentAsOf, }; if (viewer.sessionChanged) { // If initializeSession encounters, // sessionIdentifierTypes.BODY_SESSION_ID but the session // is unspecified or expired, // it will set a new sessionID and specify viewer.sessionChanged const { sessionID } = viewer; invariant( sessionID !== null && sessionID !== undefined, 'should be set', ); payload.sessionID = sessionID; viewer.sessionChanged = false; } responses.push({ type: serverSocketMessageTypes.STATE_SYNC, responseTo: message.id, payload, }); } else { const { sessionUpdate, deltaEntryInfoResult } = sessionInitializationResult; const deleteExpiredUpdatesPromise = deleteUpdatesBeforeTimeTargetingSession(viewer, oldUpdatesCurrentAsOf); const fetchUpdateResultPromise = fetchUpdateInfos( viewer, oldUpdatesCurrentAsOf, calendarQuery, ); const sessionUpdatePromise = commitSessionUpdate(viewer, sessionUpdate); const [fetchUpdateResult] = await Promise.all([ fetchUpdateResultPromise, deleteExpiredUpdatesPromise, sessionUpdatePromise, ]); const { updateInfos, userInfos } = fetchUpdateResult; const newUpdatesCurrentAsOf = mostRecentUpdateTimestamp( [...updateInfos], oldUpdatesCurrentAsOf, ); const updatesResult = { newUpdates: updateInfos, currentAsOf: newUpdatesCurrentAsOf, }; responses.push({ type: serverSocketMessageTypes.STATE_SYNC, responseTo: message.id, payload: { type: stateSyncPayloadTypes.INCREMENTAL, 
messagesResult, updatesResult, deltaEntryInfos: deltaEntryInfoResult.rawEntryInfos, deletedEntryIDs: deltaEntryInfoResult.deletedEntryIDs, userInfos: values(userInfos), }, }); } const [signedIdentityKeysBlobMissing, olmNotificationsSessionMissing] = await Promise.all([ isCookieMissingSignedIdentityKeysBlobPromise, isCookieMissingOlmNotificationsSessionPromise, ]); if (signedIdentityKeysBlobMissing) { serverRequests.push({ type: serverRequestTypes.SIGNED_IDENTITY_KEYS_BLOB, }); } if (olmNotificationsSessionMissing) { serverRequests.push({ type: serverRequestTypes.INITIAL_NOTIFICATIONS_ENCRYPTED_MESSAGE, }); } if (serverRequests.length > 0 || clientResponses.length > 0) { // We send this message first since the STATE_SYNC triggers the client's // connection status to shift to "connected", and we want to make sure the // client responses are cleared from Redux before that happens responses.unshift({ type: serverSocketMessageTypes.REQUESTS, responseTo: message.id, payload: { serverRequests }, }); } if (activityUpdateResult) { // Same reason for unshifting as above responses.unshift({ type: serverSocketMessageTypes.ACTIVITY_UPDATE_RESPONSE, responseTo: message.id, payload: activityUpdateResult, }); } return responses; } async handleResponsesClientSocketMessage( message: ResponsesClientSocketMessage, ): Promise { const { viewer } = this; invariant(viewer, 'should be set'); const { clientResponses } = message.payload; const { stateCheckStatus } = await processClientResponses( viewer, clientResponses, ); const serverRequests = []; if (stateCheckStatus && stateCheckStatus.status !== 'state_check') { const { sessionUpdate, checkStateRequest } = await checkState( viewer, stateCheckStatus, ); if (sessionUpdate) { await commitSessionUpdate(viewer, sessionUpdate); this.setStateCheckConditions({ stateCheckOngoing: false }); } if (checkStateRequest) { serverRequests.push(checkStateRequest); } } // We send a response message regardless of whether we have any requests, // since we need to ack the client's responses return [ { type: serverSocketMessageTypes.REQUESTS, responseTo: message.id, payload: { serverRequests }, }, ]; } handlePingClientSocketMessage( message: PingClientSocketMessage, ): ServerServerSocketMessage[] { return [ { type: serverSocketMessageTypes.PONG, responseTo: message.id, }, ]; } async handleAckUpdatesClientSocketMessage( message: AckUpdatesClientSocketMessage, ): Promise { const { viewer } = this; invariant(viewer, 'should be set'); const { currentAsOf } = message.payload; await Promise.all([ deleteUpdatesBeforeTimeTargetingSession(viewer, currentAsOf), commitSessionUpdate(viewer, { lastUpdate: currentAsOf }), ]); return []; } async handleAPIRequestClientSocketMessage( message: APIRequestClientSocketMessage, ): Promise { if (!endpointIsSocketSafe(message.payload.endpoint)) { throw new ServerError('endpoint_unsafe_for_socket'); } const { viewer } = this; invariant(viewer, 'should be set'); const responder = jsonEndpoints[message.payload.endpoint]; await policiesValidator(viewer, responder.requiredPolicies); const response = await responder.responder(viewer, message.payload.input); return [ { type: serverSocketMessageTypes.API_RESPONSE, responseTo: message.id, payload: response, }, ]; } onRedisMessage = async (message: RedisMessage) => { try { await this.processRedisMessage(message); } catch (e) { console.warn(e); } }; async processRedisMessage(message: RedisMessage) { if (message.type === redisMessageTypes.START_SUBSCRIPTION) { this.ws.terminate(); } else if (message.type === 
redisMessageTypes.NEW_UPDATES) { const { viewer } = this; invariant(viewer, 'should be set'); if (message.ignoreSession && message.ignoreSession === viewer.session) { return; } const rawUpdateInfos = message.updates; this.redisPromiseResolver.add( (async () => { const { updateInfos, userInfos } = await fetchUpdateInfosWithRawUpdateInfos(rawUpdateInfos, { viewer, }); if (updateInfos.length === 0) { console.warn( 'could not get any UpdateInfos from redisMessageTypes.NEW_UPDATES', ); return null; } this.markActivityOccurred(); return { type: serverSocketMessageTypes.UPDATES, payload: { updatesResult: { currentAsOf: mostRecentUpdateTimestamp([...updateInfos], 0), newUpdates: updateInfos, }, userInfos: values(userInfos), }, }; })(), ); } else if (message.type === redisMessageTypes.NEW_MESSAGES) { const { viewer } = this; invariant(viewer, 'should be set'); const rawMessageInfos = message.messages; const messageFetchResult = getMessageFetchResultFromRedisMessages( viewer, rawMessageInfos, ); if (messageFetchResult.rawMessageInfos.length === 0) { console.warn( 'could not get any rawMessageInfos from ' + 'redisMessageTypes.NEW_MESSAGES', ); return; } this.redisPromiseResolver.add( (async () => { this.markActivityOccurred(); return { type: serverSocketMessageTypes.MESSAGES, payload: { messagesResult: { rawMessageInfos: messageFetchResult.rawMessageInfos, truncationStatuses: messageFetchResult.truncationStatuses, currentAsOf: mostRecentMessageTimestamp( messageFetchResult.rawMessageInfos, 0, ), }, }, }; })(), ); } } onSuccessfulConnection() { if (this.ws.readyState !== 1) { return; } this.handleStateCheckConditionsUpdate(); } // The Socket will timeout by calling this.ws.terminate() // serverRequestSocketTimeout milliseconds after the last // time resetTimeout is called resetTimeout = _debounce( () => this.ws.terminate(), serverRequestSocketTimeout, ); debouncedAfterActivity = _debounce( () => this.setStateCheckConditions({ activityRecentlyOccurred: false }), stateCheckInactivityActivationInterval, ); markActivityOccurred = () => { if (this.ws.readyState !== 1) { return; } this.setStateCheckConditions({ activityRecentlyOccurred: true }); this.debouncedAfterActivity(); }; clearStateCheckTimeout() { const { stateCheckTimeoutID } = this; if (stateCheckTimeoutID) { clearTimeout(stateCheckTimeoutID); this.stateCheckTimeoutID = null; } } setStateCheckConditions(newConditions: Shape) { this.stateCheckConditions = { ...this.stateCheckConditions, ...newConditions, }; this.handleStateCheckConditionsUpdate(); } get stateCheckCanStart() { return Object.values(this.stateCheckConditions).every(cond => !cond); } handleStateCheckConditionsUpdate() { if (!this.stateCheckCanStart) { this.clearStateCheckTimeout(); return; } if (this.stateCheckTimeoutID) { return; } const { viewer } = this; if (!viewer) { return; } const timeUntilStateCheck = viewer.sessionLastValidated + sessionCheckFrequency - Date.now(); if (timeUntilStateCheck <= 0) { this.initiateStateCheck(); } else { this.stateCheckTimeoutID = setTimeout( this.initiateStateCheck, timeUntilStateCheck, ); } } initiateStateCheck = async () => { this.setStateCheckConditions({ stateCheckOngoing: true }); const { viewer } = this; invariant(viewer, 'should be set'); const { checkStateRequest } = await checkState(viewer, { status: 'state_check', }); invariant(checkStateRequest, 'should be set'); await this.sendMessage({ type: serverSocketMessageTypes.REQUESTS, payload: { serverRequests: [checkStateRequest] }, }); }; } export { onConnection }; diff --git 
a/keyserver/src/updaters/activity-updaters.js b/keyserver/src/updaters/activity-updaters.js index c93ff559b..607330fb7 100644 --- a/keyserver/src/updaters/activity-updaters.js +++ b/keyserver/src/updaters/activity-updaters.js @@ -1,513 +1,514 @@ // @flow import invariant from 'invariant'; import _difference from 'lodash/fp/difference.js'; import _max from 'lodash/fp/max.js'; import { localIDPrefix } from 'lib/shared/message-utils.js'; import type { + ActivityUpdate, UpdateActivityResult, UpdateActivityRequest, SetThreadUnreadStatusRequest, SetThreadUnreadStatusResult, } from 'lib/types/activity-types.js'; import { messageTypes } from 'lib/types/message-types-enum.js'; import { threadPermissions } from 'lib/types/thread-permission-types.js'; import { updateTypes } from 'lib/types/update-types-enum.js'; import { ServerError } from 'lib/utils/errors.js'; import { createUpdates } from '../creators/update-creator.js'; import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; import type { SQLStatementType } from '../database/types.js'; import { deleteActivityForViewerSession } from '../deleters/activity-deleters.js'; import { checkThread, getValidThreads, } from '../fetchers/thread-permission-fetchers.js'; import { rescindPushNotifs } from '../push/rescind.js'; import { updateBadgeCount } from '../push/send.js'; import type { Viewer } from '../session/viewer.js'; import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times.js'; type PartialThreadStatus = { +focusActive: boolean, +threadID: string, +newLastReadMessage: ?number, }; type ThreadStatus = | { +focusActive: true, +threadID: string, +newLastReadMessage: number, +curLastReadMessage: number, +rescindCondition: SQLStatementType, } | { +focusActive: false, +threadID: string, +newLastReadMessage: ?number, +curLastReadMessage: number, +rescindCondition: ?SQLStatementType, +newerMessageFromOtherAuthor: boolean, }; async function activityUpdater( viewer: Viewer, request: UpdateActivityRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } - const focusUpdatesByThreadID = new Map(); + const focusUpdatesByThreadID = new Map<string, Array<ActivityUpdate>>(); for (const activityUpdate of request.updates) { const threadID = activityUpdate.threadID; const updatesForThreadID = focusUpdatesByThreadID.get(threadID) ??
[]; if (!focusUpdatesByThreadID.has(threadID)) { focusUpdatesByThreadID.set(threadID, updatesForThreadID); } updatesForThreadID.push(activityUpdate); } const unverifiedThreadIDs: $ReadOnlySet = new Set( request.updates.map(update => update.threadID), ); const verifiedThreadsData = await getValidThreads( viewer, [...unverifiedThreadIDs], [ { check: 'permission', permission: threadPermissions.VISIBLE, }, ], ); if (verifiedThreadsData.length === 0) { return { unfocusedToUnread: [] }; } - const memberThreadIDs = new Set(); + const memberThreadIDs = new Set<string>(); const verifiedThreadIDs = []; for (const threadData of verifiedThreadsData) { if (threadData.role > 0) { memberThreadIDs.add(threadData.threadID); } verifiedThreadIDs.push(threadData.threadID); } const partialThreadStatuses: PartialThreadStatus[] = []; for (const threadID of verifiedThreadIDs) { const focusUpdates = focusUpdatesByThreadID.get(threadID); invariant(focusUpdates, `no focusUpdate for thread ID ${threadID}`); const focusActive = !focusUpdates.some(update => !update.focus); const newLastReadMessage = _max( focusUpdates .filter( update => update.latestMessage && !update.latestMessage.startsWith(localIDPrefix), ) .map(update => parseInt(update.latestMessage)), ); partialThreadStatuses.push({ threadID, focusActive, newLastReadMessage, }); } // We update the focused rows before we check for new messages so we can // guarantee that any messages that may have set the thread to unread before // we set it to focused are caught and overridden await updateFocusedRows(viewer, partialThreadStatuses); if (memberThreadIDs.size === 0) { return { unfocusedToUnread: [] }; } const memberPartialThreadStatuses = partialThreadStatuses.filter( partialStatus => memberThreadIDs.has(partialStatus.threadID), ); const unfocusedLatestMessages = new Map(); for (const partialThreadStatus of memberPartialThreadStatuses) { const { threadID, focusActive, newLastReadMessage } = partialThreadStatus; if (!focusActive) { unfocusedLatestMessages.set(threadID, newLastReadMessage ?? 0); } } const [unfocusedThreadsWithNewerMessages, lastMessageInfos] = await Promise.all([ checkForNewerMessages(viewer, unfocusedLatestMessages), fetchLastMessageInfo(viewer, [...memberThreadIDs]), ]); const threadStatuses: ThreadStatus[] = []; for (const partialThreadStatus of memberPartialThreadStatuses) { const { threadID, focusActive, newLastReadMessage } = partialThreadStatus; const lastMessageInfo = lastMessageInfos.get(threadID); invariant( lastMessageInfo !== undefined, `no lastMessageInfo for thread ID ${threadID}`, ); const { lastMessage, lastReadMessage: curLastReadMessage } = lastMessageInfo; if (focusActive) { threadStatuses.push({ focusActive: true, threadID, newLastReadMessage: newLastReadMessage ? Math.max(lastMessage, newLastReadMessage) : lastMessage, curLastReadMessage, rescindCondition: SQL`n.thread = ${threadID}`, }); } else { threadStatuses.push({ focusActive: false, threadID, newLastReadMessage, curLastReadMessage, rescindCondition: newLastReadMessage ?
SQL`(n.thread = ${threadID} AND n.message <= ${newLastReadMessage})` : null, newerMessageFromOtherAuthor: unfocusedThreadsWithNewerMessages.has(threadID), }); } } // The following block determines whether to enqueue updates for a given // (user, thread) pair and whether to propagate badge count notifs to all of // that user's devices const setUnread: Array<{ +threadID: string, +unread: boolean }> = []; for (const threadStatus of threadStatuses) { const { threadID, curLastReadMessage } = threadStatus; if (!threadStatus.focusActive) { const { newLastReadMessage, newerMessageFromOtherAuthor } = threadStatus; if (newerMessageFromOtherAuthor) { setUnread.push({ threadID, unread: true }); } else if (!newLastReadMessage) { // This is a rare edge case. It should only be possible for threads that // have zero messages on both the client and server, which shouldn't // happen. In this case we'll set the thread to read, just in case... console.warn(`thread ID ${threadID} appears to have no messages`); setUnread.push({ threadID, unread: false }); } else if (newLastReadMessage > curLastReadMessage) { setUnread.push({ threadID, unread: false }); } } else { const { newLastReadMessage } = threadStatus; if (newLastReadMessage > curLastReadMessage) { setUnread.push({ threadID, unread: false }); } } } const time = Date.now(); const updateDatas = setUnread.map(({ threadID, unread }) => ({ type: updateTypes.UPDATE_THREAD_READ_STATUS, userID: viewer.userID, time, threadID, unread, })); const latestMessages = new Map(); for (const threadStatus of threadStatuses) { const { threadID, newLastReadMessage, curLastReadMessage } = threadStatus; if (newLastReadMessage && newLastReadMessage > curLastReadMessage) { latestMessages.set(threadID, newLastReadMessage); } } await Promise.all([ updateLastReadMessage(viewer, latestMessages), createUpdates(updateDatas, { viewer, updatesForCurrentSession: 'ignore' }), ]); // We do this afterwards so the badge count is correct const rescindConditions = threadStatuses .map(({ rescindCondition }) => rescindCondition) .filter(Boolean); let rescindCondition; if (rescindConditions.length > 0) { rescindCondition = SQL`n.user = ${viewer.userID} AND `; rescindCondition.append(mergeOrConditions(rescindConditions)); } await rescindAndUpdateBadgeCounts( viewer, rescindCondition, updateDatas.length > 0 ? 'activity_update' : null, ); return { unfocusedToUnread: [...unfocusedThreadsWithNewerMessages] }; } async function updateFocusedRows( viewer: Viewer, partialThreadStatuses: $ReadOnlyArray, ): Promise { const threadIDs = partialThreadStatuses .filter(threadStatus => threadStatus.focusActive) .map(({ threadID }) => threadID); const time = Date.now(); if (threadIDs.length > 0) { const focusedInsertRows = threadIDs.map(threadID => [ viewer.userID, viewer.session, threadID, time, ]); const query = SQL` INSERT INTO focused (user, session, thread, time) VALUES ${focusedInsertRows} ON DUPLICATE KEY UPDATE time = VALUE(time) `; await dbQuery(query); } if (viewer.hasSessionInfo) { await deleteActivityForViewerSession(viewer, time); } } // To protect against a possible race condition, we reset the thread to unread // if the latest message ID on the client at the time that focus was dropped // is no longer the latest message ID. // Returns the set of unfocused threads that should be set to unread on // the client because a new message arrived since they were unfocused. 
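// As a simplified illustration of the per-thread check the function below
// performs (a hypothetical helper, not part of this diff): a thread is reset
// to unread only when the newest message from another user on the server is
// newer than the latest message the client reported when it dropped focus.
function newerMessageArrivedSinceUnfocus(
  clientLatestMessage: number,
  serverLatestMessage: number,
): boolean {
  // serverLatestMessage corresponds to MAX(m.id) over other users' messages;
  // clientLatestMessage is what the client sent in its activity update
  return clientLatestMessage < serverLatestMessage;
}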
async function checkForNewerMessages( viewer: Viewer, latestMessages: Map, ): Promise> { if (latestMessages.size === 0 || !viewer.loggedIn) { return new Set(); } const unfocusedThreadIDs = [...latestMessages.keys()]; const focusedElsewhereThreadIDs = await checkThreadsFocused( viewer, unfocusedThreadIDs, ); const unreadCandidates = _difference(unfocusedThreadIDs)( focusedElsewhereThreadIDs, ); if (unreadCandidates.length === 0) { return new Set(); } const knowOfExtractString = `$.${threadPermissions.KNOW_OF}.value`; const query = SQL` SELECT m.thread, MAX(m.id) AS latest_message FROM messages m LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewer.userID} WHERE m.thread IN (${unreadCandidates}) AND m.user != ${viewer.userID} AND ( m.type != ${messageTypes.CREATE_SUB_THREAD} OR JSON_EXTRACT(stm.permissions, ${knowOfExtractString}) IS TRUE ) GROUP BY m.thread `; const [result] = await dbQuery(query); - const threadsWithNewerMessages = new Set(); + const threadsWithNewerMessages = new Set(); for (const row of result) { const threadID = row.thread.toString(); const serverLatestMessage = row.latest_message; const clientLatestMessage = latestMessages.get(threadID); if (clientLatestMessage < serverLatestMessage) { threadsWithNewerMessages.add(threadID); } } return threadsWithNewerMessages; } async function checkThreadsFocused( viewer: Viewer, threadIDs: $ReadOnlyArray, ): Promise { const time = earliestFocusedTimeConsideredExpired(); const query = SQL` SELECT thread FROM focused WHERE time > ${time} AND user = ${viewer.userID} AND thread IN (${threadIDs}) GROUP BY thread `; const [result] = await dbQuery(query); const focusedThreadIDs = []; for (const row of result) { focusedThreadIDs.push(row.thread.toString()); } return focusedThreadIDs; } async function updateLastReadMessage( viewer: Viewer, lastReadMessages: $ReadOnlyMap, ) { if (lastReadMessages.size === 0) { return; } const query = SQL` UPDATE memberships SET last_read_message = GREATEST(last_read_message, CASE `; lastReadMessages.forEach((lastMessage, threadID) => { query.append(SQL` WHEN thread = ${threadID} THEN ${lastMessage} `); }); query.append(SQL` ELSE last_read_message END) WHERE thread IN (${[...lastReadMessages.keys()]}) AND user = ${viewer.userID} `); await dbQuery(query); } async function setThreadUnreadStatus( viewer: Viewer, request: SetThreadUnreadStatusRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const isMemberAndCanViewThread = await checkThread(viewer, request.threadID, [ { check: 'is_member', }, { check: 'permission', permission: threadPermissions.VISIBLE, }, ]); if (!isMemberAndCanViewThread) { throw new ServerError('invalid_parameters'); } const resetThreadToUnread = await shouldResetThreadToUnread(viewer, request); if (!resetThreadToUnread) { const lastReadMessage = request.unread ? SQL`0` : SQL`GREATEST(m.last_read_message, ${request.latestMessage ?? 
0})`; const update = SQL` UPDATE memberships m SET m.last_read_message = `; update.append(lastReadMessage); update.append(SQL` WHERE m.thread = ${request.threadID} AND m.user = ${viewer.userID} `); const queryPromise = dbQuery(update); const time = Date.now(); const updatesPromise = createUpdates( [ { type: updateTypes.UPDATE_THREAD_READ_STATUS, userID: viewer.userID, time: time, threadID: request.threadID, unread: request.unread, }, ], { viewer, updatesForCurrentSession: 'ignore' }, ); await Promise.all([updatesPromise, queryPromise]); } let rescindCondition; if (!request.unread) { rescindCondition = SQL` n.user = ${viewer.userID} AND n.thread = ${request.threadID} AND n.message <= ${request.latestMessage} `; } await rescindAndUpdateBadgeCounts( viewer, rescindCondition, request.unread ? 'mark_as_unread' : 'mark_as_read', ); return { resetToUnread: resetThreadToUnread, }; } async function rescindAndUpdateBadgeCounts( viewer: Viewer, rescindCondition: ?SQLStatementType, badgeCountUpdateSource: ?( | 'activity_update' | 'mark_as_unread' | 'mark_as_read' ), ) { const notificationPromises = []; if (rescindCondition) { notificationPromises.push(rescindPushNotifs(rescindCondition)); } if (badgeCountUpdateSource) { notificationPromises.push(updateBadgeCount(viewer, badgeCountUpdateSource)); } await Promise.all(notificationPromises); } async function shouldResetThreadToUnread( viewer: Viewer, request: SetThreadUnreadStatusRequest, ): Promise { if (request.unread) { return false; } const threadsWithNewerMessages = await checkForNewerMessages( viewer, new Map([[request.threadID, parseInt(request.latestMessage) || 0]]), ); return threadsWithNewerMessages.has(request.threadID); } type LastMessageInfo = { +lastMessage: number, +lastReadMessage: number, }; async function fetchLastMessageInfo( viewer: Viewer, threadIDs: $ReadOnlyArray, ) { const query = SQL` SELECT thread, last_message, last_read_message FROM memberships WHERE user = ${viewer.userID} AND thread IN (${threadIDs}) `; const [result] = await dbQuery(query); const lastMessages = new Map(); for (const row of result) { const threadID = row.thread.toString(); const lastMessage = row.last_message; const lastReadMessage = row.last_read_message; lastMessages.set(threadID, { lastMessage, lastReadMessage }); } return lastMessages; } export { activityUpdater, setThreadUnreadStatus }; diff --git a/keyserver/src/updaters/relationship-updaters.js b/keyserver/src/updaters/relationship-updaters.js index 72bfa2efc..17885c634 100644 --- a/keyserver/src/updaters/relationship-updaters.js +++ b/keyserver/src/updaters/relationship-updaters.js @@ -1,371 +1,371 @@ // @flow import invariant from 'invariant'; import { sortIDs } from 'lib/shared/relationship-utils.js'; import { messageTypes } from 'lib/types/message-types-enum.js'; import { type RelationshipRequest, type RelationshipErrors, type UndirectedRelationshipRow, relationshipActions, undirectedStatus, directedStatus, } from 'lib/types/relationship-types.js'; import { threadTypes } from 'lib/types/thread-types-enum.js'; import type { NewThreadResponse } from 'lib/types/thread-types.js'; import { updateTypes } from 'lib/types/update-types-enum.js'; import { type UpdateData } from 'lib/types/update-types.js'; import { cartesianProduct } from 'lib/utils/array.js'; import { ServerError } from 'lib/utils/errors.js'; import { promiseAll } from 'lib/utils/promises.js'; import createMessages from '../creators/message-creator.js'; import { createThread } from '../creators/thread-creator.js'; import { createUpdates } 
from '../creators/update-creator.js'; import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; import { fetchFriendRequestRelationshipOperations } from '../fetchers/relationship-fetchers.js'; import { fetchUserInfos } from '../fetchers/user-fetchers.js'; import type { Viewer } from '../session/viewer.js'; async function updateRelationships( viewer: Viewer, request: RelationshipRequest, ): Promise { const { action } = request; if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const uniqueUserIDs = [...new Set(request.userIDs)]; const users = await fetchUserInfos(uniqueUserIDs); let errors: RelationshipErrors = {}; const userIDs: string[] = []; for (const userID of uniqueUserIDs) { if (userID === viewer.userID || !users[userID].username) { const acc = errors.invalid_user || []; errors.invalid_user = [...acc, userID]; } else { userIDs.push(userID); } } if (!userIDs.length) { return Object.freeze({ ...errors }); } const updateIDs = []; if (action === relationshipActions.FRIEND) { // We have to create personal threads before setting the relationship // status. By doing that we make sure that failed thread creation is // reported to the caller and can be repeated - there should be only // one PERSONAL thread per a pair of users and we can safely call it // repeatedly. const threadIDPerUser = await createPersonalThreads(viewer, request); const { userRelationshipOperations, errors: friendRequestErrors } = await fetchFriendRequestRelationshipOperations(viewer, userIDs); errors = { ...errors, ...friendRequestErrors }; const undirectedInsertRows = []; const directedInsertRows = []; const directedDeleteIDs = []; const messageDatas = []; const now = Date.now(); for (const userID in userRelationshipOperations) { const operations = userRelationshipOperations[userID]; const ids = sortIDs(viewer.userID, userID); if (operations.length) { updateIDs.push(userID); } for (const operation of operations) { if (operation === 'delete_directed') { directedDeleteIDs.push(userID); } else if (operation === 'friend') { const [user1, user2] = ids; const status = undirectedStatus.FRIEND; undirectedInsertRows.push({ user1, user2, status }); messageDatas.push({ type: messageTypes.UPDATE_RELATIONSHIP, threadID: threadIDPerUser[userID], creatorID: viewer.userID, targetID: userID, time: now, operation: 'request_accepted', }); } else if (operation === 'pending_friend') { const status = directedStatus.PENDING_FRIEND; directedInsertRows.push([viewer.userID, userID, status]); messageDatas.push({ type: messageTypes.UPDATE_RELATIONSHIP, threadID: threadIDPerUser[userID], creatorID: viewer.userID, targetID: userID, time: now, operation: 'request_sent', }); } else if (operation === 'know_of') { const [user1, user2] = ids; const status = undirectedStatus.KNOW_OF; undirectedInsertRows.push({ user1, user2, status }); } else { invariant(false, `unexpected relationship operation ${operation}`); } } } const promises: Array> = [ updateUndirectedRelationships(undirectedInsertRows), ]; if (directedInsertRows.length) { const directedInsertQuery = SQL` INSERT INTO relationships_directed (user1, user2, status) VALUES ${directedInsertRows} ON DUPLICATE KEY UPDATE status = VALUE(status) `; promises.push(dbQuery(directedInsertQuery)); } if (directedDeleteIDs.length) { const directedDeleteQuery = SQL` DELETE FROM relationships_directed WHERE (user1 = ${viewer.userID} AND user2 IN (${directedDeleteIDs})) OR (status = ${directedStatus.PENDING_FRIEND} AND user1 IN (${directedDeleteIDs}) AND user2 = ${viewer.userID}) `; 
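// For concreteness, a hypothetical walkthrough of this FRIEND flow (user IDs
// made up): suppose user 256 previously sent the viewer (user 123) a friend
// request, so relationships_directed holds (256, 123, PENDING_FRIEND). If
// fetchFriendRequestRelationshipOperations then yields
// ['friend', 'delete_directed'] for user 256, the changeset built above is:
const exampleFriendAcceptChangeset = {
  // sortIDs orders the pair; the undirected row is upgraded to FRIEND
  undirectedInsertRows: [{ user1: '123', user2: '256', status: undirectedStatus.FRIEND }],
  // and the pending directed row is removed by the DELETE query just above
  directedDeleteIDs: ['256'],
};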
promises.push(dbQuery(directedDeleteQuery)); } if (messageDatas.length > 0) { promises.push(createMessages(viewer, messageDatas, 'broadcast')); } await Promise.all(promises); } else if (action === relationshipActions.UNFRIEND) { updateIDs.push(...userIDs); const updateRows = userIDs.map(userID => { const [user1, user2] = sortIDs(viewer.userID, userID); return { user1, user2, status: undirectedStatus.KNOW_OF }; }); const deleteQuery = SQL` DELETE FROM relationships_directed WHERE status = ${directedStatus.PENDING_FRIEND} AND (user1 = ${viewer.userID} AND user2 IN (${userIDs}) OR user1 IN (${userIDs}) AND user2 = ${viewer.userID}) `; await Promise.all([ updateUndirectedRelationships(updateRows, false), dbQuery(deleteQuery), ]); } else if (action === relationshipActions.BLOCK) { updateIDs.push(...userIDs); const directedRows = []; const undirectedRows = []; for (const userID of userIDs) { directedRows.push([viewer.userID, userID, directedStatus.BLOCKED]); const [user1, user2] = sortIDs(viewer.userID, userID); undirectedRows.push({ user1, user2, status: undirectedStatus.KNOW_OF }); } const directedInsertQuery = SQL` INSERT INTO relationships_directed (user1, user2, status) VALUES ${directedRows} ON DUPLICATE KEY UPDATE status = VALUE(status) `; const directedDeleteQuery = SQL` DELETE FROM relationships_directed WHERE status = ${directedStatus.PENDING_FRIEND} AND user1 IN (${userIDs}) AND user2 = ${viewer.userID} `; await Promise.all([ dbQuery(directedInsertQuery), dbQuery(directedDeleteQuery), updateUndirectedRelationships(undirectedRows, false), ]); } else if (action === relationshipActions.UNBLOCK) { updateIDs.push(...userIDs); const query = SQL` DELETE FROM relationships_directed WHERE status = ${directedStatus.BLOCKED} AND user1 = ${viewer.userID} AND user2 IN (${userIDs}) `; await dbQuery(query); } else { invariant(false, `action ${action} is invalid or not supported currently`); } await createUpdates( updateDatasForUserPairs(cartesianProduct([viewer.userID], updateIDs)), ); return Object.freeze({ ...errors }); } function updateDatasForUserPairs( userPairs: $ReadOnlyArray<[string, string]>, ): UpdateData[] { const time = Date.now(); const updateDatas: Array = []; for (const [user1, user2] of userPairs) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID: user1, time, updatedUserID: user2, }); updateDatas.push({ type: updateTypes.UPDATE_USER, userID: user2, time, updatedUserID: user1, }); } return updateDatas; } async function updateUndirectedRelationships( changeset: UndirectedRelationshipRow[], greatest: boolean = true, ) { if (!changeset.length) { return; } const rows = changeset.map(row => [row.user1, row.user2, row.status]); const query = SQL` INSERT INTO relationships_undirected (user1, user2, status) VALUES ${rows} `; if (greatest) { query.append( SQL`ON DUPLICATE KEY UPDATE status = GREATEST(status, VALUE(status))`, ); } else { query.append(SQL`ON DUPLICATE KEY UPDATE status = VALUE(status)`); } await dbQuery(query); } async function updateChangedUndirectedRelationships( changeset: UndirectedRelationshipRow[], ): Promise { if (changeset.length === 0) { return []; } const user2ByUser1: Map> = new Map(); for (const { user1, user2 } of changeset) { if (!user2ByUser1.has(user1)) { user2ByUser1.set(user1, new Set()); } user2ByUser1.get(user1)?.add(user2); } const selectQuery = SQL` SELECT user1, user2, status FROM relationships_undirected WHERE `; const conditions = []; for (const [user1, users] of user2ByUser1) { conditions.push(SQL`(user1 = ${user1} AND user2 IN 
(${[...users]}))`); } selectQuery.append(mergeOrConditions(conditions)); const [result] = await dbQuery(selectQuery); - const existingStatuses = new Map(); + const existingStatuses = new Map(); for (const row of result) { existingStatuses.set(`${row.user1}|${row.user2}`, row.status); } const insertRows = []; for (const row of changeset) { const existingStatus = existingStatuses.get(`${row.user1}|${row.user2}`); if (!existingStatus || existingStatus < row.status) { insertRows.push([row.user1, row.user2, row.status]); } } if (insertRows.length === 0) { return []; } const insertQuery = SQL` INSERT INTO relationships_undirected (user1, user2, status) VALUES ${insertRows} ON DUPLICATE KEY UPDATE status = GREATEST(status, VALUE(status)) `; await dbQuery(insertQuery); return updateDatasForUserPairs( insertRows.map(([user1, user2]) => [user1, user2]), ); } async function createPersonalThreads( viewer: Viewer, request: RelationshipRequest, ) { invariant( request.action === relationshipActions.FRIEND, 'We should only create a PERSONAL threads when sending a FRIEND request, ' + `but we tried to do that for ${request.action}`, ); const threadIDPerUser: { [string]: string } = {}; const personalThreadsQuery = SQL` SELECT t.id AS threadID, m2.user AS user2 FROM threads t INNER JOIN memberships m1 ON m1.thread = t.id AND m1.user = ${viewer.userID} INNER JOIN memberships m2 ON m2.thread = t.id AND m2.user IN (${request.userIDs}) WHERE t.type = ${threadTypes.PERSONAL} AND m1.role > 0 AND m2.role > 0 `; const [personalThreadsResult] = await dbQuery(personalThreadsQuery); for (const row of personalThreadsResult) { const user2 = row.user2.toString(); threadIDPerUser[user2] = row.threadID.toString(); } const threadCreationPromises: { [string]: Promise } = {}; for (const userID of request.userIDs) { if (threadIDPerUser[userID]) { continue; } threadCreationPromises[userID] = createThread( viewer, { type: threadTypes.PERSONAL, initialMemberIDs: [userID], }, { forceAddMembers: true, updatesForCurrentSession: 'broadcast' }, ); } const personalThreadPerUser = await promiseAll(threadCreationPromises); for (const userID in personalThreadPerUser) { const newThread = personalThreadPerUser[userID]; threadIDPerUser[userID] = newThread.newThreadID; } return threadIDPerUser; } export { updateRelationships, updateDatasForUserPairs, updateUndirectedRelationships, updateChangedUndirectedRelationships, }; diff --git a/keyserver/src/updaters/thread-permission-updaters.js b/keyserver/src/updaters/thread-permission-updaters.js index bcbccd15a..929ffecf7 100644 --- a/keyserver/src/updaters/thread-permission-updaters.js +++ b/keyserver/src/updaters/thread-permission-updaters.js @@ -1,1405 +1,1410 @@ // @flow import invariant from 'invariant'; import _isEqual from 'lodash/fp/isEqual.js'; import bots from 'lib/facts/bots.js'; import genesis from 'lib/facts/genesis.js'; import { specialRoles } from 'lib/permissions/special-roles.js'; import { makePermissionsBlob, makePermissionsForChildrenBlob, getRoleForPermissions, } from 'lib/permissions/thread-permissions.js'; import type { CalendarQuery } from 'lib/types/entry-types.js'; import { messageTypes } from 'lib/types/message-types-enum.js'; import type { ThreadPermissionsBlob, ThreadRolePermissionsBlob, } from 'lib/types/thread-permission-types.js'; import { threadPermissions } from 'lib/types/thread-permission-types.js'; import { type ThreadType, assertThreadType, } from 'lib/types/thread-types-enum.js'; import { updateTypes } from 'lib/types/update-types-enum.js'; import { type 
ServerUpdateInfo, type CreateUpdatesResult, type UpdateData, } from 'lib/types/update-types.js'; import { pushAll } from 'lib/utils/array.js'; import { ServerError } from 'lib/utils/errors.js'; import { updateChangedUndirectedRelationships } from './relationship-updaters.js'; import { createUpdates, type UpdatesForCurrentSession, } from '../creators/update-creator.js'; import { dbQuery, SQL } from '../database/database.js'; import { fetchServerThreadInfos, rawThreadInfosFromServerThreadInfos, } from '../fetchers/thread-fetchers.js'; import { rescindPushNotifs } from '../push/rescind.js'; import { createScriptViewer } from '../session/scripts.js'; import type { Viewer } from '../session/viewer.js'; import { updateRoles } from '../updaters/role-updaters.js'; import DepthQueue from '../utils/depth-queue.js'; import RelationshipChangeset from '../utils/relationship-changeset.js'; export type MembershipRowToSave = { +operation: 'save', +intent: 'join' | 'leave' | 'none', +userID: string, +threadID: string, +userNeedsFullThreadDetails: boolean, +permissions: ?ThreadPermissionsBlob, +permissionsForChildren: ?ThreadPermissionsBlob, // null role represents by "0" +role: string, +oldRole: string, +unread?: boolean, }; type MembershipRowToDelete = { +operation: 'delete', +intent: 'join' | 'leave' | 'none', +userID: string, +threadID: string, +oldRole: string, }; export type MembershipRow = MembershipRowToSave | MembershipRowToDelete; export type MembershipChangeset = { +membershipRows: MembershipRow[], +relationshipChangeset: RelationshipChangeset, }; // 0 role means to remove the user from the thread // null role means to set the user to the default role // string role means to set the user to the role with that ID // -1 role means to set the user as a "ghost" (former member) type ChangeRoleOptions = { +setNewMembersToUnread?: boolean, +forcePermissionRecalculation?: boolean, }; type ChangeRoleMemberInfo = { permissionsFromParent?: ?ThreadPermissionsBlob, memberOfContainingThread?: boolean, }; +type ExistingMembership = { + +oldRole: string, + +oldPermissions: ?ThreadPermissionsBlob, + +oldPermissionsForChildren: ?ThreadPermissionsBlob, +}; async function changeRole( threadID: string, userIDs: $ReadOnlyArray, role: string | -1 | 0 | null, options?: ChangeRoleOptions, ): Promise { const intent = role === -1 || role === 0 ? 
'leave' : 'join'; const setNewMembersToUnread = options?.setNewMembersToUnread && intent === 'join'; const forcePermissionRecalculation = options?.forcePermissionRecalculation; if (userIDs.length === 0) { return { membershipRows: [], relationshipChangeset: new RelationshipChangeset(), }; } const membershipQuery = SQL` SELECT user, role, permissions, permissions_for_children FROM memberships WHERE thread = ${threadID} `; const parentMembershipQuery = SQL` SELECT pm.user, pm.permissions_for_children AS permissions_from_parent FROM threads t INNER JOIN memberships pm ON pm.thread = t.parent_thread_id WHERE t.id = ${threadID} AND (pm.user IN (${userIDs}) OR t.parent_thread_id != ${genesis.id}) `; const containingMembershipQuery = SQL` SELECT cm.user, cm.role AS containing_role FROM threads t INNER JOIN memberships cm ON cm.thread = t.containing_thread_id WHERE t.id = ${threadID} AND cm.user IN (${userIDs}) `; const containingMembershipPromise: Promise< $ReadOnlyArray<{ +user: number, +containing_role: number, }>, > = (async () => { if (intent === 'leave') { // Membership in the container only needs to be checked for members return []; } const [result] = await dbQuery(containingMembershipQuery); return result; })(); const [ [membershipResults], [parentMembershipResults], containingMembershipResults, roleThreadResult, ] = await Promise.all([ dbQuery(membershipQuery), dbQuery(parentMembershipQuery), containingMembershipPromise, changeRoleThreadQuery(threadID, role), ]); const { roleColumnValue: intendedRole, threadType, parentThreadID, hasContainingThreadID, rolePermissions: intendedRolePermissions, depth, } = roleThreadResult; - const existingMembershipInfo = new Map(); + const existingMembershipInfo = new Map(); for (const row of membershipResults) { const userID = row.user.toString(); existingMembershipInfo.set(userID, { oldRole: row.role.toString(), oldPermissions: JSON.parse(row.permissions), oldPermissionsForChildren: JSON.parse(row.permissions_for_children), }); } const ancestorMembershipInfo: Map = new Map(); for (const row of parentMembershipResults) { const userID = row.user.toString(); if (!userIDs.includes(userID)) { continue; } ancestorMembershipInfo.set(userID, { permissionsFromParent: JSON.parse(row.permissions_from_parent), }); } for (const row of containingMembershipResults) { const userID = row.user.toString(); const ancestorMembership = ancestorMembershipInfo.get(userID); const memberOfContainingThread = row.containing_role > 0; if (ancestorMembership) { ancestorMembership.memberOfContainingThread = memberOfContainingThread; } else { ancestorMembershipInfo.set(userID, { memberOfContainingThread, }); } } const relationshipChangeset = new RelationshipChangeset(); const existingMemberIDs = [...existingMembershipInfo.keys()]; if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(existingMemberIDs); } const parentMemberIDs = parentMembershipResults.map(row => row.user.toString(), ); if (parentThreadID && parentThreadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(parentMemberIDs); } const membershipRows: Array = []; - const toUpdateDescendants = new Map(); + const toUpdateDescendants = new Map(); for (const userID of userIDs) { const existingMembership = existingMembershipInfo.get(userID); const oldRole = existingMembership?.oldRole ?? '-1'; const oldPermissions = existingMembership?.oldPermissions ?? null; const oldPermissionsForChildren = existingMembership?.oldPermissionsForChildren ?? 
null; if ( existingMembership && oldRole === intendedRole && !forcePermissionRecalculation ) { // If the old role is the same as the new one, we have nothing to update continue; } else if (Number(oldRole) > 0 && role === null) { // In the case where we're just trying to add somebody to a thread, if // they already have a role with a nonzero role then we don't need to do // anything continue; } let permissionsFromParent = null; let memberOfContainingThread = false; const ancestorMembership = ancestorMembershipInfo.get(userID); if (ancestorMembership) { permissionsFromParent = ancestorMembership.permissionsFromParent; memberOfContainingThread = !!ancestorMembership.memberOfContainingThread; } if (!hasContainingThreadID) { memberOfContainingThread = true; } const rolePermissions = memberOfContainingThread ? intendedRolePermissions : null; const targetRole = memberOfContainingThread ? intendedRole : '-1'; const permissions = makePermissionsBlob( rolePermissions, permissionsFromParent, threadID, threadType, ); const permissionsForChildren = makePermissionsForChildrenBlob(permissions); const newRole = getRoleForPermissions(targetRole, permissions); const userBecameMember = Number(oldRole) <= 0 && Number(newRole) > 0; const userLostMembership = Number(oldRole) > 0 && Number(newRole) <= 0; if ( (intent === 'join' && Number(newRole) <= 0) || (intent === 'leave' && Number(newRole) > 0) ) { throw new ServerError('invalid_parameters'); } else if (intendedRole !== newRole) { console.warn( `changeRole called for role=${intendedRole}, but ended up setting ` + `role=${newRole} for userID ${userID} and threadID ${threadID}, ` + 'probably because KNOW_OF permission was unexpectedly present or ' + 'missing', ); } if ( existingMembership && _isEqual(permissions)(oldPermissions) && oldRole === newRole ) { // This thread and all of its descendants need no updates for this user, // since the corresponding memberships row is unchanged by this operation continue; } if (permissions) { membershipRows.push({ operation: 'save', intent, userID, threadID, userNeedsFullThreadDetails: userBecameMember, permissions, permissionsForChildren, role: newRole, oldRole, unread: userBecameMember && setNewMembersToUnread, }); } else { membershipRows.push({ operation: 'delete', intent, userID, threadID, oldRole, }); } if (permissions && !existingMembership && threadID !== genesis.id) { relationshipChangeset.setRelationshipsNeeded(userID, existingMemberIDs); } if ( userLostMembership || !_isEqual(permissionsForChildren)(oldPermissionsForChildren) ) { toUpdateDescendants.set(userID, { userIsMember: Number(newRole) > 0, permissionsForChildren, }); } } if (toUpdateDescendants.size > 0) { const { membershipRows: descendantMembershipRows, relationshipChangeset: descendantRelationshipChangeset, } = await updateDescendantPermissions({ threadID, depth, changesByUser: toUpdateDescendants, }); pushAll(membershipRows, descendantMembershipRows); relationshipChangeset.addAll(descendantRelationshipChangeset); } return { membershipRows, relationshipChangeset }; } type RoleThreadResult = { +roleColumnValue: string, +depth: number, +threadType: ThreadType, +parentThreadID: ?string, +hasContainingThreadID: boolean, +rolePermissions: ?ThreadRolePermissionsBlob, }; async function changeRoleThreadQuery( threadID: string, role: string | -1 | 0 | null, ): Promise { if (role === 0 || role === -1) { const query = SQL` SELECT type, depth, parent_thread_id, containing_thread_id FROM threads WHERE id = ${threadID} `; const [result] = await dbQuery(query); if 
(result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; return { roleColumnValue: role.toString(), depth: row.depth, threadType: assertThreadType(row.type), parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, hasContainingThreadID: row.containing_thread_id !== null, rolePermissions: null, }; } else if (role !== null) { const query = SQL` SELECT t.type, t.depth, t.parent_thread_id, t.containing_thread_id, r.permissions FROM threads t INNER JOIN roles r ON r.thread = t.id AND r.id = ${role} WHERE t.id = ${threadID} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; return { roleColumnValue: role, depth: row.depth, threadType: assertThreadType(row.type), parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, hasContainingThreadID: row.containing_thread_id !== null, rolePermissions: JSON.parse(row.permissions), }; } else { const query = SQL` SELECT t.type, t.depth, t.parent_thread_id, t.containing_thread_id, r.permissions, r.id FROM threads t INNER JOIN roles r ON r.thread = t.id AND r.special_role = ${specialRoles.DEFAULT_ROLE} WHERE t.id = ${threadID} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; return { roleColumnValue: row.id.toString(), depth: row.depth, threadType: assertThreadType(row.type), parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, hasContainingThreadID: row.containing_thread_id !== null, rolePermissions: JSON.parse(row.permissions), }; } } type ChangedAncestor = { +threadID: string, +depth: number, +changesByUser: Map, }; type AncestorChanges = { +userIsMember: boolean, +permissionsForChildren: ?ThreadPermissionsBlob, }; async function updateDescendantPermissions( initialChangedAncestor: ChangedAncestor, ): Promise { const membershipRows: Array = []; const relationshipChangeset = new RelationshipChangeset(); const initialDescendants = await fetchDescendantsForUpdate([ initialChangedAncestor, ]); const depthQueue = new DepthQueue( getDescendantDepth, getDescendantKey, mergeDescendants, ); depthQueue.addInfos(initialDescendants); let descendants; while ((descendants = depthQueue.getNextDepth())) { const descendantsAsAncestors = []; for (const descendant of descendants) { const { threadID, threadType, depth, users } = descendant; const existingMembers = [...users.entries()]; const existingMemberIDs = existingMembers .filter(([, { curRole }]) => curRole) .map(([userID]) => userID); if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(existingMemberIDs); } - const usersForNextLayer = new Map(); + const usersForNextLayer = new Map(); for (const [userID, user] of users) { const { curRolePermissions, curPermissionsFromParent, curMemberOfContainingThread, nextMemberOfContainingThread, nextPermissionsFromParent, potentiallyNeedsUpdate, } = user; const existingMembership = !!user.curRole; const curRole = user.curRole ?? '-1'; const curPermissions = user.curPermissions ?? null; const curPermissionsForChildren = user.curPermissionsForChildren ?? null; if (!potentiallyNeedsUpdate) { continue; } const permissionsFromParent = nextPermissionsFromParent === undefined ? curPermissionsFromParent : nextPermissionsFromParent; const memberOfContainingThread = nextMemberOfContainingThread === undefined ? 
curMemberOfContainingThread : nextMemberOfContainingThread; const targetRole = memberOfContainingThread ? curRole : '-1'; const rolePermissions = memberOfContainingThread ? curRolePermissions : null; const permissions = makePermissionsBlob( rolePermissions, permissionsFromParent, threadID, threadType, ); const permissionsForChildren = makePermissionsForChildrenBlob(permissions); const newRole = getRoleForPermissions(targetRole, permissions); const userLostMembership = Number(curRole) > 0 && Number(newRole) <= 0; if (_isEqual(permissions)(curPermissions) && curRole === newRole) { // This thread and all of its descendants need no updates for this // user, since the corresponding memberships row is unchanged by this // operation continue; } if (permissions) { membershipRows.push({ operation: 'save', intent: 'none', userID, threadID, userNeedsFullThreadDetails: false, permissions, permissionsForChildren, role: newRole, oldRole: curRole, }); } else { membershipRows.push({ operation: 'delete', intent: 'none', userID, threadID, oldRole: curRole, }); } if (permissions && !existingMembership && threadID !== genesis.id) { // If there was no membership row before, and we are creating one, // we'll need to make sure the new member has a relationship row with // each existing member. We expect that whoever called us already // generated membership rows for the new members, which will lead // saveMemberships to generate relationship rows between those new // users. relationshipChangeset.setRelationshipsNeeded( userID, existingMemberIDs, ); } if ( userLostMembership || !_isEqual(permissionsForChildren)(curPermissionsForChildren) ) { usersForNextLayer.set(userID, { userIsMember: Number(newRole) > 0, permissionsForChildren, }); } } if (usersForNextLayer.size > 0) { descendantsAsAncestors.push({ threadID, depth, changesByUser: usersForNextLayer, }); } } const nextDescendants = await fetchDescendantsForUpdate( descendantsAsAncestors, ); depthQueue.addInfos(nextDescendants); } return { membershipRows, relationshipChangeset }; } type DescendantUserInfo = $Shape<{ curRole?: string, curRolePermissions?: ?ThreadRolePermissionsBlob, curPermissions?: ?ThreadPermissionsBlob, curPermissionsForChildren?: ?ThreadPermissionsBlob, curPermissionsFromParent?: ?ThreadPermissionsBlob, curMemberOfContainingThread?: boolean, nextPermissionsFromParent?: ?ThreadPermissionsBlob, nextMemberOfContainingThread?: boolean, potentiallyNeedsUpdate?: boolean, }>; type DescendantInfo = { +threadID: string, +parentThreadID: string, +containingThreadID: string, +threadType: ThreadType, +depth: number, +users: Map, }; const fetchDescendantsBatchSize = 10; async function fetchDescendantsForUpdate( ancestors: $ReadOnlyArray, ): Promise { const threadIDs = ancestors.map(ancestor => ancestor.threadID); const rows: Array<{ +id: number, +user: number, +type: number, +depth: number, +parent_thread_id: number, +containing_thread_id: number, +role_permissions: string, +permissions: string, +permissions_for_children: string, +role: number, +permissions_from_parent: string | null, +containing_role: ?number, }> = []; while (threadIDs.length > 0) { const batch = threadIDs.splice(0, fetchDescendantsBatchSize); const query = SQL` SELECT t.id, m.user, t.type, t.depth, t.parent_thread_id, t.containing_thread_id, r.permissions AS role_permissions, m.permissions, m.permissions_for_children, m.role, pm.permissions_for_children AS permissions_from_parent, cm.role AS containing_role FROM threads t INNER JOIN memberships m ON m.thread = t.id LEFT JOIN memberships
pm ON pm.thread = t.parent_thread_id AND pm.user = m.user LEFT JOIN memberships cm ON cm.thread = t.containing_thread_id AND cm.user = m.user LEFT JOIN roles r ON r.id = m.role WHERE t.parent_thread_id IN (${batch}) OR t.containing_thread_id IN (${batch}) `; const [results] = await dbQuery(query); pushAll(rows, results); } const descendantThreadInfos: Map = new Map(); for (const row of rows) { const descendantThreadID = row.id.toString(); if (!descendantThreadInfos.has(descendantThreadID)) { descendantThreadInfos.set(descendantThreadID, { threadID: descendantThreadID, parentThreadID: row.parent_thread_id.toString(), containingThreadID: row.containing_thread_id.toString(), threadType: assertThreadType(row.type), depth: row.depth, users: new Map(), }); } const descendantThreadInfo = descendantThreadInfos.get(descendantThreadID); invariant( descendantThreadInfo, `value should exist for key ${descendantThreadID}`, ); const userID = row.user.toString(); descendantThreadInfo.users.set(userID, { curRole: row.role.toString(), curRolePermissions: JSON.parse(row.role_permissions), curPermissions: JSON.parse(row.permissions), curPermissionsForChildren: JSON.parse(row.permissions_for_children), curPermissionsFromParent: row.permissions_from_parent ? JSON.parse(row.permissions_from_parent) : null, curMemberOfContainingThread: !!row.containing_role && row.containing_role > 0, }); } for (const ancestor of ancestors) { const { threadID, changesByUser } = ancestor; for (const [userID, changes] of changesByUser) { for (const descendantThreadInfo of descendantThreadInfos.values()) { const { users, parentThreadID, containingThreadID } = descendantThreadInfo; if (threadID !== parentThreadID && threadID !== containingThreadID) { continue; } let user: ?DescendantUserInfo = users.get(userID); if (!user) { user = ({}: DescendantUserInfo); users.set(userID, user); } if (threadID === parentThreadID) { user.nextPermissionsFromParent = changes.permissionsForChildren; user.potentiallyNeedsUpdate = true; } if (threadID === containingThreadID) { user.nextMemberOfContainingThread = changes.userIsMember; if (!user.nextMemberOfContainingThread) { user.potentiallyNeedsUpdate = true; } } } } } return [...descendantThreadInfos.values()]; } function getDescendantDepth(descendant: DescendantInfo): number { return descendant.depth; } function getDescendantKey(descendant: DescendantInfo): string { return descendant.threadID; } function mergeDescendants( a: DescendantInfo, b: DescendantInfo, ): DescendantInfo { const { users: usersA, ...restA } = a; const { users: usersB, ...restB } = b; if (!_isEqual(restA)(restB)) { console.warn( `inconsistent descendantInfos ${JSON.stringify(restA)}, ` + JSON.stringify(restB), ); throw new ServerError('internal_error'); } const newUsers = new Map(usersA); for (const [userID, userFromB] of usersB) { const userFromA = newUsers.get(userID); if (!userFromA) { newUsers.set(userID, userFromB); } else { newUsers.set(userID, { ...userFromA, ...userFromB }); } } return { ...a, users: newUsers }; } type RecalculatePermissionsMemberInfo = { role?: ?string, permissions?: ?ThreadPermissionsBlob, permissionsForChildren?: ?ThreadPermissionsBlob, rolePermissions?: ?ThreadRolePermissionsBlob, memberOfContainingThread?: boolean, permissionsFromParent?: ?ThreadPermissionsBlob, }; async function recalculateThreadPermissions( threadID: string, ): Promise { const threadQuery = SQL` SELECT type, depth, parent_thread_id, containing_thread_id FROM threads WHERE id = ${threadID} `; const membershipQuery = SQL` SELECT 
m.user, m.role, m.permissions, m.permissions_for_children, r.permissions AS role_permissions, cm.role AS containing_role FROM threads t INNER JOIN memberships m ON m.thread = t.id LEFT JOIN roles r ON r.id = m.role LEFT JOIN memberships cm ON cm.user = m.user AND cm.thread = t.containing_thread_id WHERE t.id = ${threadID} `; const parentMembershipQuery = SQL` SELECT pm.user, pm.permissions_for_children AS permissions_from_parent FROM threads t INNER JOIN memberships pm ON pm.thread = t.parent_thread_id WHERE t.id = ${threadID} `; const [[threadResults], [membershipResults], [parentMembershipResults]] = await Promise.all([ dbQuery(threadQuery), dbQuery(membershipQuery), dbQuery(parentMembershipQuery), ]); if (threadResults.length !== 1) { throw new ServerError('internal_error'); } const [threadResult] = threadResults; const threadType = assertThreadType(threadResult.type); const depth = threadResult.depth; const hasContainingThreadID = threadResult.containing_thread_id !== null; const parentThreadID = threadResult.parent_thread_id?.toString(); const membershipInfo: Map = new Map(); for (const row of membershipResults) { const userID = row.user.toString(); membershipInfo.set(userID, { role: row.role.toString(), permissions: JSON.parse(row.permissions), permissionsForChildren: JSON.parse(row.permissions_for_children), rolePermissions: JSON.parse(row.role_permissions), memberOfContainingThread: !!( row.containing_role && row.containing_role > 0 ), }); } for (const row of parentMembershipResults) { const userID = row.user.toString(); const permissionsFromParent = JSON.parse(row.permissions_from_parent); const membership = membershipInfo.get(userID); if (membership) { membership.permissionsFromParent = permissionsFromParent; } else { membershipInfo.set(userID, { permissionsFromParent: permissionsFromParent, }); } } const relationshipChangeset = new RelationshipChangeset(); const existingMemberIDs = membershipResults.map(row => row.user.toString()); if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(existingMemberIDs); } const parentMemberIDs = parentMembershipResults.map(row => row.user.toString(), ); if (parentThreadID && parentThreadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(parentMemberIDs); } const membershipRows: Array = []; - const toUpdateDescendants = new Map(); + const toUpdateDescendants = new Map(); for (const [userID, membership] of membershipInfo) { const { rolePermissions: intendedRolePermissions, permissionsFromParent } = membership; const oldPermissions = membership?.permissions ?? null; const oldPermissionsForChildren = membership?.permissionsForChildren ?? null; const existingMembership = membership.role !== undefined; const oldRole = membership.role ?? '-1'; const memberOfContainingThread = hasContainingThreadID ? !!membership.memberOfContainingThread : true; const targetRole = memberOfContainingThread ? oldRole : '-1'; const rolePermissions = memberOfContainingThread ? 
intendedRolePermissions : null; const permissions = makePermissionsBlob( rolePermissions, permissionsFromParent, threadID, threadType, ); const permissionsForChildren = makePermissionsForChildrenBlob(permissions); const newRole = getRoleForPermissions(targetRole, permissions); const userLostMembership = Number(oldRole) > 0 && Number(newRole) <= 0; if (_isEqual(permissions)(oldPermissions) && oldRole === newRole) { // This thread and all of its descendants need no updates for this user, // since the corresponding memberships row is unchanged by this operation continue; } if (permissions) { membershipRows.push({ operation: 'save', intent: 'none', userID, threadID, userNeedsFullThreadDetails: false, permissions, permissionsForChildren, role: newRole, oldRole, }); } else { membershipRows.push({ operation: 'delete', intent: 'none', userID, threadID, oldRole, }); } if (permissions && !existingMembership && threadID !== genesis.id) { // If there was no membership row before, and we are creating one, // we'll need to make sure the new member has a relationship row with // each existing member. We handle guaranteeing that new members have // relationship rows with each other in saveMemberships. relationshipChangeset.setRelationshipsNeeded(userID, existingMemberIDs); } if ( userLostMembership || !_isEqual(permissionsForChildren)(oldPermissionsForChildren) ) { toUpdateDescendants.set(userID, { userIsMember: Number(newRole) > 0, permissionsForChildren, }); } } if (toUpdateDescendants.size > 0) { const { membershipRows: descendantMembershipRows, relationshipChangeset: descendantRelationshipChangeset, } = await updateDescendantPermissions({ threadID, depth, changesByUser: toUpdateDescendants, }); pushAll(membershipRows, descendantMembershipRows); relationshipChangeset.addAll(descendantRelationshipChangeset); } return { membershipRows, relationshipChangeset }; } const defaultSubscriptionString = JSON.stringify({ home: false, pushNotifs: false, }); const joinSubscriptionString = JSON.stringify({ home: true, pushNotifs: true }); const membershipInsertBatchSize = 50; const visibleExtractString = `$.${threadPermissions.VISIBLE}.value`; async function saveMemberships({ toSave, updateMembershipsLastMessage, }: { toSave: $ReadOnlyArray, updateMembershipsLastMessage: boolean, }) { if (toSave.length === 0) { return; } const time = Date.now(); const insertRows = []; for (const rowToSave of toSave) { insertRows.push([ rowToSave.userID, rowToSave.threadID, rowToSave.role, time, rowToSave.intent === 'join' ? joinSubscriptionString : defaultSubscriptionString, rowToSave.permissions ? JSON.stringify(rowToSave.permissions) : null, rowToSave.permissionsForChildren ? JSON.stringify(rowToSave.permissionsForChildren) : null, rowToSave.unread ? 1 : 0, 0, ]); } // Logic below will only update an existing membership row's `subscription` // column if the user is either joining or leaving the thread. That means // there's no way to use this function to update a user's subscription without // also making them join or leave the thread. The reason we do this is because // we need to specify a value for `subscription` here, as it's a non-null // column and this is an INSERT, but we don't want to require people to have // to know the current `subscription` when they're just using this function to // update the permissions of an existing membership row. 
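// Hedged illustration (editor's note, not part of the original change): with
// the IF() in the upsert below, an existing row's subscription is only swapped
// for the incoming value when the role crosses zero. Assuming an existing row
// with role=0 and subscription={"home":false,"pushNotifs":false}, an incoming
// 'join' row with role=1 carries the join subscription and flips it on, while
// a permissions-only recalculation (role 1 -> 1) rewrites permissions but
// leaves subscription untouched.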
while (insertRows.length > 0) { const batch = insertRows.splice(0, membershipInsertBatchSize); const query = SQL` INSERT INTO memberships (user, thread, role, creation_time, subscription, permissions, permissions_for_children, last_message, last_read_message) VALUES ${batch} ON DUPLICATE KEY UPDATE subscription = IF( (role <= 0 AND VALUE(role) > 0) OR (role > 0 AND VALUE(role) <= 0), VALUE(subscription), subscription ), role = VALUE(role), permissions = VALUE(permissions), permissions_for_children = VALUE(permissions_for_children) `; await dbQuery(query); } if (!updateMembershipsLastMessage) { return; } const joinRows = toSave .filter(row => row.intent === 'join') .map(row => [row.userID, row.threadID, row.unread]); if (joinRows.length === 0) { return; } const joinedUserThreadPairs = joinRows.map(([user, thread]) => [ user, thread, ]); const unreadUserThreadPairs = joinRows .filter(([, , unread]) => !!unread) .map(([user, thread]) => [user, thread]); let lastReadMessageExpression; if (unreadUserThreadPairs.length === 0) { lastReadMessageExpression = SQL` GREATEST(COALESCE(all_users_query.message, 0), COALESCE(last_subthread_message_for_user_query.message, 0)) `; } else { lastReadMessageExpression = SQL` (CASE WHEN ((mm.user, mm.thread) in (${unreadUserThreadPairs})) THEN 0 ELSE GREATEST(COALESCE(all_users_query.message, 0), COALESCE(last_subthread_message_for_user_query.message, 0)) END) `; } // We join two subqueries with the memberships table: // - the first subquery calculates the oldest non-CREATE_SUB_THREAD // message, which is the same for all users // - the second subquery calculates the oldest CREATE_SUB_THREAD messages, // which can be different for each user because of visibility permissions // Then we set the `last_message` column to the greater value of the two. // For `last_read_message` we do the same but only if the user should have // the "unread" status set for this thread. 
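// Hedged example (editor's note; the message IDs are made up): if a thread's
// newest non-CREATE_SUB_THREAD message has id 90 and the newest
// CREATE_SUB_THREAD message whose subthread is visible to this member has
// id 95, the query below sets last_message = GREATEST(90, 95) = 95.
// last_read_message gets the same GREATEST value unless the member was
// inserted as unread, in which case the CASE expression above forces it to 0.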
const query = SQL` UPDATE memberships mm LEFT JOIN ( SELECT thread, MAX(id) AS message FROM messages WHERE type != ${messageTypes.CREATE_SUB_THREAD} GROUP BY thread ) all_users_query ON mm.thread = all_users_query.thread LEFT JOIN ( SELECT m.thread, stm.user, MAX(m.id) AS message FROM messages m LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content WHERE JSON_EXTRACT(stm.permissions, ${visibleExtractString}) IS TRUE GROUP BY m.thread, stm.user ) last_subthread_message_for_user_query ON mm.thread = last_subthread_message_for_user_query.thread AND mm.user = last_subthread_message_for_user_query.user SET mm.last_message = GREATEST(COALESCE(all_users_query.message, 0), COALESCE(last_subthread_message_for_user_query.message, 0)), mm.last_read_message = `; query.append(lastReadMessageExpression); query.append(SQL`WHERE (mm.user, mm.thread) IN (${joinedUserThreadPairs});`); await dbQuery(query); } async function deleteMemberships( toDelete: $ReadOnlyArray, ) { if (toDelete.length === 0) { return; } const time = Date.now(); const insertRows = toDelete.map(rowToDelete => [ rowToDelete.userID, rowToDelete.threadID, -1, time, defaultSubscriptionString, null, null, 0, 0, ]); while (insertRows.length > 0) { const batch = insertRows.splice(0, membershipInsertBatchSize); const query = SQL` INSERT INTO memberships (user, thread, role, creation_time, subscription, permissions, permissions_for_children, last_message, last_read_message) VALUES ${batch} ON DUPLICATE KEY UPDATE role = -1, permissions = NULL, permissions_for_children = NULL, subscription = ${defaultSubscriptionString}, last_message = 0, last_read_message = 0 `; await dbQuery(query); } } const emptyCommitMembershipChangesetConfig = Object.freeze({}); // Specify non-empty changedThreadIDs to force updates to be generated for those // threads, presumably for reasons not covered in the changeset. calendarQuery // only needs to be specified if a JOIN_THREAD update will be generated for the // viewer, in which case it's necessary for knowing the set of entries to fetch. 
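// A minimal usage sketch (editor's addition; it mirrors the call sites later
// in this file rather than prescribing an API):
//   const changeset = await recalculateThreadPermissions(threadID);
//   await commitMembershipChangeset(viewer, changeset);
// Passing calendarQuery only matters when the changeset can yield a
// JOIN_THREAD update for the viewer, since that update needs to know which
// calendar entries to fetch.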
type ChangesetCommitResult = { ...CreateUpdatesResult, }; async function commitMembershipChangeset( viewer: Viewer, changeset: MembershipChangeset, { changedThreadIDs = new Set(), calendarQuery, updatesForCurrentSession = 'return', updateMembershipsLastMessage = false, }: { +changedThreadIDs?: Set, +calendarQuery?: ?CalendarQuery, +updatesForCurrentSession?: UpdatesForCurrentSession, +updateMembershipsLastMessage?: boolean, } = emptyCommitMembershipChangesetConfig, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const { membershipRows, relationshipChangeset } = changeset; - const membershipRowMap = new Map(); + const membershipRowMap = new Map(); for (const row of membershipRows) { const { userID, threadID } = row; changedThreadIDs.add(threadID); const pairString = `${userID}|${threadID}`; const existing = membershipRowMap.get(pairString); invariant( !existing || existing.intent === 'none' || row.intent === 'none', `multiple intents provided for ${pairString}`, ); if (!existing || existing.intent === 'none') { membershipRowMap.set(pairString, row); } } const toSave = [], toDelete = [], toRescindPushNotifs = []; for (const row of membershipRowMap.values()) { if ( row.operation === 'delete' || (row.operation === 'save' && Number(row.role) <= 0) ) { const { userID, threadID } = row; toRescindPushNotifs.push({ userID, threadID }); } if (row.operation === 'delete') { toDelete.push(row); } else { toSave.push(row); } } - const threadsToSavedUsers = new Map(); + const threadsToSavedUsers = new Map<string, Array<string>>(); for (const row of membershipRowMap.values()) { const { userID, threadID } = row; let savedUsers = threadsToSavedUsers.get(threadID); if (!savedUsers) { savedUsers = []; threadsToSavedUsers.set(threadID, savedUsers); } savedUsers.push(userID); } for (const [threadID, savedUsers] of threadsToSavedUsers) { if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsNeeded(savedUsers); } } const relationshipRows = relationshipChangeset.getRows(); const [updateDatas] = await Promise.all([ updateChangedUndirectedRelationships(relationshipRows), saveMemberships({ toSave, updateMembershipsLastMessage }), deleteMemberships(toDelete), rescindPushNotifsForMemberDeletion(toRescindPushNotifs), ]); const serverThreadInfoFetchResult = await fetchServerThreadInfos({ threadIDs: changedThreadIDs, }); const { threadInfos: serverThreadInfos } = serverThreadInfoFetchResult; const time = Date.now(); for (const changedThreadID of changedThreadIDs) { const serverThreadInfo = serverThreadInfos[changedThreadID]; for (const memberInfo of serverThreadInfo.members) { const pairString = `${memberInfo.id}|${serverThreadInfo.id}`; const membershipRow = membershipRowMap.get(pairString); if (membershipRow) { continue; } updateDatas.push({ type: updateTypes.UPDATE_THREAD, userID: memberInfo.id, time, threadID: changedThreadID, }); } } for (const row of membershipRowMap.values()) { const { userID, threadID } = row; if (row.operation === 'delete' || row.role === '-1') { if (row.oldRole !== '-1') { updateDatas.push({ type: updateTypes.DELETE_THREAD, userID, time, threadID, }); } } else if (row.userNeedsFullThreadDetails) { updateDatas.push({ type: updateTypes.JOIN_THREAD, userID, time, threadID, }); } else { updateDatas.push({ type: updateTypes.UPDATE_THREAD, userID, time, threadID, }); } } const threadInfoFetchResult = rawThreadInfosFromServerThreadInfos( viewer, serverThreadInfoFetchResult, ); const { viewerUpdates, userInfos } = await createUpdates(updateDatas, { viewer, calendarQuery,
...threadInfoFetchResult, updatesForCurrentSession, }); return { userInfos, viewerUpdates, }; } const emptyGetChangesetCommitResultConfig = Object.freeze({}); // When the user tries to create a new thread, it's possible for the client to // fail the creation even if a row gets added to the threads table. This may // occur due to a timeout (on either the client or server side), or due to some // error in the server code following the INSERT operation. Handling the error // scenario is more challenging since it would require detecting which set of // operations failed so we could retry them. As a result, this code is geared at // only handling the timeout scenario. async function getChangesetCommitResultForExistingThread( viewer: Viewer, threadID: string, otherUpdates: $ReadOnlyArray, { calendarQuery, updatesForCurrentSession = 'return', }: { +calendarQuery?: ?CalendarQuery, +updatesForCurrentSession?: UpdatesForCurrentSession, } = emptyGetChangesetCommitResultConfig, ): Promise { for (const update of otherUpdates) { if ( update.type === updateTypes.JOIN_THREAD && update.threadInfo.id === threadID ) { // If the JOIN_THREAD is already there we can expect // the appropriate UPDATE_USERs to be covered as well return { viewerUpdates: otherUpdates, userInfos: {} }; } } const time = Date.now(); const updateDatas: Array = [ { type: updateTypes.JOIN_THREAD, userID: viewer.userID, time, threadID, targetSession: viewer.session, }, ]; // To figure out what UserInfos might be missing, we consider the worst case: // the same client previously attempted to create a thread with a non-friend // they found via search results, but the request timed out. In this scenario // the viewer might never have received the UPDATE_USER that would add that // UserInfo to their UserStore, but the server assumed the client had gotten // it because createUpdates was called with UpdatesForCurrentSession=return. // For completeness here we query for the full list of memberships rows in the // thread. 
We can't use fetchServerThreadInfos because it skips role=-1 rows const membershipsQuery = SQL` SELECT user FROM memberships WHERE thread = ${threadID} AND user != ${viewer.userID} `; const [results] = await dbQuery(membershipsQuery); for (const row of results) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID: viewer.userID, time, updatedUserID: row.user.toString(), targetSession: viewer.session, }); } const { viewerUpdates, userInfos } = await createUpdates(updateDatas, { viewer, calendarQuery, updatesForCurrentSession, }); return { viewerUpdates: [...otherUpdates, ...viewerUpdates], userInfos }; } const rescindPushNotifsBatchSize = 3; async function rescindPushNotifsForMemberDeletion( toRescindPushNotifs: $ReadOnlyArray<{ +userID: string, +threadID: string }>, ): Promise { const queue = [...toRescindPushNotifs]; while (queue.length > 0) { const batch = queue.splice(0, rescindPushNotifsBatchSize); await Promise.all( batch.map(({ userID, threadID }) => rescindPushNotifs( SQL`n.thread = ${threadID} AND n.user = ${userID}`, SQL`IF(m.thread = ${threadID}, NULL, m.thread)`, ), ), ); } } // Deprecated - use updateRolesAndPermissionsForAllThreads instead async function DEPRECATED_recalculateAllThreadPermissions() { const getAllThreads = SQL`SELECT id FROM threads`; const [result] = await dbQuery(getAllThreads); // We handle each thread one-by-one to avoid a situation where a permission // calculation for a child thread, done during a call to // recalculateThreadPermissions for the parent thread, can be incorrectly // overridden by a call to recalculateThreadPermissions for the child thread. // If the changeset resulting from the parent call isn't committed before the // calculation is done for the child, the calculation done for the child can // be incorrect.
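// Hedged illustration of the ordering hazard above (thread IDs are made up):
// with parent thread 100 and child thread 101, recalculating 100 also produces
// rows for 101 via updateDescendantPermissions. If 101 were recalculated
// before the changeset for 100 was committed, it would read the stale
// permissions_from_parent row and could overwrite the fresher values computed
// from 100.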
const viewer = createScriptViewer(bots.commbot.userID); for (const row of result) { const threadID = row.id.toString(); const changeset = await recalculateThreadPermissions(threadID); await commitMembershipChangeset(viewer, changeset); } } async function updateRolesAndPermissionsForAllThreads() { const batchSize = 10; const fetchThreads = SQL`SELECT id, type, depth FROM threads`; const [result] = await dbQuery(fetchThreads); const allThreads = result.map(row => { return { id: row.id.toString(), type: assertThreadType(row.type), depth: row.depth, }; }); const viewer = createScriptViewer(bots.commbot.userID); const maxDepth = Math.max(...allThreads.map(row => row.depth)); for (let depth = 0; depth <= maxDepth; depth++) { const threads = allThreads.filter(row => row.depth === depth); console.log(`recalculating permissions for threads with depth ${depth}`); while (threads.length > 0) { const batch = threads.splice(0, batchSize); const membershipRows: Array = []; const relationshipChangeset = new RelationshipChangeset(); await Promise.all( batch.map(async thread => { console.log(`updating roles for ${thread.id}`); await updateRoles(viewer, thread.id, thread.type); console.log(`recalculating permissions for ${thread.id}`); const { membershipRows: threadMembershipRows, relationshipChangeset: threadRelationshipChangeset, } = await recalculateThreadPermissions(thread.id); membershipRows.push(...threadMembershipRows); relationshipChangeset.addAll(threadRelationshipChangeset); }), ); console.log(`committing batch ${JSON.stringify(batch)}`); await commitMembershipChangeset(viewer, { membershipRows, relationshipChangeset, }); } } } export { changeRole, recalculateThreadPermissions, getChangesetCommitResultForExistingThread, saveMemberships, commitMembershipChangeset, DEPRECATED_recalculateAllThreadPermissions, updateRolesAndPermissionsForAllThreads, }; diff --git a/keyserver/src/utils/ens-cache.js b/keyserver/src/utils/ens-cache.js index 6efe9ce0f..7cfb9b177 100644 --- a/keyserver/src/utils/ens-cache.js +++ b/keyserver/src/utils/ens-cache.js @@ -1,27 +1,29 @@ // @flow import { ethers } from 'ethers'; import { getCommConfig } from 'lib/utils/comm-config.js'; import { ENSCache } from 'lib/utils/ens-cache.js'; import { getENSNames as baseGetENSNames, type GetENSNames, } from 'lib/utils/ens-helpers.js'; +type AlchemyConfig = { +key: string }; + let getENSNames: ?GetENSNames; async function initENSCache() { - const alchemySecret = await getCommConfig({ + const alchemySecret = await getCommConfig<AlchemyConfig>({ folder: 'secrets', name: 'alchemy', }); const alchemyKey = alchemySecret?.key; if (!alchemyKey) { return; } const provider = new ethers.providers.AlchemyProvider('mainnet', alchemyKey); const ensCache = new ENSCache(provider); getENSNames = baseGetENSNames.bind(null, ensCache); } export { initENSCache, getENSNames }; diff --git a/keyserver/src/utils/validation-utils.test.js b/keyserver/src/utils/validation-utils.test.js index 3ccc72360..5c0d46561 100644 --- a/keyserver/src/utils/validation-utils.test.js +++ b/keyserver/src/utils/validation-utils.test.js @@ -1,74 +1,90 @@ // @flow import t from 'tcomb'; import { tPassword, tShape } from 'lib/utils/validation-utils.js'; import { sanitizeInput, redactedString } from './validation-utils.js'; describe('sanitization', () => { it('should redact a string', () => { expect(sanitizeInput(tPassword, 'password')).toStrictEqual(redactedString); }); it('should redact a string inside an object', () => { - const validator = tShape({ password: tPassword }); + const validator =
tShape<{ +password: string }>({ password: tPassword }); const object = { password: 'password' }; const redacted = { password: redactedString }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact an optional string', () => { - const validator = tShape({ password: t.maybe(tPassword) }); + const validator = tShape<{ +password: ?string }>({ + password: t.maybe(tPassword), + }); const object = { password: 'password' }; const redacted = { password: redactedString }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact a string in optional object', () => { - const validator = tShape({ obj: t.maybe(tShape({ password: tPassword })) }); + const validator = tShape<{ +obj?: ?{ +password: string } }>({ + obj: t.maybe(tShape<{ +password: string }>({ password: tPassword })), + }); const object = { obj: { password: 'password' } }; const redacted = { obj: { password: redactedString } }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact a string array', () => { - const validator = tShape({ passwords: t.list(tPassword) }); + const validator = tShape<{ +passwords: $ReadOnlyArray<string> }>({ + passwords: t.list(tPassword), + }); const object = { passwords: ['password', 'password'] }; const redacted = { passwords: [redactedString, redactedString] }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact a string inside a dict', () => { - const validator = tShape({ passwords: t.dict(t.String, tPassword) }); + const validator = tShape<{ +passwords: { +[string]: string } }>({ + passwords: t.dict(t.String, tPassword), + }); const object = { passwords: { a: 'password', b: 'password' } }; const redacted = { passwords: { a: redactedString, b: redactedString } }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact password dict key', () => { - const validator = tShape({ passwords: t.dict(tPassword, t.Bool) }); + const validator = tShape<{ +passwords: { +[string]: boolean } }>({ + passwords: t.dict(tPassword, t.Bool), + }); const object = { passwords: { password1: true, password2: false } }; const redacted: { +passwords: { [string]: mixed } } = { passwords: {} }; redacted.passwords[redactedString] = false; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact a string inside a union', () => { - const validator = tShape({ + const validator = tShape<{ + +password: string | boolean, + }>({ password: t.union([tPassword, t.String, t.Bool]), }); const object = { password: 'password' }; const redacted = { password: redactedString }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); it('should redact a string inside an object array', () => { - const validator = tShape({ + const validator = tShape<{ + +passwords: $ReadOnlyArray<{ + +password: string, + }>, + }>({ passwords: t.list(tShape({ password: tPassword })), }); const object = { passwords: [{ password: 'password' }] }; const redacted = { passwords: [{ password: redactedString }] }; expect(sanitizeInput(validator, object)).toStrictEqual(redacted); }); });