diff --git a/desktop/src/main.js b/desktop/src/main.js
index 9d63a3fef..4fcd31e53 100644
--- a/desktop/src/main.js
+++ b/desktop/src/main.js
@@ -1,287 +1,287 @@
 // @flow
 
 import {
   app,
   BrowserWindow,
   shell,
   Menu,
   ipcMain,
   systemPreferences,
   autoUpdater,
 } from 'electron/main';
 import fs from 'fs';
 import path from 'path';
 
-import { initAutoUpdate } from './auto-update';
-import { handleSquirrelEvent } from './handle-squirrel-event';
+import { initAutoUpdate } from './auto-update.js';
+import { handleSquirrelEvent } from './handle-squirrel-event.js';
 
 const isDev = process.env.ENV === 'dev';
 const url = isDev ? 'http://localhost/comm/' : 'https://web.comm.app';
 const isMac = process.platform === 'darwin';
 
 const scrollbarCSS = fs.promises.readFile(
   path.resolve(__dirname, '../scrollbar.css'),
   'utf8',
 );
 
 const setApplicationMenu = () => {
   let mainMenu = [];
   if (isMac) {
     mainMenu = [
       {
         label: app.name,
         submenu: [
           { role: 'about' },
           { type: 'separator' },
           { role: 'services' },
           { type: 'separator' },
           { role: 'hide' },
           { role: 'hideOthers' },
           { role: 'unhide' },
           { type: 'separator' },
           { role: 'quit' },
         ],
       },
     ];
   }
   const viewMenu = {
     label: 'View',
     submenu: [
       { role: 'reload' },
       { type: 'separator' },
       { role: 'resetZoom' },
       { role: 'zoomIn' },
       { role: 'zoomOut' },
       { type: 'separator' },
       { role: 'togglefullscreen' },
       { role: 'toggleDevTools' },
     ],
   };
   const windowMenu = {
     label: 'Window',
     submenu: [
       { role: 'minimize' },
       ...(isMac
         ? [
             { type: 'separator' },
             { role: 'front' },
             { type: 'separator' },
             { role: 'window' },
           ]
         : [{ role: 'close' }]),
     ],
   };
   const menu = Menu.buildFromTemplate([
     ...mainMenu,
     { role: 'fileMenu' },
     { role: 'editMenu' },
     viewMenu,
     windowMenu,
   ]);
   Menu.setApplicationMenu(menu);
 };
 
 const createMainWindow = () => {
   const win = new BrowserWindow({
     show: false,
     width: 1300,
     height: 800,
     minWidth: 1100,
     minHeight: 600,
     titleBarStyle: 'hidden',
     trafficLightPosition: { x: 20, y: 24 },
     titleBarOverlay: {
       color: '#0A0A0A',
       symbolColor: '#FFFFFF',
       height: 64,
     },
     backgroundColor: '#0A0A0A',
     webPreferences: {
       preload: path.resolve(__dirname, 'preload.js'),
     },
   });
 
   const updateNavigationState = () => {
     win.webContents.send('on-navigate', {
       canGoBack: win.webContents.canGoBack(),
       canGoForward: win.webContents.canGoForward(),
     });
   };
   win.webContents.on('did-navigate-in-page', updateNavigationState);
 
   const clearHistory = () => {
     win.webContents.clearHistory();
     updateNavigationState();
   };
   ipcMain.on('clear-history', clearHistory);
 
   const doubleClickTopBar = () => {
     if (isMac) {
       // Possible values for AppleActionOnDoubleClick are Maximize,
       // Minimize or None. We handle the last two inside this if.
       // Maximize (which is the only behaviour for other platforms)
       // is handled in the later block.
       const action = systemPreferences.getUserDefault(
         'AppleActionOnDoubleClick',
         'string',
       );
       if (action === 'None') {
         return;
       } else if (action === 'Minimize') {
         win.minimize();
         return;
       }
     }
 
     if (win.isMaximized()) {
       win.unmaximize();
     } else {
       win.maximize();
     }
   };
   ipcMain.on('double-click-top-bar', doubleClickTopBar);
 
   const updateDownloaded = (event, releaseNotes, releaseName) => {
     win.webContents.send('on-new-version-available', releaseName);
   };
   autoUpdater.on('update-downloaded', updateDownloaded);
 
   win.on('closed', () => {
     ipcMain.removeListener('clear-history', clearHistory);
     ipcMain.removeListener('double-click-top-bar', doubleClickTopBar);
     autoUpdater.removeListener('update-downloaded', updateDownloaded);
   });
 
   win.webContents.setWindowOpenHandler(({ url: openURL }) => {
     shell.openExternal(openURL);
     // Returning 'deny' prevents a new electron window from being created
     return { action: 'deny' };
   });
 
   (async () => {
     const css = await scrollbarCSS;
     win.webContents.insertCSS(css);
   })();
 
   win.loadURL(url);
   return win;
 };
 
 const createSplashWindow = () => {
   const win = new BrowserWindow({
     width: 300,
     height: 300,
     resizable: false,
     frame: false,
     alwaysOnTop: true,
     center: true,
     backgroundColor: '#111827',
   });
   win.loadFile(path.resolve(__dirname, '../pages/splash.html'));
   return win;
 };
 
 const createErrorWindow = () => {
   const win = new BrowserWindow({
     show: false,
     width: 400,
     height: 300,
     resizable: false,
     center: true,
     titleBarStyle: 'hidden',
     trafficLightPosition: { x: 20, y: 24 },
     backgroundColor: '#111827',
   });
   win.on('close', () => {
     app.quit();
   });
   win.loadFile(path.resolve(__dirname, '../pages/error.html'));
   return win;
 };
 
 const show = () => {
   const splash = createSplashWindow();
   const error = createErrorWindow();
   const main = createMainWindow();
 
   let loadedSuccessfully = true;
   main.webContents.on('did-fail-load', () => {
     loadedSuccessfully = false;
     if (!splash.isDestroyed()) {
       splash.destroy();
     }
     if (!error.isDestroyed()) {
       error.show();
     }
 
     setTimeout(() => {
       loadedSuccessfully = true;
       main.loadURL(url);
     }, 1000);
   });
 
   main.webContents.on('did-finish-load', () => {
     if (loadedSuccessfully) {
       if (!splash.isDestroyed()) {
         splash.destroy();
       }
       if (!error.isDestroyed()) {
         error.destroy();
       }
       main.show();
     }
   });
 };
 
 const run = () => {
   app.setName('Comm');
   setApplicationMenu();
 
   (async () => {
     await app.whenReady();
 
     if (app.isPackaged) {
       try {
         initAutoUpdate();
       } catch (error) {
         console.error(error);
       }
     }
 
     ipcMain.on('set-badge', (event, value) => {
       if (isMac) {
         app.dock.setBadge(value?.toString() ?? '');
       }
     });
     ipcMain.on('get-version', event => {
       event.returnValue = app.getVersion().toString();
     });
 
     show();
 
     app.on('activate', () => {
       if (BrowserWindow.getAllWindows().length === 0) {
         show();
       }
     });
   })();
 
   app.on('window-all-closed', () => {
     if (!isMac) {
       app.quit();
     }
   });
 };
 
 if (app.isPackaged && process.platform === 'win32') {
   if (!handleSquirrelEvent()) {
     run();
   }
 } else {
   run();
 }
diff --git a/desktop/src/preload.js b/desktop/src/preload.js
index 68cb96d43..825756158 100644
--- a/desktop/src/preload.js
+++ b/desktop/src/preload.js
@@ -1,26 +1,26 @@
 // @flow
 
 import { contextBridge, ipcRenderer } from 'electron/renderer';
 
-import type { ElectronBridge } from 'lib/types/electron-types';
+import type { ElectronBridge } from 'lib/types/electron-types.js';
 
 const bridge: ElectronBridge = {
   onNavigate: callback => {
     const withEvent = (event, ...args) => callback(...args);
     ipcRenderer.on('on-navigate', withEvent);
     return () => ipcRenderer.removeListener('on-navigate', withEvent);
   },
   clearHistory: () => ipcRenderer.send('clear-history'),
   doubleClickTopBar: () => ipcRenderer.send('double-click-top-bar'),
   setBadge: value => ipcRenderer.send('set-badge', value),
   version: ipcRenderer.sendSync('get-version'),
   onNewVersionAvailable: callback => {
     const withEvent = (event, ...args) => callback(...args);
     ipcRenderer.on('on-new-version-available', withEvent);
     return () =>
       ipcRenderer.removeListener('on-new-version-available', withEvent);
   },
   updateToNewVersion: () => ipcRenderer.send('update-to-new-version'),
 };
 
 contextBridge.exposeInMainWorld('electronContextBridge', bridge);
diff --git a/keyserver/src/bots/commbot.js b/keyserver/src/bots/commbot.js
index 59de62653..583ad1355 100644
--- a/keyserver/src/bots/commbot.js
+++ b/keyserver/src/bots/commbot.js
@@ -1,30 +1,30 @@
 // @flow
 
 import invariant from 'invariant';
 
-import bots from 'lib/facts/bots';
-import { threadTypes } from 'lib/types/thread-types';
+import bots from 'lib/facts/bots.js';
+import { threadTypes } from 'lib/types/thread-types.js';
 
-import { createThread } from '../creators/thread-creator';
-import { createBotViewer } from '../session/bots';
+import { createThread } from '../creators/thread-creator.js';
+import { createBotViewer } from '../session/bots.js';
 
 const { commbot } = bots;
 
 async function createCommbotThread(userID: string): Promise<string> {
   const commbotViewer = createBotViewer(commbot.userID);
   const newThreadRequest = {
     type: threadTypes.PERSONAL,
     initialMemberIDs: [userID],
   };
   const result = await createThread(commbotViewer, newThreadRequest, {
     forceAddMembers: true,
   });
   const { newThreadID } = result;
   invariant(
     newThreadID,
     'createThread should return newThreadID to bot viewer',
   );
   return newThreadID;
 }
 
 export { createCommbotThread };
diff --git a/keyserver/src/creators/account-creator.js b/keyserver/src/creators/account-creator.js
index 9c2614758..2968b7c3b 100644
--- a/keyserver/src/creators/account-creator.js
+++ b/keyserver/src/creators/account-creator.js
@@ -1,301 +1,301 @@
 // @flow
 
 import invariant from 'invariant';
 import bcrypt from 'twin-bcrypt';
 
-import ashoat from 'lib/facts/ashoat';
-import bots from 'lib/facts/bots';
-import genesis from 'lib/facts/genesis';
+import ashoat from 'lib/facts/ashoat.js';
+import bots from 'lib/facts/bots.js';
+import genesis from 'lib/facts/genesis.js';
 import { policyTypes } from 'lib/facts/policies.js';
 import {
   validUsernameRegex,
   oldValidUsernameRegex,
-} from
'lib/shared/account-utils.js'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import type { RegisterResponse, RegisterRequest, -} from 'lib/types/account-types'; +} from 'lib/types/account-types.js'; import type { PlatformDetails, DeviceTokenUpdateRequest, } from 'lib/types/device-types.js'; import type { CalendarQuery } from 'lib/types/entry-types.js'; -import { messageTypes } from 'lib/types/message-types'; +import { messageTypes } from 'lib/types/message-types.js'; import type { SIWESocialProof } from 'lib/types/siwe-types.js'; -import { threadTypes } from 'lib/types/thread-types'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; -import { reservedUsernamesSet } from 'lib/utils/reserved-users'; +import { threadTypes } from 'lib/types/thread-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; +import { reservedUsernamesSet } from 'lib/utils/reserved-users.js'; import { isValidEthereumAddress } from 'lib/utils/siwe-utils.js'; -import { dbQuery, SQL } from '../database/database'; -import { deleteCookie } from '../deleters/cookie-deleters'; -import { fetchThreadInfos } from '../fetchers/thread-fetchers'; +import { dbQuery, SQL } from '../database/database.js'; +import { deleteCookie } from '../deleters/cookie-deleters.js'; +import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; import { fetchLoggedInUserInfo, fetchKnownUserInfos, -} from '../fetchers/user-fetchers'; -import { verifyCalendarQueryThreadIDs } from '../responders/entry-responders'; -import { createNewUserCookie, setNewSession } from '../session/cookies'; -import { createScriptViewer } from '../session/scripts'; -import type { Viewer } from '../session/viewer'; -import { updateThread } from '../updaters/thread-updaters'; +} from '../fetchers/user-fetchers.js'; +import { verifyCalendarQueryThreadIDs } from '../responders/entry-responders.js'; +import { createNewUserCookie, setNewSession } from '../session/cookies.js'; +import { createScriptViewer } from '../session/scripts.js'; +import type { Viewer } from '../session/viewer.js'; +import { updateThread } from '../updaters/thread-updaters.js'; import { viewerAcknowledgmentUpdater } from '../updaters/viewer-acknowledgment-updater.js'; -import createIDs from './id-creator'; -import createMessages from './message-creator'; +import createIDs from './id-creator.js'; +import createMessages from './message-creator.js'; import { createThread, createPrivateThread, privateThreadDescription, -} from './thread-creator'; +} from './thread-creator.js'; const { commbot } = bots; const ashoatMessages = [ 'welcome to Comm!', 'as you inevitably discover bugs, have feature requests, or design ' + 'suggestions, feel free to message them to me in the app.', ]; const privateMessages = [privateThreadDescription]; async function createAccount( viewer: Viewer, request: RegisterRequest, ): Promise { if (request.password.trim() === '') { throw new ServerError('empty_password'); } const usernameRegex = hasMinCodeVersion(viewer.platformDetails, 69) ? 
validUsernameRegex : oldValidUsernameRegex; if (request.username.search(usernameRegex) === -1) { throw new ServerError('invalid_username'); } const usernameQuery = SQL` SELECT COUNT(id) AS count FROM users WHERE LCASE(username) = LCASE(${request.username}) `; const promises = [dbQuery(usernameQuery)]; const { calendarQuery } = request; if (calendarQuery) { promises.push(verifyCalendarQueryThreadIDs(calendarQuery)); } const [[usernameResult]] = await Promise.all(promises); if ( reservedUsernamesSet.has(request.username.toLowerCase()) || isValidEthereumAddress(request.username.toLowerCase()) ) { if (hasMinCodeVersion(viewer.platformDetails, 120)) { throw new ServerError('username_reserved'); } else { throw new ServerError('username_taken'); } } if (usernameResult[0].count !== 0) { throw new ServerError('username_taken'); } const hash = bcrypt.hashSync(request.password); const time = Date.now(); const deviceToken = request.deviceTokenUpdateRequest ? request.deviceTokenUpdateRequest.deviceToken : viewer.deviceToken; const [id] = await createIDs('users', 1); const newUserRow = [id, request.username, hash, time]; const newUserQuery = SQL` INSERT INTO users(id, username, hash, creation_time) VALUES ${[newUserRow]} `; const [userViewerData] = await Promise.all([ createNewUserCookie(id, { platformDetails: request.platformDetails, deviceToken, }), deleteCookie(viewer.cookieID), dbQuery(newUserQuery), ]); viewer.setNewCookie(userViewerData); if (calendarQuery) { await setNewSession(viewer, calendarQuery, 0); } await Promise.all([ updateThread( createScriptViewer(ashoat.id), { threadID: genesis.id, changes: { newMemberIDs: [id] }, }, { forceAddMembers: true, silenceMessages: true, ignorePermissions: true }, ), viewerAcknowledgmentUpdater(viewer, policyTypes.tosAndPrivacyPolicy), ]); const [privateThreadResult, ashoatThreadResult] = await Promise.all([ createPrivateThread(viewer, request.username), createThread( viewer, { type: threadTypes.PERSONAL, initialMemberIDs: [ashoat.id], }, { forceAddMembers: true }, ), ]); const ashoatThreadID = ashoatThreadResult.newThreadInfo ? ashoatThreadResult.newThreadInfo.id : ashoatThreadResult.newThreadID; const privateThreadID = privateThreadResult.newThreadInfo ? 
privateThreadResult.newThreadInfo.id : privateThreadResult.newThreadID; invariant( ashoatThreadID && privateThreadID, 'createThread should return either newThreadInfo or newThreadID', ); let messageTime = Date.now(); const ashoatMessageDatas = ashoatMessages.map(message => ({ type: messageTypes.TEXT, threadID: ashoatThreadID, creatorID: ashoat.id, time: messageTime++, text: message, })); const privateMessageDatas = privateMessages.map(message => ({ type: messageTypes.TEXT, threadID: privateThreadID, creatorID: commbot.userID, time: messageTime++, text: message, })); const messageDatas = [...ashoatMessageDatas, ...privateMessageDatas]; const [ messageInfos, threadsResult, userInfos, currentUserInfo, ] = await Promise.all([ createMessages(viewer, messageDatas), fetchThreadInfos(viewer), fetchKnownUserInfos(viewer), fetchLoggedInUserInfo(viewer), ]); const rawMessageInfos = [ ...ashoatThreadResult.newMessageInfos, ...privateThreadResult.newMessageInfos, ...messageInfos, ]; return { id, rawMessageInfos, currentUserInfo, cookieChange: { threadInfos: threadsResult.threadInfos, userInfos: values(userInfos), }, }; } export type ProcessSIWEAccountCreationRequest = { +address: string, +calendarQuery: CalendarQuery, +deviceTokenUpdateRequest?: ?DeviceTokenUpdateRequest, +platformDetails: PlatformDetails, +primaryIdentityPublicKey: ?string, +socialProof: SIWESocialProof, }; // Note: `processSIWEAccountCreation(...)` assumes that the validity of // `ProcessSIWEAccountCreationRequest` was checked at call site. async function processSIWEAccountCreation( viewer: Viewer, request: ProcessSIWEAccountCreationRequest, ): Promise { const { calendarQuery } = request; await verifyCalendarQueryThreadIDs(calendarQuery); const time = Date.now(); const deviceToken = request.deviceTokenUpdateRequest ? request.deviceTokenUpdateRequest.deviceToken : viewer.deviceToken; const [id] = await createIDs('users', 1); const newUserRow = [id, request.address, request.address, time]; const newUserQuery = SQL` INSERT INTO users(id, username, ethereum_address, creation_time) VALUES ${[newUserRow]} `; const [userViewerData] = await Promise.all([ createNewUserCookie(id, { platformDetails: request.platformDetails, deviceToken, primaryIdentityPublicKey: request.primaryIdentityPublicKey, socialProof: request.socialProof, }), deleteCookie(viewer.cookieID), dbQuery(newUserQuery), ]); viewer.setNewCookie(userViewerData); await setNewSession(viewer, calendarQuery, 0); await Promise.all([ updateThread( createScriptViewer(ashoat.id), { threadID: genesis.id, changes: { newMemberIDs: [id] }, }, { forceAddMembers: true, silenceMessages: true, ignorePermissions: true }, ), viewerAcknowledgmentUpdater(viewer, policyTypes.tosAndPrivacyPolicy), ]); const [privateThreadResult, ashoatThreadResult] = await Promise.all([ createPrivateThread(viewer, request.address), createThread( viewer, { type: threadTypes.PERSONAL, initialMemberIDs: [ashoat.id], }, { forceAddMembers: true }, ), ]); const ashoatThreadID = ashoatThreadResult.newThreadInfo ? ashoatThreadResult.newThreadInfo.id : ashoatThreadResult.newThreadID; const privateThreadID = privateThreadResult.newThreadInfo ? 
privateThreadResult.newThreadInfo.id : privateThreadResult.newThreadID; invariant( ashoatThreadID && privateThreadID, 'createThread should return either newThreadInfo or newThreadID', ); let messageTime = Date.now(); const ashoatMessageDatas = ashoatMessages.map(message => ({ type: messageTypes.TEXT, threadID: ashoatThreadID, creatorID: ashoat.id, time: messageTime++, text: message, })); const privateMessageDatas = privateMessages.map(message => ({ type: messageTypes.TEXT, threadID: privateThreadID, creatorID: commbot.userID, time: messageTime++, text: message, })); const messageDatas = [...ashoatMessageDatas, ...privateMessageDatas]; await Promise.all([createMessages(viewer, messageDatas)]); return id; } export { createAccount, processSIWEAccountCreation }; diff --git a/keyserver/src/creators/day-creator.js b/keyserver/src/creators/day-creator.js index a50aa9e32..73fcaacec 100644 --- a/keyserver/src/creators/day-creator.js +++ b/keyserver/src/creators/day-creator.js @@ -1,57 +1,57 @@ // @flow -import { ServerError } from 'lib/utils/errors'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import createIDs from './id-creator'; +import { dbQuery, SQL } from '../database/database.js'; +import createIDs from './id-creator.js'; const MYSQL_DUPLICATE_ENTRY_FOR_KEY_ERROR_CODE = 1062; async function fetchOrCreateDayID( threadID: string, date: string, ): Promise { if (!threadID || !date) { throw new ServerError('invalid_parameters'); } const existingQuery = SQL` SELECT id FROM days WHERE date = ${date} AND thread = ${threadID} `; const [existingResult] = await dbQuery(existingQuery); if (existingResult.length > 0) { const existingRow = existingResult[0]; return existingRow.id.toString(); } const [id] = await createIDs('days', 1); const insertQuery = SQL` INSERT INTO days(id, date, thread) VALUES ${[[id, date, threadID]]} `; try { await dbQuery(insertQuery); return id; } catch (e) { if (e.errno !== MYSQL_DUPLICATE_ENTRY_FOR_KEY_ERROR_CODE) { throw new ServerError('unknown_error'); } // There's a race condition that can happen if two people start editing // the same date at the same time, and two IDs are created for the same // row. If this happens, the UNIQUE constraint `date_thread` should be // triggered on the second racer, and for that execution path our last // query will have failed. We will recover by re-querying for the ID here, // and deleting the extra ID we created from the `ids` table. 
const deleteIDQuery = SQL`DELETE FROM ids WHERE id = ${id}`; const [[raceResult]] = await Promise.all([ dbQuery(existingQuery), dbQuery(deleteIDQuery), ]); if (raceResult.length === 0) { throw new ServerError('unknown_error'); } const raceRow = raceResult[0]; return raceRow.id.toString(); } } export default fetchOrCreateDayID; diff --git a/keyserver/src/creators/entry-creator.js b/keyserver/src/creators/entry-creator.js index 63190f9c6..0843139a2 100644 --- a/keyserver/src/creators/entry-creator.js +++ b/keyserver/src/creators/entry-creator.js @@ -1,149 +1,149 @@ // @flow import invariant from 'invariant'; import type { CreateEntryRequest, SaveEntryResponse, -} from 'lib/types/entry-types'; -import { messageTypes } from 'lib/types/message-types'; -import { threadPermissions } from 'lib/types/thread-types'; -import { dateFromString } from 'lib/utils/date-utils'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; +} from 'lib/types/entry-types.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { dateFromString } from 'lib/utils/date-utils.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; -import fetchOrCreateDayID from '../creators/day-creator'; -import createIDs from '../creators/id-creator'; -import createMessages from '../creators/message-creator'; -import { dbQuery, SQL } from '../database/database'; -import { fetchEntryInfoForLocalID } from '../fetchers/entry-fetchers'; -import { fetchMessageInfoForEntryAction } from '../fetchers/message-fetchers'; -import { checkThreadPermission } from '../fetchers/thread-permission-fetchers'; -import { fetchUpdateInfoForEntryUpdate } from '../fetchers/update-fetchers'; -import type { Viewer } from '../session/viewer'; -import { createUpdateDatasForChangedEntryInfo } from '../updaters/entry-updaters'; -import { creationString } from '../utils/idempotent'; +import fetchOrCreateDayID from '../creators/day-creator.js'; +import createIDs from '../creators/id-creator.js'; +import createMessages from '../creators/message-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchEntryInfoForLocalID } from '../fetchers/entry-fetchers.js'; +import { fetchMessageInfoForEntryAction } from '../fetchers/message-fetchers.js'; +import { checkThreadPermission } from '../fetchers/thread-permission-fetchers.js'; +import { fetchUpdateInfoForEntryUpdate } from '../fetchers/update-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import { createUpdateDatasForChangedEntryInfo } from '../updaters/entry-updaters.js'; +import { creationString } from '../utils/idempotent.js'; async function createEntry( viewer: Viewer, request: CreateEntryRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const hasPermission = await checkThreadPermission( viewer, request.threadID, threadPermissions.EDIT_ENTRIES, ); if (!hasPermission) { throw new ServerError('invalid_credentials'); } const existingEntryInfo = await fetchEntryInfoForLocalID( viewer, request.localID, ); if (existingEntryInfo) { const { id: entryID, threadID } = existingEntryInfo; invariant(entryID, 'should be set'); const [rawMessageInfo, fetchUpdatesResult] = await Promise.all([ fetchMessageInfoForEntryAction( viewer, messageTypes.CREATE_ENTRY, entryID, threadID, ), fetchUpdateInfoForEntryUpdate(viewer, entryID), ]); return { entryID, newMessageInfos: rawMessageInfo ? 
[rawMessageInfo] : [], updatesResult: { viewerUpdates: fetchUpdatesResult.updateInfos, userInfos: values(fetchUpdatesResult.userInfos), }, }; } const [dayID, [entryID], [revisionID]] = await Promise.all([ fetchOrCreateDayID(request.threadID, request.date), createIDs('entries', 1), createIDs('revisions', 1), ]); const creation = request.localID && viewer.hasSessionInfo ? creationString(viewer, request.localID) : null; const viewerID = viewer.userID; const entryRow = [ entryID, dayID, request.text, viewerID, request.timestamp, request.timestamp, 0, creation, ]; const revisionRow = [ revisionID, entryID, viewerID, request.text, request.timestamp, viewer.session, request.timestamp, 0, ]; const entryInsertQuery = SQL` INSERT INTO entries(id, day, text, creator, creation_time, last_update, deleted, creation) VALUES ${[entryRow]} `; const revisionInsertQuery = SQL` INSERT INTO revisions(id, entry, author, text, creation_time, session, last_update, deleted) VALUES ${[revisionRow]} `; const messageData = { type: messageTypes.CREATE_ENTRY, threadID: request.threadID, creatorID: viewerID, time: Date.now(), entryID, date: request.date, text: request.text, }; const date = dateFromString(request.date); const rawEntryInfo = { id: entryID, threadID: request.threadID, text: request.text, year: date.getFullYear(), month: date.getMonth() + 1, day: date.getDate(), creationTime: request.timestamp, creatorID: viewerID, deleted: false, }; const [newMessageInfos, updatesResult] = await Promise.all([ createMessages(viewer, [messageData]), createUpdateDatasForChangedEntryInfo( viewer, null, rawEntryInfo, request.calendarQuery, ), dbQuery(entryInsertQuery), dbQuery(revisionInsertQuery), ]); return { entryID, newMessageInfos, updatesResult }; } export default createEntry; diff --git a/keyserver/src/creators/id-creator.js b/keyserver/src/creators/id-creator.js index efca7b2a4..9b82dc825 100644 --- a/keyserver/src/creators/id-creator.js +++ b/keyserver/src/creators/id-creator.js @@ -1,25 +1,25 @@ // @flow import invariant from 'invariant'; -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function createIDs( tableName: string, numIDsToCreate: number, ): Promise { if (numIDsToCreate === 0) { return []; } const idInserts = Array(numIDsToCreate).fill([tableName]); const query = SQL`INSERT INTO ids(table_name) VALUES ${idInserts}`; const [result] = await dbQuery(query); const firstNewID = result.insertId; invariant(firstNewID !== null && firstNewID !== undefined, 'should be set'); return Array.from(new Array(numIDsToCreate), (val, index) => (index + firstNewID).toString(), ); } export default createIDs; diff --git a/keyserver/src/creators/message-creator.js b/keyserver/src/creators/message-creator.js index c8e51190b..4dffa5a69 100644 --- a/keyserver/src/creators/message-creator.js +++ b/keyserver/src/creators/message-creator.js @@ -1,634 +1,634 @@ // @flow import invariant from 'invariant'; -import _pickBy from 'lodash/fp/pickBy'; +import _pickBy from 'lodash/fp/pickBy.js'; -import { permissionLookup } from 'lib/permissions/thread-permissions'; +import { permissionLookup } from 'lib/permissions/thread-permissions.js'; import { rawMessageInfoFromMessageData, shimUnsupportedRawMessageInfos, stripLocalIDs, -} from 'lib/shared/message-utils'; -import { pushTypes } from 'lib/shared/messages/message-spec'; -import { messageSpecs } from 'lib/shared/messages/message-specs'; +} from 'lib/shared/message-utils.js'; +import { pushTypes } from 
'lib/shared/messages/message-spec.js'; +import { messageSpecs } from 'lib/shared/messages/message-specs.js'; import { messageTypes, messageDataLocalID, type MessageData, type RawMessageInfo, -} from 'lib/types/message-types'; -import { redisMessageTypes } from 'lib/types/redis-types'; -import { threadPermissions } from 'lib/types/thread-types'; -import { updateTypes } from 'lib/types/update-types'; -import { promiseAll } from 'lib/utils/promises'; +} from 'lib/types/message-types.js'; +import { redisMessageTypes } from 'lib/types/redis-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import { promiseAll } from 'lib/utils/promises.js'; import { dbQuery, SQL, appendSQLArray, mergeOrConditions, -} from '../database/database'; +} from '../database/database.js'; import { fetchMessageInfoForLocalID, fetchMessageInfoByID, -} from '../fetchers/message-fetchers'; -import { fetchOtherSessionsForViewer } from '../fetchers/session-fetchers'; -import { fetchServerThreadInfos } from '../fetchers/thread-fetchers'; -import { sendPushNotifs } from '../push/send'; -import { handleAsyncPromise } from '../responders/handlers'; -import type { Viewer } from '../session/viewer'; -import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times'; -import { publisher } from '../socket/redis'; -import { creationString } from '../utils/idempotent'; -import createIDs from './id-creator'; -import type { UpdatesForCurrentSession } from './update-creator'; -import { createUpdates } from './update-creator'; +} from '../fetchers/message-fetchers.js'; +import { fetchOtherSessionsForViewer } from '../fetchers/session-fetchers.js'; +import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; +import { sendPushNotifs } from '../push/send.js'; +import { handleAsyncPromise } from '../responders/handlers.js'; +import type { Viewer } from '../session/viewer.js'; +import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times.js'; +import { publisher } from '../socket/redis.js'; +import { creationString } from '../utils/idempotent.js'; +import createIDs from './id-creator.js'; +import type { UpdatesForCurrentSession } from './update-creator.js'; +import { createUpdates } from './update-creator.js'; type UserThreadInfo = { +devices: Map< string, { +deviceType: string, +deviceToken: string, +codeVersion: ?string, }, >, +threadIDs: Set, +notFocusedThreadIDs: Set, +userNotMemberOfSubthreads: Set, +subthreadsCanSetToUnread: Set, }; type LatestMessagesPerUser = Map< string, $ReadOnlyMap< string, { +latestMessage: string, +latestReadMessage?: string, }, >, >; type LatestMessages = $ReadOnlyArray<{ +userID: string, +threadID: string, +latestMessage: string, +latestReadMessage: ?string, }>; // Does not do permission checks! 
(checkThreadPermission) async function createMessages( viewer: Viewer, messageDatas: $ReadOnlyArray, updatesForCurrentSession?: UpdatesForCurrentSession = 'return', ): Promise { if (messageDatas.length === 0) { return []; } const messageInfos: RawMessageInfo[] = []; const newMessageDatas: MessageData[] = []; const existingMessages = await Promise.all( messageDatas.map(messageData => fetchMessageInfoForLocalID(viewer, messageDataLocalID(messageData)), ), ); for (let i = 0; i < existingMessages.length; i++) { const existingMessage = existingMessages[i]; if (existingMessage) { messageInfos.push(existingMessage); } else { newMessageDatas.push(messageDatas[i]); } } if (newMessageDatas.length === 0) { return shimUnsupportedRawMessageInfos(messageInfos, viewer.platformDetails); } const ids = await createIDs('messages', newMessageDatas.length); const subthreadPermissionsToCheck: Set = new Set(); const threadsToMessageIndices: Map = new Map(); const messageInsertRows = []; for (let i = 0; i < newMessageDatas.length; i++) { const messageData = newMessageDatas[i]; const threadID = messageData.threadID; const creatorID = messageData.creatorID; if (messageData.type === messageTypes.CREATE_SUB_THREAD) { subthreadPermissionsToCheck.add(messageData.childThreadID); } let messageIndices = threadsToMessageIndices.get(threadID); if (!messageIndices) { messageIndices = []; threadsToMessageIndices.set(threadID, messageIndices); } messageIndices.push(i); const content = messageSpecs[messageData.type].messageContentForServerDB?.( messageData, ); const creation = messageData.localID && viewer.hasSessionInfo ? creationString(viewer, messageData.localID) : null; const targetMessageID = messageData.targetMessageID ? messageData.targetMessageID : null; messageInsertRows.push([ ids[i], threadID, creatorID, messageData.type, content, messageData.time, creation, targetMessageID, ]); messageInfos.push(rawMessageInfoFromMessageData(messageData, ids[i])); } if (viewer.isScriptViewer) { await postMessageSend( viewer, threadsToMessageIndices, subthreadPermissionsToCheck, stripLocalIDs(messageInfos), updatesForCurrentSession, ); } else { // We aren't awaiting because this function calls external services and we // don't want to delay the response handleAsyncPromise( postMessageSend( viewer, threadsToMessageIndices, subthreadPermissionsToCheck, stripLocalIDs(messageInfos), updatesForCurrentSession, ), ); } const messageInsertQuery = SQL` INSERT INTO messages(id, thread, user, type, content, time, creation, target_message) VALUES ${messageInsertRows} `; await Promise.all([ dbQuery(messageInsertQuery), updateRepliesCount(threadsToMessageIndices, newMessageDatas), ]); if (updatesForCurrentSession !== 'return') { return []; } return shimUnsupportedRawMessageInfos(messageInfos, viewer.platformDetails); } async function updateRepliesCount( threadsToMessageIndices: Map, newMessageDatas: MessageData[], ) { const updatedThreads = []; const updateThreads = SQL` UPDATE threads SET replies_count = replies_count + (CASE `; const membershipConditions = []; for (const [threadID, messages] of threadsToMessageIndices.entries()) { const newRepliesIncrease = messages .map(i => newMessageDatas[i].type) .filter(type => messageSpecs[type].includedInRepliesCount).length; if (newRepliesIncrease === 0) { continue; } updateThreads.append(SQL` WHEN id = ${threadID} THEN ${newRepliesIncrease} `); updatedThreads.push(threadID); const senders = messages.map(i => newMessageDatas[i].creatorID); membershipConditions.push( SQL`thread = ${threadID} AND user IN 
(${senders})`, ); } updateThreads.append(SQL` ELSE 0 END) WHERE id IN (${updatedThreads}) AND source_message IS NOT NULL `); const updateMemberships = SQL` UPDATE memberships SET sender = 1 WHERE sender = 0 AND ( `; updateMemberships.append(mergeOrConditions(membershipConditions)); updateMemberships.append(SQL` ) `); if (updatedThreads.length > 0) { const [{ threadInfos: serverThreadInfos }] = await Promise.all([ fetchServerThreadInfos(SQL`t.id IN (${updatedThreads})`), dbQuery(updateThreads), dbQuery(updateMemberships), ]); const time = Date.now(); const updates = []; for (const threadID in serverThreadInfos) { for (const member of serverThreadInfos[threadID].members) { updates.push({ userID: member.id, time, threadID, type: updateTypes.UPDATE_THREAD, }); } } await createUpdates(updates); } } // Handles: // (1) Sending push notifs // (2) Setting threads to unread and generating corresponding UpdateInfos // (3) Publishing to Redis so that active sockets pass on new messages async function postMessageSend( viewer: Viewer, threadsToMessageIndices: Map, subthreadPermissionsToCheck: Set, messageInfos: RawMessageInfo[], updatesForCurrentSession: UpdatesForCurrentSession, ) { let joinIndex = 0; let subthreadSelects = ''; const subthreadJoins = []; for (const subthread of subthreadPermissionsToCheck) { const index = joinIndex++; subthreadSelects += ` , stm${index}.permissions AS subthread${subthread}_permissions, stm${index}.role AS subthread${subthread}_role `; const join = SQL`LEFT JOIN memberships `; join.append(`stm${index} ON stm${index}.`); join.append(SQL`thread = ${subthread} AND `); join.append(`stm${index}.user = m.user`); subthreadJoins.push(join); } const time = earliestFocusedTimeConsideredExpired(); const visibleExtractString = `$.${threadPermissions.VISIBLE}.value`; const query = SQL` SELECT m.user, m.thread, c.platform, c.device_token, c.versions, f.user AS focused_user `; query.append(subthreadSelects); query.append(SQL` FROM memberships m LEFT JOIN cookies c ON c.user = m.user AND c.device_token IS NOT NULL LEFT JOIN focused f ON f.user = m.user AND f.thread = m.thread AND f.time > ${time} `); appendSQLArray(query, subthreadJoins, SQL` `); query.append(SQL` WHERE (m.role > 0 OR f.user IS NOT NULL) AND JSON_EXTRACT(m.permissions, ${visibleExtractString}) IS TRUE AND m.thread IN (${[...threadsToMessageIndices.keys()]}) `); const perUserInfo = new Map(); const [result] = await dbQuery(query); for (const row of result) { const userID = row.user.toString(); const threadID = row.thread.toString(); const deviceToken = row.device_token; const focusedUser = !!row.focused_user; const { platform } = row; const versions = JSON.parse(row.versions); let thisUserInfo = perUserInfo.get(userID); if (!thisUserInfo) { thisUserInfo = { devices: new Map(), threadIDs: new Set(), notFocusedThreadIDs: new Set(), userNotMemberOfSubthreads: new Set(), subthreadsCanSetToUnread: new Set(), }; perUserInfo.set(userID, thisUserInfo); // Subthread info will be the same for each subthread, so we only parse // it once for (const subthread of subthreadPermissionsToCheck) { const isSubthreadMember = row[`subthread${subthread}_role`] > 0; const rawSubthreadPermissions = row[`subthread${subthread}_permissions`]; const subthreadPermissions = JSON.parse(rawSubthreadPermissions); const canSeeSubthread = permissionLookup( subthreadPermissions, threadPermissions.KNOW_OF, ); if (!canSeeSubthread) { continue; } thisUserInfo.subthreadsCanSetToUnread.add(subthread); // Only include the notification from the superthread if 
there is no // notification from the subthread if ( !isSubthreadMember || !permissionLookup(subthreadPermissions, threadPermissions.VISIBLE) ) { thisUserInfo.userNotMemberOfSubthreads.add(subthread); } } } if (deviceToken) { thisUserInfo.devices.set(deviceToken, { deviceType: platform, deviceToken, codeVersion: versions ? versions.codeVersion : null, }); } thisUserInfo.threadIDs.add(threadID); if (!focusedUser) { thisUserInfo.notFocusedThreadIDs.add(threadID); } } const messageInfosPerUser = {}; const latestMessagesPerUser: LatestMessagesPerUser = new Map(); const userPushInfoPromises = {}; for (const pair of perUserInfo) { const [userID, preUserPushInfo] = pair; const userMessageInfos = []; for (const threadID of preUserPushInfo.threadIDs) { const messageIndices = threadsToMessageIndices.get(threadID); invariant(messageIndices, `indices should exist for thread ${threadID}`); for (const messageIndex of messageIndices) { const messageInfo = messageInfos[messageIndex]; userMessageInfos.push(messageInfo); } } if (userMessageInfos.length > 0) { messageInfosPerUser[userID] = userMessageInfos; } latestMessagesPerUser.set( userID, determineLatestMessagesPerThread( preUserPushInfo, userID, threadsToMessageIndices, messageInfos, ), ); const { userNotMemberOfSubthreads } = preUserPushInfo; const userDevices = [...preUserPushInfo.devices.values()]; if (userDevices.length === 0) { continue; } const userPushInfoMessageInfoPromises = []; for (const threadID of preUserPushInfo.notFocusedThreadIDs) { const messageIndices = threadsToMessageIndices.get(threadID); invariant(messageIndices, `indices should exist for thread ${threadID}`); userPushInfoMessageInfoPromises.push( ...messageIndices.map(async messageIndex => { const messageInfo = messageInfos[messageIndex]; const { type } = messageInfo; if (messageInfo.creatorID === userID) { // We never send a user notifs about their own activity return undefined; } const { generatesNotifs } = messageSpecs[type]; const doesGenerateNotif = await generatesNotifs(messageInfo, { notifTargetUserID: userID, userNotMemberOfSubthreads, fetchMessageInfoByID: (messageID: string) => fetchMessageInfoByID(viewer, messageID), }); return doesGenerateNotif === pushTypes.NOTIF ? 
messageInfo : undefined; }), ); } const userPushInfoPromise = (async () => { const pushMessageInfos = await Promise.all( userPushInfoMessageInfoPromises, ); const filteredMessageInfos = pushMessageInfos.filter(Boolean); if (filteredMessageInfos.length === 0) { return undefined; } return { devices: userDevices, messageInfos: filteredMessageInfos, }; })(); userPushInfoPromises[userID] = userPushInfoPromise; } const latestMessages = flattenLatestMessagesPerUser(latestMessagesPerUser); const [pushInfo] = await Promise.all([ promiseAll(userPushInfoPromises), createReadStatusUpdates(latestMessages), redisPublish(viewer, messageInfosPerUser, updatesForCurrentSession), updateLatestMessages(latestMessages), ]); await sendPushNotifs(_pickBy(Boolean)(pushInfo)); } async function redisPublish( viewer: Viewer, messageInfosPerUser: { [userID: string]: $ReadOnlyArray }, updatesForCurrentSession: UpdatesForCurrentSession, ) { const avoidBroadcastingToCurrentSession = viewer.hasSessionInfo && updatesForCurrentSession !== 'broadcast'; for (const userID in messageInfosPerUser) { if (userID === viewer.userID && avoidBroadcastingToCurrentSession) { continue; } const messageInfos = messageInfosPerUser[userID]; publisher.sendMessage( { userID }, { type: redisMessageTypes.NEW_MESSAGES, messages: messageInfos, }, ); } const viewerMessageInfos = messageInfosPerUser[viewer.userID]; if (!viewerMessageInfos || !avoidBroadcastingToCurrentSession) { return; } const sessionIDs = await fetchOtherSessionsForViewer(viewer); for (const sessionID of sessionIDs) { publisher.sendMessage( { userID: viewer.userID, sessionID }, { type: redisMessageTypes.NEW_MESSAGES, messages: viewerMessageInfos, }, ); } } function determineLatestMessagesPerThread( preUserPushInfo: UserThreadInfo, userID: string, threadsToMessageIndices: $ReadOnlyMap>, messageInfos: $ReadOnlyArray, ) { const { threadIDs, notFocusedThreadIDs, subthreadsCanSetToUnread, } = preUserPushInfo; const latestMessagesPerThread = new Map(); for (const threadID of threadIDs) { const messageIndices = threadsToMessageIndices.get(threadID); invariant(messageIndices, `indices should exist for thread ${threadID}`); for (const messageIndex of messageIndices) { const messageInfo = messageInfos[messageIndex]; if ( messageInfo.type === messageTypes.CREATE_SUB_THREAD && !subthreadsCanSetToUnread.has(messageInfo.childThreadID) ) { continue; } const messageID = messageInfo.id; invariant( messageID, 'message ID should exist in determineLatestMessagesPerThread', ); if ( notFocusedThreadIDs.has(threadID) && messageInfo.creatorID !== userID ) { latestMessagesPerThread.set(threadID, { latestMessage: messageID, }); } else { latestMessagesPerThread.set(threadID, { latestMessage: messageID, latestReadMessage: messageID, }); } } } return latestMessagesPerThread; } function flattenLatestMessagesPerUser( latestMessagesPerUser: LatestMessagesPerUser, ): LatestMessages { const result = []; for (const [userID, latestMessagesPerThread] of latestMessagesPerUser) { for (const [threadID, latestMessages] of latestMessagesPerThread) { result.push({ userID, threadID, latestMessage: latestMessages.latestMessage, latestReadMessage: latestMessages.latestReadMessage, }); } } return result; } async function createReadStatusUpdates(latestMessages: LatestMessages) { const now = Date.now(); const readStatusUpdates = latestMessages .filter(message => !message.latestReadMessage) .map(({ userID, threadID }) => ({ type: updateTypes.UPDATE_THREAD_READ_STATUS, userID, time: now, threadID, unread: true, })); if 
(readStatusUpdates.length === 0) { return; } return await createUpdates(readStatusUpdates); } function updateLatestMessages(latestMessages: LatestMessages) { if (latestMessages.length === 0) { return; } const query = SQL` UPDATE memberships SET `; const lastMessageExpression = SQL` last_message = GREATEST(last_message, CASE `; const lastReadMessageExpression = SQL` , last_read_message = GREATEST(last_read_message, CASE `; let shouldUpdateLastReadMessage = false; for (const { userID, threadID, latestMessage, latestReadMessage, } of latestMessages) { lastMessageExpression.append(SQL` WHEN user = ${userID} AND thread = ${threadID} THEN ${latestMessage} `); if (latestReadMessage) { shouldUpdateLastReadMessage = true; lastReadMessageExpression.append(SQL` WHEN user = ${userID} AND thread = ${threadID} THEN ${latestReadMessage} `); } } lastMessageExpression.append(SQL` ELSE last_message END) `); lastReadMessageExpression.append(SQL` ELSE last_read_message END) `); const conditions = latestMessages.map( ({ userID, threadID }) => SQL`(user = ${userID} AND thread = ${threadID})`, ); query.append(lastMessageExpression); if (shouldUpdateLastReadMessage) { query.append(lastReadMessageExpression); } query.append(SQL`WHERE `); query.append(mergeOrConditions(conditions)); return dbQuery(query); } export default createMessages; diff --git a/keyserver/src/creators/message-report-creator.js b/keyserver/src/creators/message-report-creator.js index b1d827766..af1ccc4bf 100644 --- a/keyserver/src/creators/message-report-creator.js +++ b/keyserver/src/creators/message-report-creator.js @@ -1,141 +1,141 @@ // @flow -import bots from 'lib/facts/bots'; -import { createMessageQuote } from 'lib/shared/message-utils'; -import { type MessageReportCreationRequest } from 'lib/types/message-report-types'; -import { messageTypes } from 'lib/types/message-types'; -import type { RawMessageInfo } from 'lib/types/message-types'; -import type { ServerThreadInfo } from 'lib/types/thread-types'; -import { ServerError } from 'lib/utils/errors'; -import { promiseAll } from 'lib/utils/promises'; - -import { createCommbotThread } from '../bots/commbot'; -import { fetchMessageInfoByID } from '../fetchers/message-fetchers'; +import bots from 'lib/facts/bots.js'; +import { createMessageQuote } from 'lib/shared/message-utils.js'; +import { type MessageReportCreationRequest } from 'lib/types/message-report-types.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import type { RawMessageInfo } from 'lib/types/message-types.js'; +import type { ServerThreadInfo } from 'lib/types/thread-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { promiseAll } from 'lib/utils/promises.js'; + +import { createCommbotThread } from '../bots/commbot.js'; +import { fetchMessageInfoByID } from '../fetchers/message-fetchers.js'; import { fetchPersonalThreadID, serverThreadInfoFromMessageInfo, -} from '../fetchers/thread-fetchers'; +} from '../fetchers/thread-fetchers.js'; import { fetchUsername, fetchKeyserverAdminID, -} from '../fetchers/user-fetchers'; -import type { Viewer } from '../session/viewer'; -import createMessages from './message-creator'; +} from '../fetchers/user-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import createMessages from './message-creator.js'; const { commbot } = bots; type MessageReportData = { +reportedMessageText: ?string, +reporterUsername: ?string, +commbotThreadID: string, +reportedThread: ?ServerThreadInfo, +reportedMessageAuthor: ?string, }; async function 
createMessageReport( viewer: Viewer, request: MessageReportCreationRequest, ): Promise { const { reportedMessageText, reporterUsername, commbotThreadID, reportedThread, reportedMessageAuthor, } = await fetchMessageReportData(viewer, request); const reportMessage = getCommbotMessage( reporterUsername, reportedMessageAuthor, reportedThread?.name, reportedMessageText, ); const time = Date.now(); const result = await createMessages(viewer, [ { type: messageTypes.TEXT, threadID: commbotThreadID, creatorID: commbot.userID, time, text: reportMessage, }, ]); if (result.length === 0) { throw new ServerError('message_report_failed'); } return result; } async function fetchMessageReportData( viewer: Viewer, request: MessageReportCreationRequest, ): Promise { const keyserverAdminIDPromise = fetchKeyserverAdminID(); const reportedMessagePromise = fetchMessageInfoByID( viewer, request.messageID, ); const promises = {}; promises.viewerUsername = fetchUsername(viewer.id); const keyserverAdminID = await keyserverAdminIDPromise; if (!keyserverAdminID) { throw new ServerError('keyserver_admin_not_found'); } promises.commbotThreadID = getCommbotThreadID(keyserverAdminID); const reportedMessage = await reportedMessagePromise; if (reportedMessage) { promises.reportedThread = serverThreadInfoFromMessageInfo(reportedMessage); } const reportedMessageAuthorID = reportedMessage?.creatorID; if (reportedMessageAuthorID) { promises.reportedMessageAuthor = fetchUsername(reportedMessageAuthorID); } const reportedMessageText = reportedMessage?.type === 0 ? reportedMessage.text : null; const { viewerUsername, commbotThreadID, reportedThread, reportedMessageAuthor, } = await promiseAll(promises); return { reportedMessageText, reporterUsername: viewerUsername, commbotThreadID, reportedThread, reportedMessageAuthor, }; } async function getCommbotThreadID(userID: string): Promise { const commbotThreadID = await fetchPersonalThreadID(userID, commbot.userID); return commbotThreadID ?? createCommbotThread(userID); } function getCommbotMessage( reporterUsername: ?string, messageAuthorUsername: ?string, threadName: ?string, message: ?string, ): string { reporterUsername = reporterUsername ?? '[null]'; const messageAuthor = messageAuthorUsername ? `${messageAuthorUsername}’s` : 'this'; const thread = threadName ? `chat titled "${threadName}"` : 'chat'; const reply = message ? 
createMessageQuote(message) : 'non-text message'; return ( `${reporterUsername} reported ${messageAuthor} message in ${thread}\n` + reply ); } export default createMessageReport; diff --git a/keyserver/src/creators/one-time-keys-creator.js b/keyserver/src/creators/one-time-keys-creator.js index 63eb4e703..86badc15f 100644 --- a/keyserver/src/creators/one-time-keys-creator.js +++ b/keyserver/src/creators/one-time-keys-creator.js @@ -1,26 +1,26 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function saveOneTimeKeys( viewer: Viewer, oneTimeKeys: $ReadOnlyArray, ): Promise { if (oneTimeKeys.length === 0) { return; } const insertData = oneTimeKeys.map(oneTimeKey => [ viewer.session, oneTimeKey, ]); const query = SQL` INSERT INTO one_time_keys(session, one_time_key) VALUES ${insertData} `; await dbQuery(query); } export { saveOneTimeKeys }; diff --git a/keyserver/src/creators/relationship-creators.js b/keyserver/src/creators/relationship-creators.js index 3eb6a8b0c..5b82707e3 100644 --- a/keyserver/src/creators/relationship-creators.js +++ b/keyserver/src/creators/relationship-creators.js @@ -1,54 +1,54 @@ // @flow -import _flatten from 'lodash/fp/flatten'; -import _flow from 'lodash/fp/flow'; -import _groupBy from 'lodash/fp/groupBy'; -import _isEqual from 'lodash/fp/isEqual'; -import _map from 'lodash/fp/map'; -import _mapValues from 'lodash/fp/mapValues'; -import _uniqWith from 'lodash/fp/uniqWith'; -import _values from 'lodash/fp/values'; +import _flatten from 'lodash/fp/flatten.js'; +import _flow from 'lodash/fp/flow.js'; +import _groupBy from 'lodash/fp/groupBy.js'; +import _isEqual from 'lodash/fp/isEqual.js'; +import _map from 'lodash/fp/map.js'; +import _mapValues from 'lodash/fp/mapValues.js'; +import _uniqWith from 'lodash/fp/uniqWith.js'; +import _values from 'lodash/fp/values.js'; import { type UndirectedStatus, undirectedStatus, -} from 'lib/types/relationship-types'; -import { getAllTuples } from 'lib/utils/array'; +} from 'lib/types/relationship-types.js'; +import { getAllTuples } from 'lib/utils/array.js'; import { updateUndirectedRelationships, updateDatasForUserPairs, -} from '../updaters/relationship-updaters'; -import { createUpdates } from './update-creator'; +} from '../updaters/relationship-updaters.js'; +import { createUpdates } from './update-creator.js'; type QueryResult = { +thread: number, +user: number, }; async function createUndirectedRelationships( dbQueryResult: $ReadOnlyArray, setStatus: UndirectedStatus, ) { const userPairs = _flow([ _groupBy(membership => membership.thread), _mapValues(_flow([_map(membership => membership.user), getAllTuples])), _values, _flatten, _uniqWith(_isEqual), ])(dbQueryResult); const changeset = userPairs.map(([user1, user2]) => ({ user1, user2, status: setStatus, })); await updateUndirectedRelationships(changeset); if (setStatus !== undirectedStatus.KNOW_OF) { // We don't call createUpdates for KNOW_OF because the KNOW_OF // migration shouldn't lead to any changes in the userStore await createUpdates(updateDatasForUserPairs(userPairs)); } } export { createUndirectedRelationships }; diff --git a/keyserver/src/creators/report-creator.js b/keyserver/src/creators/report-creator.js index 42be33c8f..890350350 100644 --- a/keyserver/src/creators/report-creator.js +++ b/keyserver/src/creators/report-creator.js @@ -1,238 +1,238 @@ // @flow -import _isEqual 
from 'lodash/fp/isEqual'; +import _isEqual from 'lodash/fp/isEqual.js'; -import bots from 'lib/facts/bots'; +import bots from 'lib/facts/bots.js'; import { filterRawEntryInfosByCalendarQuery, serverEntryInfosObject, -} from 'lib/shared/entry-utils'; -import { messageTypes } from 'lib/types/message-types'; +} from 'lib/shared/entry-utils.js'; +import { messageTypes } from 'lib/types/message-types.js'; import { type ReportCreationRequest, type ReportCreationResponse, type ThreadInconsistencyReportCreationRequest, type EntryInconsistencyReportCreationRequest, type UserInconsistencyReportCreationRequest, reportTypes, -} from 'lib/types/report-types'; -import { values } from 'lib/utils/objects'; +} from 'lib/types/report-types.js'; +import { values } from 'lib/utils/objects.js'; import { sanitizeReduxReport, type ReduxCrashReport, -} from 'lib/utils/sanitization'; +} from 'lib/utils/sanitization.js'; -import { dbQuery, SQL } from '../database/database'; -import { fetchUsername } from '../fetchers/user-fetchers'; -import { handleAsyncPromise } from '../responders/handlers'; -import { createBotViewer } from '../session/bots'; -import type { Viewer } from '../session/viewer'; -import { getAndAssertCommAppURLFacts } from '../utils/urls'; -import createIDs from './id-creator'; -import createMessages from './message-creator'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchUsername } from '../fetchers/user-fetchers.js'; +import { handleAsyncPromise } from '../responders/handlers.js'; +import { createBotViewer } from '../session/bots.js'; +import type { Viewer } from '../session/viewer.js'; +import { getAndAssertCommAppURLFacts } from '../utils/urls.js'; +import createIDs from './id-creator.js'; +import createMessages from './message-creator.js'; const { commbot } = bots; async function createReport( viewer: Viewer, request: ReportCreationRequest, ): Promise { const shouldIgnore = await ignoreReport(viewer, request); if (shouldIgnore) { return null; } const [id] = await createIDs('reports', 1); let type, report, time; if (request.type === reportTypes.THREAD_INCONSISTENCY) { ({ type, time, ...report } = request); time = time ? 
time : Date.now(); } else if (request.type === reportTypes.ENTRY_INCONSISTENCY) { ({ type, time, ...report } = request); } else if (request.type === reportTypes.MEDIA_MISSION) { ({ type, time, ...report } = request); } else if (request.type === reportTypes.USER_INCONSISTENCY) { ({ type, time, ...report } = request); } else { ({ type, ...report } = request); time = Date.now(); const redactedReduxReport: ReduxCrashReport = sanitizeReduxReport({ preloadedState: report.preloadedState, currentState: report.currentState, actions: report.actions, }); report = { ...report, ...redactedReduxReport, }; } const row = [ id, viewer.id, type, request.platformDetails.platform, JSON.stringify(report), time, ]; const query = SQL` INSERT INTO reports (id, user, type, platform, report, creation_time) VALUES ${[row]} `; await dbQuery(query); handleAsyncPromise(sendCommbotMessage(viewer, request, id)); return { id }; } async function sendCommbotMessage( viewer: Viewer, request: ReportCreationRequest, reportID: string, ): Promise { const canGenerateMessage = getCommbotMessage(request, reportID, null); if (!canGenerateMessage) { return; } const username = await fetchUsername(viewer.id); const message = getCommbotMessage(request, reportID, username); if (!message) { return; } const time = Date.now(); await createMessages(createBotViewer(commbot.userID), [ { type: messageTypes.TEXT, threadID: commbot.staffThreadID, creatorID: commbot.userID, time, text: message, }, ]); } async function ignoreReport( viewer: Viewer, request: ReportCreationRequest, ): Promise { // The below logic is to avoid duplicate inconsistency reports if ( request.type !== reportTypes.THREAD_INCONSISTENCY && request.type !== reportTypes.ENTRY_INCONSISTENCY ) { return false; } const { type, platformDetails, time } = request; if (!time) { return false; } const { platform } = platformDetails; const query = SQL` SELECT id FROM reports WHERE user = ${viewer.id} AND type = ${type} AND platform = ${platform} AND creation_time = ${time} `; const [result] = await dbQuery(query); return result.length !== 0; } function getCommbotMessage( request: ReportCreationRequest, reportID: string, username: ?string, ): ?string { const name = username ? username : '[null]'; const { platformDetails } = request; const { platform, codeVersion } = platformDetails; const platformString = codeVersion ? 
`${platform} v${codeVersion}` : platform; if (request.type === reportTypes.ERROR) { const { baseDomain, basePath } = getAndAssertCommAppURLFacts(); return ( `${name} got an error :(\n` + `using ${platformString}\n` + `${baseDomain}${basePath}download_error_report/${reportID}` ); } else if (request.type === reportTypes.THREAD_INCONSISTENCY) { const nonMatchingThreadIDs = getInconsistentThreadIDsFromReport(request); const nonMatchingString = [...nonMatchingThreadIDs].join(', '); return ( `system detected inconsistency for ${name}!\n` + `using ${platformString}\n` + `occurred during ${request.action.type}\n` + `thread IDs that are inconsistent: ${nonMatchingString}` ); } else if (request.type === reportTypes.ENTRY_INCONSISTENCY) { const nonMatchingEntryIDs = getInconsistentEntryIDsFromReport(request); const nonMatchingString = [...nonMatchingEntryIDs].join(', '); return ( `system detected inconsistency for ${name}!\n` + `using ${platformString}\n` + `occurred during ${request.action.type}\n` + `entry IDs that are inconsistent: ${nonMatchingString}` ); } else if (request.type === reportTypes.USER_INCONSISTENCY) { const nonMatchingUserIDs = getInconsistentUserIDsFromReport(request); const nonMatchingString = [...nonMatchingUserIDs].join(', '); return ( `system detected inconsistency for ${name}!\n` + `using ${platformString}\n` + `occurred during ${request.action.type}\n` + `user IDs that are inconsistent: ${nonMatchingString}` ); } else if (request.type === reportTypes.MEDIA_MISSION) { const mediaMissionJSON = JSON.stringify(request.mediaMission); const success = request.mediaMission.result.success ? 'media mission success!' : 'media mission failed :('; return `${name} ${success}\n` + mediaMissionJSON; } else { return null; } } function findInconsistentObjectKeys( first: { +[id: string]: O }, second: { +[id: string]: O }, ): Set { const nonMatchingIDs = new Set(); for (const id in first) { if (!_isEqual(first[id])(second[id])) { nonMatchingIDs.add(id); } } for (const id in second) { if (!first[id]) { nonMatchingIDs.add(id); } } return nonMatchingIDs; } function getInconsistentThreadIDsFromReport( request: ThreadInconsistencyReportCreationRequest, ): Set { const { pushResult, beforeAction } = request; return findInconsistentObjectKeys(beforeAction, pushResult); } function getInconsistentEntryIDsFromReport( request: EntryInconsistencyReportCreationRequest, ): Set { const { pushResult, beforeAction, calendarQuery } = request; const filteredBeforeAction = filterRawEntryInfosByCalendarQuery( serverEntryInfosObject(values(beforeAction)), calendarQuery, ); const filteredAfterAction = filterRawEntryInfosByCalendarQuery( serverEntryInfosObject(values(pushResult)), calendarQuery, ); return findInconsistentObjectKeys(filteredBeforeAction, filteredAfterAction); } function getInconsistentUserIDsFromReport( request: UserInconsistencyReportCreationRequest, ): Set { const { beforeStateCheck, afterStateCheck } = request; return findInconsistentObjectKeys(beforeStateCheck, afterStateCheck); } export default createReport; diff --git a/keyserver/src/creators/role-creator.js b/keyserver/src/creators/role-creator.js index 43b04225d..2945a7d5f 100644 --- a/keyserver/src/creators/role-creator.js +++ b/keyserver/src/creators/role-creator.js @@ -1,295 +1,295 @@ // @flow import { type RoleInfo, threadPermissions, threadPermissionPropagationPrefixes, threadPermissionFilterPrefixes, type ThreadRolePermissionsBlob, type ThreadType, threadTypes, -} from 'lib/types/thread-types'; +} from 'lib/types/thread-types.js'; -import 
{ dbQuery, SQL } from '../database/database'; -import createIDs from './id-creator'; +import { dbQuery, SQL } from '../database/database.js'; +import createIDs from './id-creator.js'; type InitialRoles = { default: RoleInfo, creator: RoleInfo, }; async function createInitialRolesForNewThread( threadID: string, threadType: ThreadType, ): Promise { const rolePermissions = getRolePermissionBlobs(threadType); const ids = await createIDs('roles', Object.values(rolePermissions).length); const time = Date.now(); const newRows = []; const namesToIDs = {}; for (const name in rolePermissions) { const id = ids.shift(); namesToIDs[name] = id; const permissionsBlob = JSON.stringify(rolePermissions[name]); newRows.push([id, threadID, name, permissionsBlob, time]); } const query = SQL` INSERT INTO roles (id, thread, name, permissions, creation_time) VALUES ${newRows} `; await dbQuery(query); const defaultRoleInfo = { id: namesToIDs.Members, name: 'Members', permissions: rolePermissions.Members, isDefault: true, }; if (!rolePermissions.Admins) { return { default: defaultRoleInfo, creator: defaultRoleInfo, }; } const adminRoleInfo = { id: namesToIDs.Admins, name: 'Admins', permissions: rolePermissions.Admins, isDefault: false, }; return { default: defaultRoleInfo, creator: adminRoleInfo, }; } type RolePermissionBlobs = { +Members: ThreadRolePermissionsBlob, +Admins?: ThreadRolePermissionsBlob, }; const { CHILD, DESCENDANT } = threadPermissionPropagationPrefixes; const { OPEN, TOP_LEVEL, OPEN_TOP_LEVEL } = threadPermissionFilterPrefixes; const OPEN_CHILD = CHILD + OPEN; const OPEN_DESCENDANT = DESCENDANT + OPEN; const TOP_LEVEL_DESCENDANT = DESCENDANT + TOP_LEVEL; const OPEN_TOP_LEVEL_DESCENDANT = DESCENDANT + OPEN_TOP_LEVEL; const voicedPermissions = { [threadPermissions.VOICED]: true, [threadPermissions.EDIT_ENTRIES]: true, [threadPermissions.EDIT_THREAD_NAME]: true, [threadPermissions.EDIT_THREAD_COLOR]: true, [threadPermissions.EDIT_THREAD_DESCRIPTION]: true, [threadPermissions.CREATE_SUBCHANNELS]: true, [threadPermissions.ADD_MEMBERS]: true, }; function getRolePermissionBlobsForCommunity( threadType: ThreadType, ): RolePermissionBlobs { const openDescendantKnowOf = OPEN_DESCENDANT + threadPermissions.KNOW_OF; const openDescendantVisible = OPEN_DESCENDANT + threadPermissions.VISIBLE; const openTopLevelDescendantJoinThread = OPEN_TOP_LEVEL_DESCENDANT + threadPermissions.JOIN_THREAD; const openChildJoinThread = OPEN_CHILD + threadPermissions.JOIN_THREAD; const genesisMemberPermissions = { [threadPermissions.KNOW_OF]: true, [threadPermissions.VISIBLE]: true, [openDescendantKnowOf]: true, [openDescendantVisible]: true, [openTopLevelDescendantJoinThread]: true, }; const baseMemberPermissions = { ...genesisMemberPermissions, [threadPermissions.LEAVE_THREAD]: true, [threadPermissions.CREATE_SIDEBARS]: true, [openChildJoinThread]: true, }; let memberPermissions; if (threadType === threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT) { memberPermissions = baseMemberPermissions; } else if (threadType === threadTypes.GENESIS) { memberPermissions = genesisMemberPermissions; } else { memberPermissions = { ...baseMemberPermissions, ...voicedPermissions, }; } const descendantKnowOf = DESCENDANT + threadPermissions.KNOW_OF; const descendantVisible = DESCENDANT + threadPermissions.VISIBLE; const topLevelDescendantJoinThread = TOP_LEVEL_DESCENDANT + threadPermissions.JOIN_THREAD; const childJoinThread = CHILD + threadPermissions.JOIN_THREAD; const descendantVoiced = DESCENDANT + threadPermissions.VOICED; const descendantEditEntries 
= DESCENDANT + threadPermissions.EDIT_ENTRIES; const descendantEditThreadName = DESCENDANT + threadPermissions.EDIT_THREAD_NAME; const descendantEditThreadColor = DESCENDANT + threadPermissions.EDIT_THREAD_COLOR; const descendantEditThreadDescription = DESCENDANT + threadPermissions.EDIT_THREAD_DESCRIPTION; const topLevelDescendantCreateSubchannels = TOP_LEVEL_DESCENDANT + threadPermissions.CREATE_SUBCHANNELS; const topLevelDescendantCreateSidebars = TOP_LEVEL_DESCENDANT + threadPermissions.CREATE_SIDEBARS; const descendantAddMembers = DESCENDANT + threadPermissions.ADD_MEMBERS; const descendantDeleteThread = DESCENDANT + threadPermissions.DELETE_THREAD; const descendantEditPermissions = DESCENDANT + threadPermissions.EDIT_PERMISSIONS; const descendantRemoveMembers = DESCENDANT + threadPermissions.REMOVE_MEMBERS; const descendantChangeRole = DESCENDANT + threadPermissions.CHANGE_ROLE; const baseAdminPermissions = { [threadPermissions.KNOW_OF]: true, [threadPermissions.VISIBLE]: true, [threadPermissions.VOICED]: true, [threadPermissions.EDIT_ENTRIES]: true, [threadPermissions.EDIT_THREAD_NAME]: true, [threadPermissions.EDIT_THREAD_COLOR]: true, [threadPermissions.EDIT_THREAD_DESCRIPTION]: true, [threadPermissions.CREATE_SUBCHANNELS]: true, [threadPermissions.CREATE_SIDEBARS]: true, [threadPermissions.ADD_MEMBERS]: true, [threadPermissions.DELETE_THREAD]: true, [threadPermissions.REMOVE_MEMBERS]: true, [threadPermissions.CHANGE_ROLE]: true, [descendantKnowOf]: true, [descendantVisible]: true, [topLevelDescendantJoinThread]: true, [childJoinThread]: true, [descendantVoiced]: true, [descendantEditEntries]: true, [descendantEditThreadName]: true, [descendantEditThreadColor]: true, [descendantEditThreadDescription]: true, [topLevelDescendantCreateSubchannels]: true, [topLevelDescendantCreateSidebars]: true, [descendantAddMembers]: true, [descendantDeleteThread]: true, [descendantEditPermissions]: true, [descendantRemoveMembers]: true, [descendantChangeRole]: true, }; let adminPermissions; if (threadType === threadTypes.GENESIS) { adminPermissions = baseAdminPermissions; } else { adminPermissions = { ...baseAdminPermissions, [threadPermissions.LEAVE_THREAD]: true, }; } return { Members: memberPermissions, Admins: adminPermissions, }; } function getRolePermissionBlobs(threadType: ThreadType): RolePermissionBlobs { if (threadType === threadTypes.SIDEBAR) { const memberPermissions = { [threadPermissions.VOICED]: true, [threadPermissions.EDIT_THREAD_NAME]: true, [threadPermissions.EDIT_THREAD_COLOR]: true, [threadPermissions.EDIT_THREAD_DESCRIPTION]: true, [threadPermissions.ADD_MEMBERS]: true, [threadPermissions.EDIT_PERMISSIONS]: true, [threadPermissions.REMOVE_MEMBERS]: true, [threadPermissions.LEAVE_THREAD]: true, }; return { Members: memberPermissions, }; } const openDescendantKnowOf = OPEN_DESCENDANT + threadPermissions.KNOW_OF; const openDescendantVisible = OPEN_DESCENDANT + threadPermissions.VISIBLE; const openChildJoinThread = OPEN_CHILD + threadPermissions.JOIN_THREAD; if (threadType === threadTypes.PRIVATE) { const memberPermissions = { [threadPermissions.KNOW_OF]: true, [threadPermissions.VISIBLE]: true, [threadPermissions.VOICED]: true, [threadPermissions.EDIT_THREAD_COLOR]: true, [threadPermissions.EDIT_THREAD_DESCRIPTION]: true, [threadPermissions.CREATE_SIDEBARS]: true, [threadPermissions.EDIT_ENTRIES]: true, [openDescendantKnowOf]: true, [openDescendantVisible]: true, [openChildJoinThread]: true, }; return { Members: memberPermissions, }; } if (threadType === threadTypes.PERSONAL) { 
return { Members: { [threadPermissions.KNOW_OF]: true, [threadPermissions.VISIBLE]: true, [threadPermissions.VOICED]: true, [threadPermissions.EDIT_ENTRIES]: true, [threadPermissions.EDIT_THREAD_NAME]: true, [threadPermissions.EDIT_THREAD_COLOR]: true, [threadPermissions.EDIT_THREAD_DESCRIPTION]: true, [threadPermissions.CREATE_SIDEBARS]: true, [openDescendantKnowOf]: true, [openDescendantVisible]: true, [openChildJoinThread]: true, }, }; } const openTopLevelDescendantJoinThread = OPEN_TOP_LEVEL_DESCENDANT + threadPermissions.JOIN_THREAD; const subthreadBasePermissions = { [threadPermissions.KNOW_OF]: true, [threadPermissions.VISIBLE]: true, [threadPermissions.CREATE_SIDEBARS]: true, [threadPermissions.LEAVE_THREAD]: true, [openDescendantKnowOf]: true, [openDescendantVisible]: true, [openTopLevelDescendantJoinThread]: true, [openChildJoinThread]: true, }; if ( threadType === threadTypes.COMMUNITY_OPEN_SUBTHREAD || threadType === threadTypes.COMMUNITY_SECRET_SUBTHREAD ) { const memberPermissions = { [threadPermissions.REMOVE_MEMBERS]: true, [threadPermissions.EDIT_PERMISSIONS]: true, ...subthreadBasePermissions, ...voicedPermissions, }; return { Members: memberPermissions, }; } if ( threadType === threadTypes.COMMUNITY_OPEN_ANNOUNCEMENT_SUBTHREAD || threadType === threadTypes.COMMUNITY_SECRET_ANNOUNCEMENT_SUBTHREAD ) { return { Members: subthreadBasePermissions, }; } return getRolePermissionBlobsForCommunity(threadType); } export { createInitialRolesForNewThread, getRolePermissionBlobs }; diff --git a/keyserver/src/creators/session-creator.js b/keyserver/src/creators/session-creator.js index 99c02a99b..c69f2e33c 100644 --- a/keyserver/src/creators/session-creator.js +++ b/keyserver/src/creators/session-creator.js @@ -1,40 +1,40 @@ // @flow -import type { CalendarQuery } from 'lib/types/entry-types'; +import type { CalendarQuery } from 'lib/types/entry-types.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function createSession( viewer: Viewer, calendarQuery: CalendarQuery, initialLastUpdate: number, ): Promise { const time = Date.now(); const row = [ viewer.session, viewer.userID, viewer.cookieID, JSON.stringify(calendarQuery), time, initialLastUpdate, time, ]; const query = SQL` INSERT INTO sessions (id, user, cookie, query, creation_time, last_update, last_validated) VALUES ${[row]} ON DUPLICATE KEY UPDATE query = VALUE(query), last_update = VALUE(last_update), last_validated = VALUE(last_validated) `; await dbQuery(query); viewer.setSessionInfo({ lastValidated: time, lastUpdate: initialLastUpdate, calendarQuery, }); } export { createSession }; diff --git a/keyserver/src/creators/thread-creator.js b/keyserver/src/creators/thread-creator.js index af93bb40d..b66d7d2b6 100644 --- a/keyserver/src/creators/thread-creator.js +++ b/keyserver/src/creators/thread-creator.js @@ -1,509 +1,509 @@ // @flow import invariant from 'invariant'; -import bots from 'lib/facts/bots'; -import genesis from 'lib/facts/genesis'; +import bots from 'lib/facts/bots.js'; +import genesis from 'lib/facts/genesis.js'; import { generatePendingThreadColor, generateRandomColor, getThreadTypeParentRequirement, -} from 'lib/shared/thread-utils'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; -import type { Shape } from 'lib/types/core'; -import { messageTypes } from 'lib/types/message-types'; +} from 'lib/shared/thread-utils.js'; 
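// As an aside, the prefixed keys in the role permission blobs above
// (role-creator.js) are plain string concatenations of a propagation prefix,
// an optional filter prefix, and a base permission name. A minimal standalone
// sketch of that composition follows; the literal string values here are
// illustrative placeholders and may not match the exact constants exported
// from lib/types/thread-types.js.
const examplePropagationPrefixes = { DESCENDANT: 'descendant_', CHILD: 'child_' };
const exampleFilterPrefixes = { OPEN: 'open_', OPEN_TOP_LEVEL: 'opentoplevel_' };
const examplePermissions = {
  KNOW_OF: 'know_of',
  VISIBLE: 'visible',
  JOIN_THREAD: 'join_thread',
};

// e.g. DESCENDANT + OPEN means "applies to descendant threads, but only open ones"
const exampleOpenDescendant =
  examplePropagationPrefixes.DESCENDANT + exampleFilterPrefixes.OPEN;
const exampleOpenChild =
  examplePropagationPrefixes.CHILD + exampleFilterPrefixes.OPEN;

// A member blob entry is then just prefix + permission name:
const exampleMemberBlob = {
  [examplePermissions.KNOW_OF]: true,
  [exampleOpenDescendant + examplePermissions.KNOW_OF]: true, // 'descendant_open_know_of'
  [exampleOpenDescendant + examplePermissions.VISIBLE]: true, // 'descendant_open_visible'
  [exampleOpenChild + examplePermissions.JOIN_THREAD]: true, // 'child_open_join_thread'
};
console.log(Object.keys(exampleMemberBlob));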
+import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; +import type { Shape } from 'lib/types/core.js'; +import { messageTypes } from 'lib/types/message-types.js'; import { type ServerNewThreadRequest, type NewThreadResponse, threadTypes, threadPermissions, threadTypeIsCommunityRoot, -} from 'lib/types/thread-types'; -import type { UserInfos } from 'lib/types/user-types'; -import { pushAll } from 'lib/utils/array'; -import { ServerError } from 'lib/utils/errors'; -import { promiseAll } from 'lib/utils/promises'; -import { firstLine } from 'lib/utils/string-utils'; - -import { dbQuery, SQL } from '../database/database'; -import { fetchMessageInfoByID } from '../fetchers/message-fetchers'; +} from 'lib/types/thread-types.js'; +import type { UserInfos } from 'lib/types/user-types.js'; +import { pushAll } from 'lib/utils/array.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { promiseAll } from 'lib/utils/promises.js'; +import { firstLine } from 'lib/utils/string-utils.js'; + +import { dbQuery, SQL } from '../database/database.js'; +import { fetchMessageInfoByID } from '../fetchers/message-fetchers.js'; import { determineThreadAncestry, personalThreadQuery, -} from '../fetchers/thread-fetchers'; +} from '../fetchers/thread-fetchers.js'; import { checkThreadPermission, validateCandidateMembers, -} from '../fetchers/thread-permission-fetchers'; -import type { Viewer } from '../session/viewer'; +} from '../fetchers/thread-permission-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; import { changeRole, recalculateThreadPermissions, commitMembershipChangeset, getChangesetCommitResultForExistingThread, -} from '../updaters/thread-permission-updaters'; -import { joinThread } from '../updaters/thread-updaters'; -import RelationshipChangeset from '../utils/relationship-changeset'; -import createIDs from './id-creator'; -import createMessages from './message-creator'; +} from '../updaters/thread-permission-updaters.js'; +import { joinThread } from '../updaters/thread-updaters.js'; +import RelationshipChangeset from '../utils/relationship-changeset.js'; +import createIDs from './id-creator.js'; +import createMessages from './message-creator.js'; import { createInitialRolesForNewThread, getRolePermissionBlobs, -} from './role-creator'; -import type { UpdatesForCurrentSession } from './update-creator'; +} from './role-creator.js'; +import type { UpdatesForCurrentSession } from './update-creator.js'; const { commbot } = bots; const privateThreadDescription: string = 'This is your private chat, ' + 'where you can set reminders and jot notes in private!'; type CreateThreadOptions = Shape<{ +forceAddMembers: boolean, +updatesForCurrentSession: UpdatesForCurrentSession, +silentlyFailMembers: boolean, }>; // If forceAddMembers is set, we will allow the viewer to add random users who // they aren't friends with. We will only fail if the viewer is trying to add // somebody who they have blocked or has blocked them. On the other hand, if // forceAddMembers is not set, we will fail if the viewer tries to add somebody // who they aren't friends with and doesn't have a membership row with a // nonnegative role for the parent thread. async function createThread( viewer: Viewer, request: ServerNewThreadRequest, options?: CreateThreadOptions, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const forceAddMembers = options?.forceAddMembers ?? false; const updatesForCurrentSession = options?.updatesForCurrentSession ?? 
'return'; const silentlyFailMembers = options?.silentlyFailMembers ?? false; const threadType = request.type; const shouldCreateRelationships = forceAddMembers || threadType === threadTypes.PERSONAL; let parentThreadID = request.parentThreadID ? request.parentThreadID : null; const initialMemberIDsFromRequest = request.initialMemberIDs && request.initialMemberIDs.length > 0 ? [...new Set(request.initialMemberIDs)] : null; const ghostMemberIDsFromRequest = request.ghostMemberIDs && request.ghostMemberIDs.length > 0 ? [...new Set(request.ghostMemberIDs)] : null; const sourceMessageID = request.sourceMessageID ? request.sourceMessageID : null; invariant( threadType !== threadTypes.SIDEBAR || sourceMessageID, 'sourceMessageID should be set for sidebar', ); const parentRequirement = getThreadTypeParentRequirement(threadType); if ( (parentRequirement === 'required' && !parentThreadID) || (parentRequirement === 'disabled' && parentThreadID) ) { throw new ServerError('invalid_parameters'); } if ( threadType === threadTypes.PERSONAL && request.initialMemberIDs?.length !== 1 ) { throw new ServerError('invalid_parameters'); } const requestParentThreadID = parentThreadID; const confirmParentPermissionPromise = (async () => { if (!requestParentThreadID) { return; } const hasParentPermission = await checkThreadPermission( viewer, requestParentThreadID, threadType === threadTypes.SIDEBAR ? threadPermissions.CREATE_SIDEBARS : threadPermissions.CREATE_SUBCHANNELS, ); if (!hasParentPermission) { throw new ServerError('invalid_credentials'); } })(); // This is a temporary hack until we release actual E2E-encrypted local // conversations. For now we are hosting all root threads on Ashoat's // keyserver, so we set them to the have the Genesis community as their // parent thread. 
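// For illustration, given the forceAddMembers and parent-requirement rules
// described above, a hypothetical caller of createThread might build a request
// like the sketch below. The thread type, IDs, and option values are
// placeholders chosen for illustration; they are not taken from this diff.
async function exampleCreateSubchannel(exampleViewer) {
  return await createThread(
    exampleViewer,
    {
      type: threadTypes.COMMUNITY_OPEN_SUBTHREAD,
      name: 'example subchannel',
      parentThreadID: '12345', // hypothetical parent channel ID
      initialMemberIDs: ['67890'], // hypothetical user who isn't a friend
    },
    {
      forceAddMembers: true, // allow adding non-friends (blocked users still fail)
      silentlyFailMembers: false, // reject the request if any member is filtered out
      updatesForCurrentSession: 'return', // hand updates back to the caller
    },
  );
}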
if (!parentThreadID && !threadTypeIsCommunityRoot(threadType)) { parentThreadID = genesis.id; } const determineThreadAncestryPromise = determineThreadAncestry( parentThreadID, threadType, ); const validateMembersPromise = (async () => { const threadAncestry = await determineThreadAncestryPromise; const defaultRolePermissions = getRolePermissionBlobs(threadType).Members; const { initialMemberIDs, ghostMemberIDs } = await validateCandidateMembers( viewer, { initialMemberIDs: initialMemberIDsFromRequest, ghostMemberIDs: ghostMemberIDsFromRequest, }, { threadType, parentThreadID, containingThreadID: threadAncestry.containingThreadID, defaultRolePermissions, }, { requireRelationship: !shouldCreateRelationships }, ); if ( !silentlyFailMembers && (Number(initialMemberIDs?.length) < Number(initialMemberIDsFromRequest?.length) || Number(ghostMemberIDs?.length) < Number(ghostMemberIDsFromRequest?.length)) ) { throw new ServerError('invalid_credentials'); } return { initialMemberIDs, ghostMemberIDs }; })(); const checkPromises = {}; checkPromises.confirmParentPermission = confirmParentPermissionPromise; checkPromises.threadAncestry = determineThreadAncestryPromise; checkPromises.validateMembers = validateMembersPromise; if (sourceMessageID) { checkPromises.sourceMessage = fetchMessageInfoByID(viewer, sourceMessageID); } const { sourceMessage, threadAncestry, validateMembers: { initialMemberIDs, ghostMemberIDs }, } = await promiseAll(checkPromises); if (sourceMessage && sourceMessage.type === messageTypes.REACTION) { throw new ServerError('invalid_parameters'); } let { id } = request; if (id === null || id === undefined) { const ids = await createIDs('threads', 1); id = ids[0]; } const newRoles = await createInitialRolesForNewThread(id, threadType); const name = request.name ? firstLine(request.name) : null; const description = request.description ? request.description : null; let color = request.color ? request.color.toLowerCase() : generateRandomColor(); if (threadType === threadTypes.PERSONAL) { color = generatePendingThreadColor([ ...(request.initialMemberIDs ?? 
[]), viewer.id, ]); } const time = Date.now(); const row = [ id, threadType, name, description, viewer.userID, time, color, parentThreadID, threadAncestry.containingThreadID, threadAncestry.community, threadAncestry.depth, newRoles.default.id, sourceMessageID, ]; let existingThreadQuery = null; if (threadType === threadTypes.PERSONAL) { const otherMemberID = initialMemberIDs?.[0]; invariant( otherMemberID, 'Other member id should be set for a PERSONAL thread', ); existingThreadQuery = personalThreadQuery(viewer.userID, otherMemberID); } else if (sourceMessageID) { existingThreadQuery = SQL` SELECT t.id FROM threads t WHERE t.source_message = ${sourceMessageID} `; } if (existingThreadQuery) { const query = SQL` INSERT INTO threads(id, type, name, description, creator, creation_time, color, parent_thread_id, containing_thread_id, community, depth, default_role, source_message) SELECT ${row} WHERE NOT EXISTS (`; query.append(existingThreadQuery).append(SQL`)`); const [result] = await dbQuery(query); if (result.affectedRows === 0) { const deleteRoles = SQL` DELETE FROM roles WHERE id IN (${newRoles.default.id}, ${newRoles.creator.id}) `; const deleteIDs = SQL` DELETE FROM ids WHERE id IN (${id}, ${newRoles.default.id}, ${newRoles.creator.id}) `; const [[existingThreadResult]] = await Promise.all([ dbQuery(existingThreadQuery), dbQuery(deleteRoles), dbQuery(deleteIDs), ]); invariant(existingThreadResult.length > 0, 'thread should exist'); const existingThreadID = existingThreadResult[0].id.toString(); let calendarQuery; if (hasMinCodeVersion(viewer.platformDetails, 87)) { invariant(request.calendarQuery, 'calendar query should exist'); calendarQuery = { ...request.calendarQuery, filters: [ ...request.calendarQuery.filters, { type: 'threads', threadIDs: [existingThreadID] }, ], }; } let joinUpdateInfos = []; let userInfos: UserInfos = {}; let newMessageInfos = []; if (threadType !== threadTypes.PERSONAL) { const joinThreadResult = await joinThread(viewer, { threadID: existingThreadID, calendarQuery, }); joinUpdateInfos = joinThreadResult.updatesResult.newUpdates; userInfos = joinThreadResult.userInfos; newMessageInfos = joinThreadResult.rawMessageInfos; } const { viewerUpdates: newUpdates, userInfos: changesetUserInfos, } = await getChangesetCommitResultForExistingThread( viewer, existingThreadID, joinUpdateInfos, { calendarQuery, updatesForCurrentSession }, ); userInfos = { ...userInfos, ...changesetUserInfos }; return { newThreadID: existingThreadID, updatesResult: { newUpdates, }, userInfos, newMessageInfos, }; } } else { const query = SQL` INSERT INTO threads(id, type, name, description, creator, creation_time, color, parent_thread_id, containing_thread_id, community, depth, default_role, source_message) VALUES ${[row]} `; await dbQuery(query); } let initialMemberPromise; if (initialMemberIDs) { initialMemberPromise = changeRole(id, initialMemberIDs, null, { setNewMembersToUnread: true, }); } let ghostMemberPromise; if (ghostMemberIDs) { ghostMemberPromise = changeRole(id, ghostMemberIDs, -1); } const [ creatorChangeset, initialMembersChangeset, ghostMembersChangeset, recalculatePermissionsChangeset, ] = await Promise.all([ changeRole(id, [viewer.userID], newRoles.creator.id), initialMemberPromise, ghostMemberPromise, recalculateThreadPermissions(id), ]); const { membershipRows: creatorMembershipRows, relationshipChangeset: creatorRelationshipChangeset, } = creatorChangeset; const { membershipRows: recalculateMembershipRows, relationshipChangeset: recalculateRelationshipChangeset, } = 
recalculatePermissionsChangeset; const membershipRows = [ ...creatorMembershipRows, ...recalculateMembershipRows, ]; const relationshipChangeset = new RelationshipChangeset(); relationshipChangeset.addAll(creatorRelationshipChangeset); relationshipChangeset.addAll(recalculateRelationshipChangeset); if (initialMembersChangeset) { const { membershipRows: initialMembersMembershipRows, relationshipChangeset: initialMembersRelationshipChangeset, } = initialMembersChangeset; pushAll(membershipRows, initialMembersMembershipRows); relationshipChangeset.addAll(initialMembersRelationshipChangeset); } if (ghostMembersChangeset) { const { membershipRows: ghostMembersMembershipRows, relationshipChangeset: ghostMembersRelationshipChangeset, } = ghostMembersChangeset; pushAll(membershipRows, ghostMembersMembershipRows); relationshipChangeset.addAll(ghostMembersRelationshipChangeset); } const changeset = { membershipRows, relationshipChangeset }; const { threadInfos, viewerUpdates, userInfos, } = await commitMembershipChangeset(viewer, changeset, { updatesForCurrentSession, }); const initialMemberAndCreatorIDs = initialMemberIDs ? [...initialMemberIDs, viewer.userID] : [viewer.userID]; const messageDatas = []; if (threadType !== threadTypes.SIDEBAR) { messageDatas.push({ type: messageTypes.CREATE_THREAD, threadID: id, creatorID: viewer.userID, time, initialThreadState: { type: threadType, name, parentThreadID, color, memberIDs: initialMemberAndCreatorIDs, }, }); } else { invariant(parentThreadID, 'parentThreadID should be set for sidebar'); if (!sourceMessage || sourceMessage.type === messageTypes.SIDEBAR_SOURCE) { throw new ServerError('invalid_parameters'); } messageDatas.push( { type: messageTypes.SIDEBAR_SOURCE, threadID: id, creatorID: viewer.userID, time, sourceMessage, }, { type: messageTypes.CREATE_SIDEBAR, threadID: id, creatorID: viewer.userID, time, sourceMessageAuthorID: sourceMessage.creatorID, initialThreadState: { name, parentThreadID, color, memberIDs: initialMemberAndCreatorIDs, }, }, ); } if ( parentThreadID && threadType !== threadTypes.SIDEBAR && (parentThreadID !== genesis.id || threadType === threadTypes.COMMUNITY_OPEN_SUBTHREAD || threadType === threadTypes.COMMUNITY_OPEN_ANNOUNCEMENT_SUBTHREAD) ) { messageDatas.push({ type: messageTypes.CREATE_SUB_THREAD, threadID: parentThreadID, creatorID: viewer.userID, time, childThreadID: id, }); } const newMessageInfos = await createMessages( viewer, messageDatas, updatesForCurrentSession, ); if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { newThreadID: id, updatesResult: { newUpdates: viewerUpdates, }, userInfos, newMessageInfos, }; } return { newThreadInfo: threadInfos[id], updatesResult: { newUpdates: viewerUpdates, }, userInfos, newMessageInfos, }; } function createPrivateThread( viewer: Viewer, username: string, ): Promise { return createThread( viewer, { type: threadTypes.PRIVATE, name: username, description: privateThreadDescription, ghostMemberIDs: [commbot.userID], }, { forceAddMembers: true, }, ); } export { createThread, createPrivateThread, privateThreadDescription }; diff --git a/keyserver/src/creators/update-creator.js b/keyserver/src/creators/update-creator.js index 046af8817..c4b9ddd48 100644 --- a/keyserver/src/creators/update-creator.js +++ b/keyserver/src/creators/update-creator.js @@ -1,794 +1,794 @@ // @flow import invariant from 'invariant'; -import { nonThreadCalendarFilters } from 'lib/selectors/calendar-filter-selectors'; +import { nonThreadCalendarFilters } from 
'lib/selectors/calendar-filter-selectors.js'; import { keyForUpdateData, keyForUpdateInfo, rawUpdateInfoFromUpdateData, -} from 'lib/shared/update-utils'; +} from 'lib/shared/update-utils.js'; import { type RawEntryInfo, type FetchEntryInfosBase, type CalendarQuery, defaultCalendarQuery, -} from 'lib/types/entry-types'; +} from 'lib/types/entry-types.js'; import { defaultNumberPerThread, type FetchMessageInfosResult, -} from 'lib/types/message-types'; +} from 'lib/types/message-types.js'; import { type UpdateTarget, redisMessageTypes, type NewUpdatesRedisMessage, -} from 'lib/types/redis-types'; -import type { RawThreadInfo } from 'lib/types/thread-types'; +} from 'lib/types/redis-types.js'; +import type { RawThreadInfo } from 'lib/types/thread-types.js'; import { type ServerUpdateInfo, type UpdateData, type RawUpdateInfo, type CreateUpdatesResult, updateTypes, -} from 'lib/types/update-types'; +} from 'lib/types/update-types.js'; import type { UserInfos, LoggedInUserInfo, OldLoggedInUserInfo, -} from 'lib/types/user-types'; -import { promiseAll } from 'lib/utils/promises'; +} from 'lib/types/user-types.js'; +import { promiseAll } from 'lib/utils/promises.js'; -import { dbQuery, SQL, mergeAndConditions } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import { deleteUpdatesByConditions } from '../deleters/update-deleters'; +import { dbQuery, SQL, mergeAndConditions } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import { deleteUpdatesByConditions } from '../deleters/update-deleters.js'; import { fetchEntryInfos, fetchEntryInfosByID, -} from '../fetchers/entry-fetchers'; -import { fetchMessageInfos } from '../fetchers/message-fetchers'; +} from '../fetchers/entry-fetchers.js'; +import { fetchMessageInfos } from '../fetchers/message-fetchers.js'; import { fetchThreadInfos, type FetchThreadInfosResult, -} from '../fetchers/thread-fetchers'; +} from '../fetchers/thread-fetchers.js'; import { fetchKnownUserInfos, fetchCurrentUserInfo, -} from '../fetchers/user-fetchers'; -import type { Viewer } from '../session/viewer'; -import { channelNameForUpdateTarget, publisher } from '../socket/redis'; -import createIDs from './id-creator'; +} from '../fetchers/user-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import { channelNameForUpdateTarget, publisher } from '../socket/redis.js'; +import createIDs from './id-creator.js'; export type UpdatesForCurrentSession = // This is the default if no Viewer is passed, or if an isSocket Viewer is // passed in. We will broadcast to all valid sessions via Redis and return // nothing to the caller, relying on the current session's Redis listener to // pick up the updates and deliver them asynchronously. | 'broadcast' // This is the default if a non-isSocket Viewer is passed in. We avoid // broadcasting the update to the current session, and instead return the // update to the caller, who will handle delivering it to the client. | 'return' // This means we ignore any updates destined for the current session. // Presumably the caller knows what they are doing and has a different way of // communicating the relevant information to the client. 
| 'ignore'; type DeleteCondition = { +userID: string, +target: ?string, +types: 'all_types' | $ReadOnlySet, }; export type ViewerInfo = | { viewer: Viewer, calendarQuery?: ?CalendarQuery, updatesForCurrentSession?: UpdatesForCurrentSession, } | { viewer: Viewer, calendarQuery: ?CalendarQuery, updatesForCurrentSession?: UpdatesForCurrentSession, threadInfos: { +[id: string]: RawThreadInfo }, }; const defaultUpdateCreationResult = { viewerUpdates: [], userInfos: {} }; const sortFunction = ( a: UpdateData | ServerUpdateInfo, b: UpdateData | ServerUpdateInfo, ) => a.time - b.time; const deleteUpdatesBatchSize = 500; // Creates rows in the updates table based on the inputed updateDatas. Returns // UpdateInfos pertaining to the provided viewerInfo, as well as related // UserInfos. If no viewerInfo is provided, no UpdateInfos will be returned. And // the update row won't have an updater column, meaning no session will be // excluded from the update. async function createUpdates( updateDatas: $ReadOnlyArray, passedViewerInfo?: ?ViewerInfo, ): Promise { if (updateDatas.length === 0) { return defaultUpdateCreationResult; } // viewer.session will throw for a script Viewer let viewerInfo = passedViewerInfo; if ( viewerInfo && (viewerInfo.viewer.isScriptViewer || !viewerInfo.viewer.loggedIn) ) { viewerInfo = null; } const sortedUpdateDatas = [...updateDatas].sort(sortFunction); const filteredUpdateDatas: UpdateData[] = []; const keyedUpdateDatas: Map = new Map(); for (const updateData of sortedUpdateDatas) { const key = keyForUpdateData(updateData); if (!key) { filteredUpdateDatas.push(updateData); continue; } const conditionKey = `${updateData.userID}|${key}`; const deleteCondition = getDeleteCondition(updateData); invariant( deleteCondition, `updateData of type ${updateData.type} has conditionKey ` + `${conditionKey} but no deleteCondition`, ); const curUpdateDatas = keyedUpdateDatas.get(conditionKey); if (!curUpdateDatas) { keyedUpdateDatas.set(conditionKey, [updateData]); continue; } const filteredCurrent = curUpdateDatas.filter(curUpdateData => filterOnDeleteCondition(curUpdateData, deleteCondition), ); if (filteredCurrent.length === 0) { keyedUpdateDatas.set(conditionKey, [updateData]); continue; } const isNewUpdateDataFiltered = !filteredCurrent.every(curUpdateData => { const curDeleteCondition = getDeleteCondition(curUpdateData); invariant( curDeleteCondition, `updateData of type ${curUpdateData.type} is in keyedUpdateDatas ` + "but doesn't have a deleteCondition", ); return filterOnDeleteCondition(updateData, curDeleteCondition); }); if (!isNewUpdateDataFiltered) { filteredCurrent.push(updateData); } keyedUpdateDatas.set(conditionKey, filteredCurrent); } for (const keyUpdateDatas of keyedUpdateDatas.values()) { filteredUpdateDatas.push(...keyUpdateDatas); } const ids = await createIDs('updates', filteredUpdateDatas.length); let updatesForCurrentSession = viewerInfo && viewerInfo.updatesForCurrentSession; if (!updatesForCurrentSession && viewerInfo) { updatesForCurrentSession = viewerInfo.viewer.isSocket ? 'broadcast' : 'return'; } else if (!updatesForCurrentSession) { updatesForCurrentSession = 'broadcast'; } const dontBroadcastSession = updatesForCurrentSession !== 'broadcast' && viewerInfo ? 
viewerInfo.viewer.session : null; const publishInfos: Map = new Map(); const viewerRawUpdateInfos: RawUpdateInfo[] = []; const insertRows: (?(number | string))[][] = []; const earliestTime: Map = new Map(); for (let i = 0; i < filteredUpdateDatas.length; i++) { const updateData = filteredUpdateDatas[i]; let content; if (updateData.type === updateTypes.DELETE_ACCOUNT) { content = JSON.stringify({ deletedUserID: updateData.deletedUserID }); } else if (updateData.type === updateTypes.UPDATE_THREAD) { content = JSON.stringify({ threadID: updateData.threadID }); } else if (updateData.type === updateTypes.UPDATE_THREAD_READ_STATUS) { const { threadID, unread } = updateData; content = JSON.stringify({ threadID, unread }); } else if ( updateData.type === updateTypes.DELETE_THREAD || updateData.type === updateTypes.JOIN_THREAD ) { const { threadID } = updateData; content = JSON.stringify({ threadID }); } else if (updateData.type === updateTypes.BAD_DEVICE_TOKEN) { const { deviceToken } = updateData; content = JSON.stringify({ deviceToken }); } else if (updateData.type === updateTypes.UPDATE_ENTRY) { const { entryID } = updateData; content = JSON.stringify({ entryID }); } else if (updateData.type === updateTypes.UPDATE_CURRENT_USER) { // user column contains all the info we need to construct the UpdateInfo content = null; } else if (updateData.type === updateTypes.UPDATE_USER) { const { updatedUserID } = updateData; content = JSON.stringify({ updatedUserID }); } else { invariant(false, `unrecognized updateType ${updateData.type}`); } const target = getTargetFromUpdateData(updateData); const rawUpdateInfo = rawUpdateInfoFromUpdateData(updateData, ids[i]); if (!target || !dontBroadcastSession || target !== dontBroadcastSession) { const updateTarget = target ? { userID: updateData.userID, sessionID: target } : { userID: updateData.userID }; const channelName = channelNameForUpdateTarget(updateTarget); let publishInfo = publishInfos.get(channelName); if (!publishInfo) { publishInfo = { updateTarget, rawUpdateInfos: [] }; publishInfos.set(channelName, publishInfo); } publishInfo.rawUpdateInfos.push(rawUpdateInfo); } if ( updatesForCurrentSession === 'return' && viewerInfo && updateData.userID === viewerInfo.viewer.id && (!target || target === viewerInfo.viewer.session) ) { viewerRawUpdateInfos.push(rawUpdateInfo); } if (viewerInfo && target && viewerInfo.viewer.session === target) { // In the case where this update is being created only for the current // session, there's no reason to insert a row into the updates table continue; } const key = keyForUpdateData(updateData); if (key) { const conditionKey = `${updateData.userID}|${key}`; const currentEarliestTime = earliestTime.get(conditionKey); if (!currentEarliestTime || updateData.time < currentEarliestTime) { earliestTime.set(conditionKey, updateData.time); } } const insertRow = [ ids[i], updateData.userID, updateData.type, key, content, updateData.time, dontBroadcastSession, target, ]; insertRows.push(insertRow); } type DeleteUpdatesConditions = { key: string, target?: string, types?: number[], time?: number, }; const usersByConditions: Map< string, { conditions: DeleteUpdatesConditions, users: Set, }, > = new Map(); for (const [conditionKey, keyUpdateDatas] of keyedUpdateDatas) { const deleteConditionByTarget: Map = new Map(); for (const updateData of keyUpdateDatas) { const deleteCondition = getDeleteCondition(updateData); invariant( deleteCondition, `updateData of type ${updateData.type} is in keyedUpdateDatas but ` + "doesn't have a 
deleteCondition", ); const { target, types } = deleteCondition; const existingDeleteCondition = deleteConditionByTarget.get(target); if (!existingDeleteCondition) { deleteConditionByTarget.set(target, deleteCondition); continue; } const existingTypes = existingDeleteCondition.types; if (existingTypes === 'all_types') { continue; } else if (types === 'all_types') { deleteConditionByTarget.set(target, deleteCondition); continue; } const mergedTypes = new Set([...types, ...existingTypes]); deleteConditionByTarget.set(target, { ...deleteCondition, types: mergedTypes, }); } for (const deleteCondition of deleteConditionByTarget.values()) { const { userID, target, types } = deleteCondition; const key = conditionKey.split('|')[1]; const conditions: DeleteUpdatesConditions = { key }; if (target) { conditions.target = target; } if (types !== 'all_types') { invariant(types.size > 0, 'deleteCondition had empty types set'); conditions.types = [...types]; } const earliestTimeForCondition = earliestTime.get(conditionKey); if (earliestTimeForCondition) { conditions.time = earliestTimeForCondition; } const conditionsKey = JSON.stringify(conditions); if (!usersByConditions.has(conditionsKey)) { usersByConditions.set(conditionsKey, { conditions, users: new Set(), }); } usersByConditions.get(conditionsKey)?.users.add(userID); } } const deleteSQLConditions: SQLStatementType[] = []; for (const { conditions, users } of usersByConditions.values()) { const sqlConditions = [ SQL`u.user IN (${[...users]})`, SQL`u.key = ${conditions.key}`, ]; if (conditions.target) { sqlConditions.push(SQL`u.target = ${conditions.target}`); } if (conditions.types) { sqlConditions.push(SQL`u.type IN (${conditions.types})`); } if (conditions.time) { sqlConditions.push(SQL`u.time < ${conditions.time}`); } deleteSQLConditions.push(mergeAndConditions(sqlConditions)); } const promises = {}; if (insertRows.length > 0) { const insertQuery = SQL` INSERT INTO updates(id, user, type, \`key\`, content, time, updater, target) `; insertQuery.append(SQL`VALUES ${insertRows}`); promises.insert = dbQuery(insertQuery); } if (publishInfos.size > 0) { promises.redis = redisPublish(publishInfos.values(), dontBroadcastSession); } if (deleteSQLConditions.length > 0) { promises.delete = (async () => { while (deleteSQLConditions.length > 0) { const batch = deleteSQLConditions.splice(0, deleteUpdatesBatchSize); await deleteUpdatesByConditions(batch); } })(); } if (viewerRawUpdateInfos.length > 0) { invariant(viewerInfo, 'should be set'); promises.updatesResult = fetchUpdateInfosWithRawUpdateInfos( viewerRawUpdateInfos, viewerInfo, ); } const { updatesResult } = await promiseAll(promises); if (!updatesResult) { return defaultUpdateCreationResult; } const { updateInfos, userInfos } = updatesResult; return { viewerUpdates: updateInfos, userInfos }; } export type FetchUpdatesResult = { +updateInfos: $ReadOnlyArray, +userInfos: UserInfos, }; async function fetchUpdateInfosWithRawUpdateInfos( rawUpdateInfos: $ReadOnlyArray, viewerInfo: ViewerInfo, ): Promise { const { viewer } = viewerInfo; const threadIDsNeedingFetch = new Set(); const entryIDsNeedingFetch = new Set(); let currentUserNeedsFetch = false; const threadIDsNeedingDetailedFetch = new Set(); // entries and messages for (const rawUpdateInfo of rawUpdateInfos) { if ( !viewerInfo.threadInfos && (rawUpdateInfo.type === updateTypes.UPDATE_THREAD || rawUpdateInfo.type === updateTypes.JOIN_THREAD) ) { threadIDsNeedingFetch.add(rawUpdateInfo.threadID); } if (rawUpdateInfo.type === updateTypes.JOIN_THREAD) { 
threadIDsNeedingDetailedFetch.add(rawUpdateInfo.threadID); } else if (rawUpdateInfo.type === updateTypes.UPDATE_ENTRY) { entryIDsNeedingFetch.add(rawUpdateInfo.entryID); } else if (rawUpdateInfo.type === updateTypes.UPDATE_CURRENT_USER) { currentUserNeedsFetch = true; } } const promises = {}; if (!viewerInfo.threadInfos && threadIDsNeedingFetch.size > 0) { promises.threadResult = fetchThreadInfos( viewer, SQL`t.id IN (${[...threadIDsNeedingFetch]})`, ); } let calendarQuery: ?CalendarQuery = viewerInfo.calendarQuery ? viewerInfo.calendarQuery : null; if (!calendarQuery && viewer.hasSessionInfo) { // This should only ever happen for "legacy" clients who call in without // providing this information. These clients wouldn't know how to deal with // the corresponding UpdateInfos anyways, so no reason to be worried. calendarQuery = viewer.calendarQuery; } else if (!calendarQuery) { calendarQuery = defaultCalendarQuery(viewer.platform, viewer.timeZone); } if (threadIDsNeedingDetailedFetch.size > 0) { const messageSelectionCriteria = { threadCursors: {} }; for (const threadID of threadIDsNeedingDetailedFetch) { messageSelectionCriteria.threadCursors[threadID] = false; } promises.messageInfosResult = fetchMessageInfos( viewer, messageSelectionCriteria, defaultNumberPerThread, ); const threadCalendarQuery = { ...calendarQuery, filters: [ ...nonThreadCalendarFilters(calendarQuery.filters), { type: 'threads', threadIDs: [...threadIDsNeedingDetailedFetch] }, ], }; promises.calendarResult = fetchEntryInfos(viewer, [threadCalendarQuery]); } if (entryIDsNeedingFetch.size > 0) { promises.entryInfosResult = fetchEntryInfosByID(viewer, [ ...entryIDsNeedingFetch, ]); } if (currentUserNeedsFetch) { promises.currentUserInfoResult = (async () => { const currentUserInfo = await fetchCurrentUserInfo(viewer); invariant(currentUserInfo.anonymous === undefined, 'should be logged in'); return currentUserInfo; })(); } const { threadResult, messageInfosResult, calendarResult, entryInfosResult, currentUserInfoResult, } = await promiseAll(promises); let threadInfosResult; if (viewerInfo.threadInfos) { const { threadInfos } = viewerInfo; threadInfosResult = { threadInfos }; } else if (threadResult) { threadInfosResult = threadResult; } else { threadInfosResult = { threadInfos: {} }; } return await updateInfosFromRawUpdateInfos(viewer, rawUpdateInfos, { threadInfosResult, messageInfosResult, calendarResult, entryInfosResult, currentUserInfoResult, }); } export type UpdateInfosRawData = { threadInfosResult: FetchThreadInfosResult, messageInfosResult: ?FetchMessageInfosResult, calendarResult: ?FetchEntryInfosBase, entryInfosResult: ?$ReadOnlyArray, currentUserInfoResult: ?OldLoggedInUserInfo | LoggedInUserInfo, }; async function updateInfosFromRawUpdateInfos( viewer: Viewer, rawUpdateInfos: $ReadOnlyArray, rawData: UpdateInfosRawData, ): Promise { const { threadInfosResult, messageInfosResult, calendarResult, entryInfosResult, currentUserInfoResult, } = rawData; const updateInfos = []; const userIDsToFetch = new Set(); for (const rawUpdateInfo of rawUpdateInfos) { if (rawUpdateInfo.type === updateTypes.DELETE_ACCOUNT) { updateInfos.push({ type: updateTypes.DELETE_ACCOUNT, id: rawUpdateInfo.id, time: rawUpdateInfo.time, deletedUserID: rawUpdateInfo.deletedUserID, }); } else if (rawUpdateInfo.type === updateTypes.UPDATE_THREAD) { const threadInfo = threadInfosResult.threadInfos[rawUpdateInfo.threadID]; if (!threadInfo) { console.warn( "failed to hydrate updateTypes.UPDATE_THREAD because we couldn't " + `fetch RawThreadInfo for 
${rawUpdateInfo.threadID}`, ); continue; } updateInfos.push({ type: updateTypes.UPDATE_THREAD, id: rawUpdateInfo.id, time: rawUpdateInfo.time, threadInfo, }); } else if (rawUpdateInfo.type === updateTypes.UPDATE_THREAD_READ_STATUS) { updateInfos.push({ type: updateTypes.UPDATE_THREAD_READ_STATUS, id: rawUpdateInfo.id, time: rawUpdateInfo.time, threadID: rawUpdateInfo.threadID, unread: rawUpdateInfo.unread, }); } else if (rawUpdateInfo.type === updateTypes.DELETE_THREAD) { updateInfos.push({ type: updateTypes.DELETE_THREAD, id: rawUpdateInfo.id, time: rawUpdateInfo.time, threadID: rawUpdateInfo.threadID, }); } else if (rawUpdateInfo.type === updateTypes.JOIN_THREAD) { const threadInfo = threadInfosResult.threadInfos[rawUpdateInfo.threadID]; if (!threadInfo) { console.warn( "failed to hydrate updateTypes.JOIN_THREAD because we couldn't " + `fetch RawThreadInfo for ${rawUpdateInfo.threadID}`, ); continue; } const rawEntryInfos = []; invariant(calendarResult, 'should be set'); for (const entryInfo of calendarResult.rawEntryInfos) { if (entryInfo.threadID === rawUpdateInfo.threadID) { rawEntryInfos.push(entryInfo); } } const rawMessageInfos = []; invariant(messageInfosResult, 'should be set'); for (const messageInfo of messageInfosResult.rawMessageInfos) { if (messageInfo.threadID === rawUpdateInfo.threadID) { rawMessageInfos.push(messageInfo); } } updateInfos.push({ type: updateTypes.JOIN_THREAD, id: rawUpdateInfo.id, time: rawUpdateInfo.time, threadInfo, rawMessageInfos, truncationStatus: messageInfosResult.truncationStatuses[rawUpdateInfo.threadID], rawEntryInfos, }); } else if (rawUpdateInfo.type === updateTypes.BAD_DEVICE_TOKEN) { updateInfos.push({ type: updateTypes.BAD_DEVICE_TOKEN, id: rawUpdateInfo.id, time: rawUpdateInfo.time, deviceToken: rawUpdateInfo.deviceToken, }); } else if (rawUpdateInfo.type === updateTypes.UPDATE_ENTRY) { invariant(entryInfosResult, 'should be set'); const entryInfo = entryInfosResult.find( candidate => candidate.id === rawUpdateInfo.entryID, ); if (!entryInfo) { console.warn( "failed to hydrate updateTypes.UPDATE_ENTRY because we couldn't " + `fetch RawEntryInfo for ${rawUpdateInfo.entryID}`, ); continue; } updateInfos.push({ type: updateTypes.UPDATE_ENTRY, id: rawUpdateInfo.id, time: rawUpdateInfo.time, entryInfo, }); } else if (rawUpdateInfo.type === updateTypes.UPDATE_CURRENT_USER) { invariant(currentUserInfoResult, 'should be set'); updateInfos.push({ type: updateTypes.UPDATE_CURRENT_USER, id: rawUpdateInfo.id, time: rawUpdateInfo.time, currentUserInfo: currentUserInfoResult, }); } else if (rawUpdateInfo.type === updateTypes.UPDATE_USER) { updateInfos.push({ type: updateTypes.UPDATE_USER, id: rawUpdateInfo.id, time: rawUpdateInfo.time, updatedUserID: rawUpdateInfo.updatedUserID, }); userIDsToFetch.add(rawUpdateInfo.updatedUserID); } else { invariant(false, `unrecognized updateType ${rawUpdateInfo.type}`); } } let userInfos = {}; if (userIDsToFetch.size > 0) { userInfos = await fetchKnownUserInfos(viewer, [...userIDsToFetch]); } updateInfos.sort(sortFunction); // Now we'll attempt to merge UpdateInfos so that we only have one per key const updateForKey: Map = new Map(); const mergedUpdates: ServerUpdateInfo[] = []; for (const updateInfo of updateInfos) { const key = keyForUpdateInfo(updateInfo); if (!key) { mergedUpdates.push(updateInfo); continue; } else if ( updateInfo.type === updateTypes.DELETE_THREAD || updateInfo.type === updateTypes.JOIN_THREAD || updateInfo.type === updateTypes.DELETE_ACCOUNT ) { updateForKey.set(key, updateInfo); continue; } 
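// Before the type-specific precedence branches that follow, here is a compact
// standalone model of what this per-key merge achieves. The sample updates and
// the string type tags are made up for illustration; the authoritative rules
// are the branches in this loop.
function exampleMergeByKey(exampleUpdates) {
  const byKey = new Map();
  for (const update of exampleUpdates) {
    const current = byKey.get(update.key);
    if (!current) {
      byKey.set(update.key, update);
    } else if (
      update.type === 'UPDATE_THREAD' &&
      current.type === 'UPDATE_THREAD_READ_STATUS'
    ) {
      // a full thread update supersedes a read-status update for the same key
      byKey.set(update.key, update);
    } else if (
      update.type === 'UPDATE_THREAD_READ_STATUS' &&
      current.type === 'UPDATE_THREAD_READ_STATUS'
    ) {
      // between read-status updates alone, the most recent one wins
      byKey.set(update.key, update);
    }
  }
  return [...byKey.values()];
}
// exampleMergeByKey([
//   { key: 'thread88', type: 'UPDATE_THREAD_READ_STATUS', time: 1 },
//   { key: 'thread88', type: 'UPDATE_THREAD', time: 2 },
//   { key: 'thread88', type: 'UPDATE_THREAD_READ_STATUS', time: 3 },
// ]) keeps only the UPDATE_THREAD entry for 'thread88'.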
const currentUpdateInfo = updateForKey.get(key); if (!currentUpdateInfo) { updateForKey.set(key, updateInfo); } else if ( updateInfo.type === updateTypes.UPDATE_THREAD && currentUpdateInfo.type === updateTypes.UPDATE_THREAD_READ_STATUS ) { // UPDATE_THREAD trumps UPDATE_THREAD_READ_STATUS // Note that we keep the oldest UPDATE_THREAD updateForKey.set(key, updateInfo); } else if ( updateInfo.type === updateTypes.UPDATE_THREAD_READ_STATUS && currentUpdateInfo.type === updateTypes.UPDATE_THREAD_READ_STATUS ) { // If we only have UPDATE_THREAD_READ_STATUS, keep the most recent updateForKey.set(key, updateInfo); } else if (updateInfo.type === updateTypes.UPDATE_ENTRY) { updateForKey.set(key, updateInfo); } else if (updateInfo.type === updateTypes.UPDATE_CURRENT_USER) { updateForKey.set(key, updateInfo); } } for (const [, updateInfo] of updateForKey) { mergedUpdates.push(updateInfo); } mergedUpdates.sort(sortFunction); return { updateInfos: mergedUpdates, userInfos }; } type PublishInfo = { updateTarget: UpdateTarget, rawUpdateInfos: RawUpdateInfo[], }; async function redisPublish( publishInfos: Iterator, dontBroadcastSession: ?string, ): Promise { for (const publishInfo of publishInfos) { const { updateTarget, rawUpdateInfos } = publishInfo; const redisMessage: NewUpdatesRedisMessage = { type: redisMessageTypes.NEW_UPDATES, updates: rawUpdateInfos, }; if (!updateTarget.sessionID && dontBroadcastSession) { redisMessage.ignoreSession = dontBroadcastSession; } publisher.sendMessage(updateTarget, redisMessage); } } function getTargetFromUpdateData(updateData: UpdateData): ?string { if (updateData.targetSession) { return updateData.targetSession; } else if (updateData.targetCookie) { return updateData.targetCookie; } else { return null; } } function getDeleteCondition(updateData: UpdateData): ?DeleteCondition { let types; if (updateData.type === updateTypes.DELETE_ACCOUNT) { types = new Set([updateTypes.DELETE_ACCOUNT, updateTypes.UPDATE_USER]); } else if (updateData.type === updateTypes.UPDATE_THREAD) { types = new Set([ updateTypes.UPDATE_THREAD, updateTypes.UPDATE_THREAD_READ_STATUS, ]); } else if (updateData.type === updateTypes.UPDATE_THREAD_READ_STATUS) { types = new Set([updateTypes.UPDATE_THREAD_READ_STATUS]); } else if ( updateData.type === updateTypes.DELETE_THREAD || updateData.type === updateTypes.JOIN_THREAD ) { types = 'all_types'; } else if (updateData.type === updateTypes.UPDATE_ENTRY) { types = 'all_types'; } else if (updateData.type === updateTypes.UPDATE_CURRENT_USER) { types = new Set([updateTypes.UPDATE_CURRENT_USER]); } else if (updateData.type === updateTypes.UPDATE_USER) { types = new Set([updateTypes.UPDATE_USER]); } else { return null; } const target = getTargetFromUpdateData(updateData); const { userID } = updateData; return { userID, target, types }; } function filterOnDeleteCondition( updateData: UpdateData, deleteCondition: DeleteCondition, ): boolean { invariant( updateData.userID === deleteCondition.userID, `updateData of type ${updateData.type} being compared to wrong userID`, ); if (deleteCondition.target) { const target = getTargetFromUpdateData(updateData); if (target !== deleteCondition.target) { return true; } } if (deleteCondition.types === 'all_types') { return false; } return !deleteCondition.types.has(updateData.type); } export { createUpdates, fetchUpdateInfosWithRawUpdateInfos }; diff --git a/keyserver/src/creators/upload-creator.js b/keyserver/src/creators/upload-creator.js index d18d28ad5..169c86ef2 100644 --- a/keyserver/src/creators/upload-creator.js 
+++ b/keyserver/src/creators/upload-creator.js @@ -1,71 +1,71 @@ // @flow import crypto from 'crypto'; -import { shimUploadURI } from 'lib/media/media-utils'; +import { shimUploadURI } from 'lib/media/media-utils.js'; import type { MediaType, UploadMultimediaResult, Dimensions, -} from 'lib/types/media-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/media-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import { getUploadURL } from '../fetchers/upload-fetchers'; -import type { Viewer } from '../session/viewer'; -import createIDs from './id-creator'; +import { dbQuery, SQL } from '../database/database.js'; +import { getUploadURL } from '../fetchers/upload-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import createIDs from './id-creator.js'; export type UploadInput = { name: string, mime: string, mediaType: MediaType, buffer: Buffer, dimensions: Dimensions, loop: boolean, }; async function createUploads( viewer: Viewer, uploadInfos: $ReadOnlyArray, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const ids = await createIDs('uploads', uploadInfos.length); const uploadRows = uploadInfos.map(uploadInfo => { const id = ids.shift(); const secret = crypto.randomBytes(8).toString('hex'); const { dimensions, mediaType, loop } = uploadInfo; return { uploadResult: { id, uri: shimUploadURI(getUploadURL(id, secret), viewer.platformDetails), dimensions, mediaType, loop, }, insert: [ id, viewer.userID, mediaType, uploadInfo.name, uploadInfo.mime, uploadInfo.buffer, secret, Date.now(), JSON.stringify({ ...dimensions, loop }), ], }; }); const insertQuery = SQL` INSERT INTO uploads(id, uploader, type, filename, mime, content, secret, creation_time, extra) VALUES ${uploadRows.map(({ insert }) => insert)} `; await dbQuery(insertQuery); return uploadRows.map(({ uploadResult }) => uploadResult); } export default createUploads; diff --git a/keyserver/src/cron/backups.js b/keyserver/src/cron/backups.js index 371330b30..7fa35bd35 100644 --- a/keyserver/src/cron/backups.js +++ b/keyserver/src/cron/backups.js @@ -1,277 +1,277 @@ // @flow import childProcess from 'child_process'; import dateFormat from 'dateformat'; import fs from 'fs'; import invariant from 'invariant'; import { ReReadable } from 'rereadable-stream'; import { PassThrough } from 'stream'; import { promisify } from 'util'; import zlib from 'zlib'; -import { getDBConfig, type DBConfig } from '../database/db-config'; -import { importJSON } from '../utils/import-json'; +import { getDBConfig, type DBConfig } from '../database/db-config.js'; +import { importJSON } from '../utils/import-json.js'; const readdir = promisify(fs.readdir); const lstat = promisify(fs.lstat); const unlink = promisify(fs.unlink); type BackupConfig = { +enabled: boolean, +directory: string, +maxDirSizeMiB?: ?number, }; function getBackupConfig(): Promise { return importJSON({ folder: 'facts', name: 'backups' }); } async function backupDB() { const [backupConfig, dbConfig] = await Promise.all([ getBackupConfig(), getDBConfig(), ]); if (!backupConfig || !backupConfig.enabled) { return; } const dateString = dateFormat('yyyy-mm-dd-HH:MM'); const filename = `comm.${dateString}.sql.gz`; const filePath = `${backupConfig.directory}/${filename}`; const rawStream = new PassThrough(); (async () => { try { await mysqldump(dbConfig, filename, rawStream, ['--no-data'], { end: false, }); } catch {} try { const ignoreReports = 
`--ignore-table=${dbConfig.database}.reports`; await mysqldump(dbConfig, filename, rawStream, [ '--no-create-info', ignoreReports, ]); } catch { rawStream.end(); } })(); const gzippedBuffer = new ReReadable(); rawStream .on('error', (e: Error) => { console.warn(`mysqldump stdout stream emitted error for ${filename}`, e); }) .pipe(zlib.createGzip()) .on('error', (e: Error) => { console.warn(`gzip transform stream emitted error for ${filename}`, e); }) .pipe(gzippedBuffer); try { await saveBackup(filename, filePath, gzippedBuffer); } catch (e) { console.warn(`saveBackup threw for ${filename}`, e); await unlink(filePath); } await deleteOldBackupsIfSpaceExceeded(); } function mysqldump( dbConfig: DBConfig, filename: string, rawStream: PassThrough, extraParams: $ReadOnlyArray, pipeParams?: { end?: boolean, ... }, ): Promise { const mysqlDump = childProcess.spawn( 'mysqldump', [ '-h', dbConfig.host, '-u', dbConfig.user, `-p${dbConfig.password}`, '--single-transaction', '--no-tablespaces', '--default-character-set=utf8mb4', '--net-buffer-length=523264', ...extraParams, dbConfig.database, ], { stdio: ['ignore', 'pipe', 'ignore'], }, ); const extraParamsString = extraParams.join(' '); return new Promise((resolve, reject) => { mysqlDump.on('error', (e: Error) => { console.warn( `error trying to spawn mysqldump ${extraParamsString} for ${filename}`, e, ); reject(e); }); mysqlDump.on('exit', (code: number | null, signal: string | null) => { if (signal !== null && signal !== undefined) { console.warn( `mysqldump ${extraParamsString} received signal ${signal} for ` + filename, ); reject(new Error(`mysqldump ${JSON.stringify({ code, signal })}`)); } else if (code !== null && code !== 0) { console.warn( `mysqldump ${extraParamsString} exited with code ${code} for ` + filename, ); reject(new Error(`mysqldump ${JSON.stringify({ code, signal })}`)); } resolve(); }); mysqlDump.stdout.pipe(rawStream, pipeParams); }); } async function saveBackup( filename: string, filePath: string, gzippedBuffer: ReReadable, retries: number = 2, ): Promise { try { await trySaveBackup(filename, filePath, gzippedBuffer); } catch (saveError) { if (saveError.code !== 'ENOSPC') { throw saveError; } if (!retries) { throw saveError; } try { await deleteOldestBackup(); } catch (deleteError) { if (deleteError.message === 'no_backups_left') { throw saveError; } else { throw deleteError; } } await saveBackup(filename, filePath, gzippedBuffer, retries - 1); } } const backupWatchFrequency = 60 * 1000; function trySaveBackup( filename: string, filePath: string, gzippedBuffer: ReReadable, ): Promise { const timeoutObject: { timeout: ?TimeoutID } = { timeout: null }; const setBackupTimeout = (alreadyWaited: number) => { timeoutObject.timeout = setTimeout(() => { const nowWaited = alreadyWaited + backupWatchFrequency; console.log( `writing backup for ${filename} has taken ${nowWaited}ms so far`, ); setBackupTimeout(nowWaited); }, backupWatchFrequency); }; setBackupTimeout(0); const writeStream = fs.createWriteStream(filePath); return new Promise((resolve, reject) => { gzippedBuffer .rewind() .pipe(writeStream) .on('finish', () => { clearTimeout(timeoutObject.timeout); resolve(); }) .on('error', (e: Error) => { clearTimeout(timeoutObject.timeout); console.warn(`write stream emitted error for ${filename}`, e); reject(e); }); }); } async function deleteOldestBackup() { const sortedBackupInfos = await getSortedBackupInfos(); if (sortedBackupInfos.length === 0) { throw new Error('no_backups_left'); } const oldestFilename = 
sortedBackupInfos[0].filename; await deleteBackup(oldestFilename); } async function deleteBackup(filename: string) { const backupConfig = await getBackupConfig(); invariant(backupConfig, 'backupConfig should be non-null'); try { await unlink(`${backupConfig.directory}/${filename}`); } catch (e) { // Check if it's already been deleted if (e.code !== 'ENOENT') { throw e; } } } type BackupInfo = { +filename: string, +lastModifiedTime: number, +bytes: number, }; async function getSortedBackupInfos(): Promise { const backupConfig = await getBackupConfig(); invariant(backupConfig, 'backupConfig should be non-null'); const filenames = await readdir(backupConfig.directory); const backups = await Promise.all( filenames.map(async filename => { if (!filename.startsWith('comm.') || !filename.endsWith('.sql.gz')) { return null; } const stats = await lstat(`${backupConfig.directory}/${filename}`); if (stats.isDirectory()) { return null; } return { filename, lastModifiedTime: stats.mtime, bytes: stats.size, }; }), ); const filteredBackups = backups.filter(Boolean); filteredBackups.sort((a, b) => a.lastModifiedTime - b.lastModifiedTime); return filteredBackups; } async function deleteOldBackupsIfSpaceExceeded() { const backupConfig = await getBackupConfig(); invariant(backupConfig, 'backupConfig should be non-null'); const { maxDirSizeMiB } = backupConfig; if (!maxDirSizeMiB) { return; } const sortedBackupInfos = await getSortedBackupInfos(); const mostRecentBackup = sortedBackupInfos.pop(); let bytesLeft = maxDirSizeMiB * 1024 * 1024 - mostRecentBackup.bytes; const deleteBackupPromises = []; for (let i = sortedBackupInfos.length - 1; i >= 0; i--) { const backupInfo = sortedBackupInfos[i]; bytesLeft -= backupInfo.bytes; if (bytesLeft <= 0) { deleteBackupPromises.push(deleteBackup(backupInfo.filename)); } } await Promise.all(deleteBackupPromises); } export { backupDB }; diff --git a/keyserver/src/cron/cron.js b/keyserver/src/cron/cron.js index 8142a4175..1f79b608a 100644 --- a/keyserver/src/cron/cron.js +++ b/keyserver/src/cron/cron.js @@ -1,93 +1,93 @@ // @flow import cluster from 'cluster'; import schedule from 'node-schedule'; -import { deleteOrphanedActivity } from '../deleters/activity-deleters'; -import { deleteExpiredCookies } from '../deleters/cookie-deleters'; -import { deleteOrphanedDays } from '../deleters/day-deleters'; -import { deleteOrphanedEntries } from '../deleters/entry-deleters'; -import { deleteOrphanedMemberships } from '../deleters/membership-deleters'; -import { deleteOrphanedMessages } from '../deleters/message-deleters'; -import { deleteOrphanedNotifs } from '../deleters/notif-deleters'; -import { deleteOrphanedRevisions } from '../deleters/revision-deleters'; -import { deleteOrphanedRoles } from '../deleters/role-deleters'; +import { deleteOrphanedActivity } from '../deleters/activity-deleters.js'; +import { deleteExpiredCookies } from '../deleters/cookie-deleters.js'; +import { deleteOrphanedDays } from '../deleters/day-deleters.js'; +import { deleteOrphanedEntries } from '../deleters/entry-deleters.js'; +import { deleteOrphanedMemberships } from '../deleters/membership-deleters.js'; +import { deleteOrphanedMessages } from '../deleters/message-deleters.js'; +import { deleteOrphanedNotifs } from '../deleters/notif-deleters.js'; +import { deleteOrphanedRevisions } from '../deleters/revision-deleters.js'; +import { deleteOrphanedRoles } from '../deleters/role-deleters.js'; import { deleteOrphanedSessions, deleteOldWebSessions, -} from '../deleters/session-deleters'; +} from 
'../deleters/session-deleters.js'; import { deleteStaleSIWENonceEntries } from '../deleters/siwe-nonce-deleters.js'; -import { deleteInaccessibleThreads } from '../deleters/thread-deleters'; -import { deleteExpiredUpdates } from '../deleters/update-deleters'; -import { deleteUnassignedUploads } from '../deleters/upload-deleters'; -import { backupDB } from './backups'; -import { createDailyUpdatesThread } from './daily-updates'; -import { updateAndReloadGeoipDB } from './update-geoip-db'; +import { deleteInaccessibleThreads } from '../deleters/thread-deleters.js'; +import { deleteExpiredUpdates } from '../deleters/update-deleters.js'; +import { deleteUnassignedUploads } from '../deleters/upload-deleters.js'; +import { backupDB } from './backups.js'; +import { createDailyUpdatesThread } from './daily-updates.js'; +import { updateAndReloadGeoipDB } from './update-geoip-db.js'; if (cluster.isMaster) { schedule.scheduleJob( '30 3 * * *', // every day at 3:30 AM Pacific Time async () => { try { // Do everything one at a time to reduce load since we're in no hurry, // and since some queries depend on previous ones. await deleteExpiredCookies(); await deleteInaccessibleThreads(); await deleteOrphanedMemberships(); await deleteOrphanedDays(); await deleteOrphanedEntries(); await deleteOrphanedRevisions(); await deleteOrphanedRoles(); await deleteOrphanedMessages(); await deleteOrphanedActivity(); await deleteOrphanedNotifs(); await deleteOrphanedSessions(); await deleteOldWebSessions(); await deleteExpiredUpdates(); await deleteUnassignedUploads(); await deleteStaleSIWENonceEntries(); } catch (e) { console.warn('encountered error while trying to clean database', e); } }, ); schedule.scheduleJob( '0 */4 * * *', // every four hours async () => { try { await backupDB(); } catch (e) { console.warn('encountered error while trying to backup database', e); } }, ); schedule.scheduleJob( '0 3 ? 
* 0', // every Sunday at 3:00 AM GMT async () => { try { await updateAndReloadGeoipDB(); } catch (e) { console.warn( 'encountered error while trying to update GeoIP database', e, ); } }, ); schedule.scheduleJob( '0 0 * * *', // every day at midnight GMT async () => { try { if (process.env.RUN_COMM_TEAM_DEV_SCRIPTS) { // This is a job that the Comm internal team uses await createDailyUpdatesThread(); } } catch (e) { console.warn( 'encountered error while trying to create daily updates thread', e, ); } }, ); } diff --git a/keyserver/src/cron/daily-updates.js b/keyserver/src/cron/daily-updates.js index 9ed0cf094..8f909f811 100644 --- a/keyserver/src/cron/daily-updates.js +++ b/keyserver/src/cron/daily-updates.js @@ -1,101 +1,101 @@ // @flow import invariant from 'invariant'; -import ashoat from 'lib/facts/ashoat'; -import { messageTypes } from 'lib/types/message-types'; -import { threadTypes } from 'lib/types/thread-types'; +import ashoat from 'lib/facts/ashoat.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadTypes } from 'lib/types/thread-types.js'; import { getDate, dateString, prettyDateWithoutYear, prettyDateWithoutDay, -} from 'lib/utils/date-utils'; +} from 'lib/utils/date-utils.js'; -import createMessages from '../creators/message-creator'; -import { createThread } from '../creators/thread-creator'; -import { fetchEntryInfosForThreadThisWeek } from '../fetchers/entry-fetchers'; -import { createScriptViewer } from '../session/scripts'; +import createMessages from '../creators/message-creator.js'; +import { createThread } from '../creators/thread-creator.js'; +import { fetchEntryInfosForThreadThisWeek } from '../fetchers/entry-fetchers.js'; +import { createScriptViewer } from '../session/scripts.js'; const devUpdateThread = '1358777'; const weeklyDevSyncScheduleThread = '4138372'; const dailyUpdateMessage = (dateWithoutYear: string, dateWithoutDay: string) => `### ${dateWithoutDay} update Share your updates for ${dateWithoutYear} here please!`; const dateIsWeekend = (date: Date) => date.getDay() === 0 || date.getDay() === 6; // This function will do something four days a week. It skips Saturday and // Sunday. The hard part is the third skipped day, which is the day of the // weekly dev sync. By default this is Monday, but if the dev sync is on a // different day, then an admin will put a calendar entry in the // weeklyDevSyncScheduleThread indicating which day to skip. async function createDailyUpdatesThread() { if (!process.env.RUN_COMM_TEAM_DEV_SCRIPTS) { // This is a job that the Comm internal team uses return; } const viewer = createScriptViewer(ashoat.id); const now = new Date(); if (dateIsWeekend(now)) { // nothing happens on Saturday or Sunday return; } // Figure out which day the dev sync is on let devSyncDay = 1; // default to Monday const entryInfosInDevSyncScheduleThreadThisWeek = await fetchEntryInfosForThreadThisWeek( viewer, weeklyDevSyncScheduleThread, ); for (const entryInfo of entryInfosInDevSyncScheduleThreadThisWeek) { const entryInfoDate = getDate( entryInfo.year, entryInfo.month, entryInfo.day, ); if (dateIsWeekend(entryInfoDate)) { // Ignore calendar entries on weekend continue; } devSyncDay = entryInfoDate.getDay(); // Use the newest entryInfo. 
fetchEntryInfos sorts by creation time break; } if (devSyncDay === now.getDay()) { // Skip the dev sync day return; } const dayString = dateString(now); const dateWithoutYear = prettyDateWithoutYear(dayString); const dateWithoutDay = prettyDateWithoutDay(dayString); const [{ id: messageID }] = await createMessages(viewer, [ { type: messageTypes.TEXT, threadID: devUpdateThread, creatorID: ashoat.id, time: Date.now(), text: dailyUpdateMessage(dateWithoutYear, dateWithoutDay), }, ]); invariant( messageID, 'message returned from createMessages always has ID set', ); await createThread(viewer, { type: threadTypes.SIDEBAR, parentThreadID: devUpdateThread, name: `${dateWithoutDay} update`, sourceMessageID: messageID, }); } export { createDailyUpdatesThread }; diff --git a/keyserver/src/cron/update-geoip-db.js b/keyserver/src/cron/update-geoip-db.js index 7e2281bb3..4f14efe6c 100644 --- a/keyserver/src/cron/update-geoip-db.js +++ b/keyserver/src/cron/update-geoip-db.js @@ -1,62 +1,62 @@ // @flow import childProcess from 'child_process'; import cluster from 'cluster'; import geoip from 'geoip-lite'; -import { handleAsyncPromise } from '../responders/handlers'; -import { importJSON } from '../utils/import-json'; +import { handleAsyncPromise } from '../responders/handlers.js'; +import { importJSON } from '../utils/import-json.js'; async function updateGeoipDB(): Promise { const geoipLicense = await importJSON({ folder: 'secrets', name: 'geoip_license', }); if (!geoipLicense) { console.log('no keyserver/secrets/geoip_license.json so skipping update'); return; } await spawnUpdater(geoipLicense); } function spawnUpdater(geoipLicense: { key: string }): Promise { const spawned = childProcess.spawn(process.execPath, [ '../node_modules/geoip-lite/scripts/updatedb.js', `license_key=${geoipLicense.key}`, ]); return new Promise((resolve, reject) => { spawned.on('error', reject); spawned.on('exit', () => resolve()); }); } function reloadGeoipDB(): Promise { return new Promise(resolve => geoip.reloadData(resolve)); } type IPCMessage = { type: 'geoip_reload', }; const reloadMessage: IPCMessage = { type: 'geoip_reload' }; async function updateAndReloadGeoipDB(): Promise { await updateGeoipDB(); await reloadGeoipDB(); if (!cluster.isMaster) { return; } for (const id in cluster.workers) { cluster.workers[Number(id)].send(reloadMessage); } } if (!cluster.isMaster) { process.on('message', (ipcMessage: IPCMessage) => { if (ipcMessage.type === 'geoip_reload') { handleAsyncPromise(reloadGeoipDB()); } }); } export { updateGeoipDB, updateAndReloadGeoipDB }; diff --git a/keyserver/src/database/database.js b/keyserver/src/database/database.js index 8d6e9d6d8..80274456a 100644 --- a/keyserver/src/database/database.js +++ b/keyserver/src/database/database.js @@ -1,206 +1,206 @@ // @flow import type { ConnectionOptions, QueryResults, PoolOptions } from 'mysql'; import mysql from 'mysql2'; -import mysqlPromise from 'mysql2/promise'; +import mysqlPromise from 'mysql2/promise.js'; import SQL from 'sql-template-strings'; -import { getScriptContext } from '../scripts/script-context'; -import { connectionLimit, queryWarnTime } from './consts'; -import { getDBConfig } from './db-config'; -import DatabaseMonitor from './monitor'; -import type { Pool, SQLOrString, SQLStatementType } from './types'; +import { getScriptContext } from '../scripts/script-context.js'; +import { connectionLimit, queryWarnTime } from './consts.js'; +import { getDBConfig } from './db-config.js'; +import DatabaseMonitor from './monitor.js'; +import type { Pool, 
SQLOrString, SQLStatementType } from './types.js';

const SQLStatement: SQLStatementType = SQL.SQLStatement;

let migrationConnection;
async function getMigrationConnection() {
  if (migrationConnection) {
    return migrationConnection;
  }
  const { dbType, ...dbConfig } = await getDBConfig();
  const options: ConnectionOptions = dbConfig;
  migrationConnection = await mysqlPromise.createConnection(options);
  return migrationConnection;
}

let pool, databaseMonitor;
async function loadPool(): Promise<Pool> {
  if (pool) {
    return pool;
  }
  const scriptContext = getScriptContext();
  const { dbType, ...dbConfig } = await getDBConfig();
  const options: PoolOptions = {
    ...dbConfig,
    connectionLimit,
    multipleStatements: !!(
      scriptContext && scriptContext.allowMultiStatementSQLQueries
    ),
  };
  // This function can be run multiple times concurrently. The check at the
  // top is not enough on its own because we await `getDBConfig()` before
  // getting here, so by this point the pool may already have been created by
  // another call. Without this second check we would create a new pool and
  // lose the previous one, which would stay open.
  if (pool) {
    return pool;
  }
  pool = mysqlPromise.createPool(options);
  databaseMonitor = new DatabaseMonitor(pool);
  return pool;
}

function endPool() {
  pool?.end();
}

function appendSQLArray(
  sql: SQLStatementType,
  sqlArray: $ReadOnlyArray<SQLStatementType>,
  delimeter: SQLOrString,
): SQLStatementType {
  if (sqlArray.length === 0) {
    return sql;
  }
  const [first, ...rest] = sqlArray;
  sql.append(first);
  if (rest.length === 0) {
    return sql;
  }
  return rest.reduce(
    (prev: SQLStatementType, curr: SQLStatementType) =>
      prev.append(delimeter).append(curr),
    sql,
  );
}

function mergeConditions(
  conditions: $ReadOnlyArray<SQLStatementType>,
  delimiter: SQLStatementType,
): SQLStatementType {
  const sql = SQL` (`;
  appendSQLArray(sql, conditions, delimiter);
  sql.append(SQL`) `);
  return sql;
}

function mergeAndConditions(
  andConditions: $ReadOnlyArray<SQLStatementType>,
): SQLStatementType {
  return mergeConditions(andConditions, SQL` AND `);
}

function mergeOrConditions(
  andConditions: $ReadOnlyArray<SQLStatementType>,
): SQLStatementType {
  return mergeConditions(andConditions, SQL` OR `);
}

// We use this fake result for dry runs
const fakeResult: QueryResults = (() => {
  const result: any = [];
  result.insertId = -1;
  return result;
})();

const MYSQL_DEADLOCK_ERROR_CODE = 1213;

type ConnectionContext = {
  +migrationsActive?: boolean,
};
let connectionContext = {
  migrationsActive: false,
};

function setConnectionContext(newContext: ConnectionContext) {
  connectionContext = {
    ...connectionContext,
    ...newContext,
  };
  if (!connectionContext.migrationsActive && migrationConnection) {
    migrationConnection.end();
    migrationConnection = undefined;
  }
}

type QueryOptions = {
  +triesLeft?: number,
  +multipleStatements?: boolean,
};
async function dbQuery(
  statement: SQLStatementType,
  options?: QueryOptions,
): Promise<QueryResults> {
  const triesLeft = options?.triesLeft ?? 2;
  const multipleStatements = options?.multipleStatements ??
false; let connection; if (connectionContext.migrationsActive) { connection = await getMigrationConnection(); } if (multipleStatements) { connection = await getMultipleStatementsConnection(); } if (!connection) { connection = await loadPool(); } const timeoutID = setTimeout( () => databaseMonitor.reportLaggingQuery(statement.sql), queryWarnTime, ); const scriptContext = getScriptContext(); try { const sql = statement.sql.trim(); if ( scriptContext && scriptContext.dryRun && (sql.startsWith('INSERT') || sql.startsWith('DELETE') || sql.startsWith('UPDATE')) ) { console.log(rawSQL(statement)); return ([fakeResult]: any); } return await connection.query(statement); } catch (e) { if (e.errno === MYSQL_DEADLOCK_ERROR_CODE && triesLeft > 0) { console.log('deadlock occurred, trying again', e); return await dbQuery(statement, { ...options, triesLeft: triesLeft - 1 }); } e.query = statement.sql; throw e; } finally { clearTimeout(timeoutID); if (multipleStatements) { connection.end(); } } } function rawSQL(statement: SQLStatementType): string { return mysql.format(statement.sql, statement.values); } async function getMultipleStatementsConnection() { const { dbType, ...dbConfig } = await getDBConfig(); const options: ConnectionOptions = { ...dbConfig, multipleStatements: true, }; return await mysqlPromise.createConnection(options); } export { endPool, SQL, SQLStatement, appendSQLArray, mergeAndConditions, mergeOrConditions, setConnectionContext, dbQuery, rawSQL, }; diff --git a/keyserver/src/database/db-config.js b/keyserver/src/database/db-config.js index 3574ba6e7..55082fd61 100644 --- a/keyserver/src/database/db-config.js +++ b/keyserver/src/database/db-config.js @@ -1,70 +1,70 @@ // @flow import invariant from 'invariant'; -import { importJSON } from '../utils/import-json'; +import { importJSON } from '../utils/import-json.js'; type DBType = 'mariadb10.8'; export type DBConfig = { +host: string, +user: string, +password: string, +database: string, +dbType: DBType, }; function assertValidDBType(dbType: ?string): DBType { invariant( dbType, 'dbType not specified in DB config. Following the MySQL deprecation this ' + 'is a required parameter. Please follow this Gist to migrate to ' + 'MariaDB: ' + 'https://gist.github.com/Ashoat/3a5ded2549db082c5516606f3c3c5da5', ); invariant( dbType !== 'mysql5.7', 'We no longer support MySQL. 
Please follow this Gist to migrate to ' +
      'MariaDB: ' +
      'https://gist.github.com/Ashoat/3a5ded2549db082c5516606f3c3c5da5',
  );
  invariant(dbType === 'mariadb10.8', `${dbType} is not a valid dbType`);
  return dbType;
}

let dbConfig;

async function getDBConfig(): Promise<DBConfig> {
  if (dbConfig !== undefined) {
    return dbConfig;
  }
  if (
    process.env.COMM_DATABASE_DATABASE &&
    process.env.COMM_DATABASE_USER &&
    process.env.COMM_DATABASE_PASSWORD
  ) {
    dbConfig = {
      host: process.env.COMM_DATABASE_HOST || 'localhost',
      user: process.env.COMM_DATABASE_USER,
      password: process.env.COMM_DATABASE_PASSWORD,
      database: process.env.COMM_DATABASE_DATABASE,
      dbType: assertValidDBType(process.env.COMM_DATABASE_TYPE),
    };
  } else {
    const importedDBConfig = await importJSON({
      folder: 'secrets',
      name: 'db_config',
    });
    invariant(importedDBConfig, 'DB config missing');
    dbConfig = {
      ...importedDBConfig,
      dbType: assertValidDBType(importedDBConfig.dbType),
    };
  }
  return dbConfig;
}

async function getDBType(): Promise<DBType> {
  const config = await getDBConfig();
  return config.dbType;
}

export { getDBConfig, getDBType };
diff --git a/keyserver/src/database/db-version.js b/keyserver/src/database/db-version.js
index f636f80ed..d408861a9 100644
--- a/keyserver/src/database/db-version.js
+++ b/keyserver/src/database/db-version.js
@@ -1,33 +1,33 @@
// @flow

import type { QueryResults } from 'mysql';

-import { dbQuery, SQL } from './database';
+import { dbQuery, SQL } from './database.js';

const dbVersionMetadataKey = 'db_version';

async function fetchDBVersion(): Promise<number> {
  const versionQuery = SQL`
    SELECT data
    FROM metadata
    WHERE name = ${dbVersionMetadataKey};
  `;
  const [[versionResult]] = await dbQuery(versionQuery);
  if (!versionResult) {
    return -1;
  }
  return versionResult.data;
}

async function updateDBVersion(dbVersion: number): Promise<QueryResults> {
  const updateQuery = SQL`
    INSERT INTO metadata (name, data)
    VALUES (${dbVersionMetadataKey}, ${dbVersion})
    ON DUPLICATE KEY UPDATE data = ${dbVersion};
  `;
  return dbQuery(updateQuery);
}

export { fetchDBVersion, updateDBVersion };
diff --git a/keyserver/src/database/migration-config.js b/keyserver/src/database/migration-config.js
index 6cdae2e68..2591d924b 100644
--- a/keyserver/src/database/migration-config.js
+++ b/keyserver/src/database/migration-config.js
@@ -1,246 +1,246 @@
// @flow

import fs from 'fs';

import { policyTypes } from 'lib/facts/policies.js';

-import { dbQuery, SQL } from '../database/database';
-import { updateRolesAndPermissionsForAllThreads } from '../updaters/thread-permission-updaters';
+import { dbQuery, SQL } from '../database/database.js';
+import { updateRolesAndPermissionsForAllThreads } from '../updaters/thread-permission-updaters.js';

const migrations: $ReadOnlyMap<number, () => Promise<void>> = new Map([
  [
    0,
    async () => {
      await makeSureBaseRoutePathExists('facts/commapp_url.json');
      await makeSureBaseRoutePathExists('facts/squadcal_url.json');
    },
  ],
  [
    1,
    async () => {
      try {
        await fs.promises.unlink('facts/url.json');
      } catch {}
    },
  ],
  [
    2,
    async () => {
      await fixBaseRoutePathForLocalhost('facts/commapp_url.json');
      await fixBaseRoutePathForLocalhost('facts/squadcal_url.json');
    },
  ],
  [3, updateRolesAndPermissionsForAllThreads],
  [
    4,
    async () => {
      await dbQuery(SQL`ALTER TABLE uploads ADD INDEX container (container)`);
    },
  ],
  [
    5,
    async () => {
      await dbQuery(SQL`
        ALTER TABLE cookies
          ADD device_id varchar(255) DEFAULT NULL,
          ADD public_key varchar(255) DEFAULT NULL,
          ADD social_proof varchar(255) DEFAULT NULL;
      `);
    },
  ],
  [
    7,
    async () => {
      await dbQuery(
        SQL`
          ALTER TABLE users
            DROP COLUMN IF EXISTS public_key,
MODIFY hash char(60) COLLATE utf8mb4_bin DEFAULT NULL; ALTER TABLE sessions DROP COLUMN IF EXISTS public_key; `, { multipleStatements: true }, ); }, ], [ 8, async () => { await dbQuery( SQL` ALTER TABLE users ADD COLUMN IF NOT EXISTS ethereum_address char(42) DEFAULT NULL; `, ); }, ], [ 9, async () => { await dbQuery( SQL` ALTER TABLE messages ADD COLUMN IF NOT EXISTS target_message bigint(20) DEFAULT NULL; ALTER TABLE messages ADD INDEX target_message (target_message); `, { multipleStatements: true }, ); }, ], [ 10, async () => { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS policy_acknowledgments ( user bigint(20) NOT NULL, policy varchar(255) NOT NULL, date bigint(20) NOT NULL, confirmed tinyint(1) UNSIGNED NOT NULL DEFAULT 0, PRIMARY KEY (user, policy) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); }, ], [ 11, async () => { const time = Date.now(); await dbQuery(SQL` INSERT IGNORE INTO policy_acknowledgments (policy, user, date, confirmed) SELECT ${policyTypes.tosAndPrivacyPolicy}, id, ${time}, 1 FROM users `); }, ], [ 12, async () => { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS siwe_nonces ( nonce char(17) NOT NULL, creation_time bigint(20) NOT NULL, PRIMARY KEY (nonce) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); }, ], [ 13, async () => { await Promise.all([ writeSquadCalRoute('facts/squadcal_url.json'), moveToNonApacheConfig('facts/commapp_url.json', '/comm/'), moveToNonApacheConfig('facts/landing_url.json', '/commlanding/'), ]); }, ], [ 14, async () => { await dbQuery(SQL` ALTER TABLE cookies MODIFY COLUMN social_proof mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL; `); }, ], ]); const newDatabaseVersion: number = Math.max(...migrations.keys()); async function writeJSONToFile(data: any, filePath: string): Promise { console.warn(`updating ${filePath} to ${JSON.stringify(data)}`); const fileHandle = await fs.promises.open(filePath, 'w'); await fileHandle.writeFile(JSON.stringify(data, null, ' '), 'utf8'); await fileHandle.close(); } async function makeSureBaseRoutePathExists(filePath: string): Promise { let readFile, json; try { readFile = await fs.promises.open(filePath, 'r'); const contents = await readFile.readFile('utf8'); json = JSON.parse(contents); } catch { return; } finally { if (readFile) { await readFile.close(); } } if (json.baseRoutePath) { return; } let baseRoutePath; if (json.baseDomain === 'http://localhost') { baseRoutePath = json.basePath; } else if (filePath.endsWith('commapp_url.json')) { baseRoutePath = '/commweb/'; } else { baseRoutePath = '/'; } const newJSON = { ...json, baseRoutePath }; console.warn(`updating ${filePath} to ${JSON.stringify(newJSON)}`); await writeJSONToFile(newJSON, filePath); } async function fixBaseRoutePathForLocalhost(filePath: string): Promise { let readFile, json; try { readFile = await fs.promises.open(filePath, 'r'); const contents = await readFile.readFile('utf8'); json = JSON.parse(contents); } catch { return; } finally { if (readFile) { await readFile.close(); } } if (json.baseDomain !== 'http://localhost') { return; } const baseRoutePath = '/'; json = { ...json, baseRoutePath }; console.warn(`updating ${filePath} to ${JSON.stringify(json)}`); await writeJSONToFile(json, filePath); } async function moveToNonApacheConfig( filePath: string, routePath: string, ): Promise { if (process.env.COMM_DATABASE_HOST) { return; } // Since the non-Apache config is so opinionated, just write expected config const newJSON = { baseDomain: 'http://localhost:3000', basePath: routePath, baseRoutePath: routePath, https: false, proxy: 
'none', }; await writeJSONToFile(newJSON, filePath); } async function writeSquadCalRoute(filePath: string): Promise { if (process.env.COMM_DATABASE_HOST) { return; } // Since the non-Apache config is so opinionated, just write expected config const newJSON = { baseDomain: 'http://localhost:3000', basePath: '/comm/', baseRoutePath: '/', https: false, proxy: 'apache', }; await writeJSONToFile(newJSON, filePath); } export { migrations, newDatabaseVersion }; diff --git a/keyserver/src/database/migrations.js b/keyserver/src/database/migrations.js index ccdb3edb4..c67676c4c 100644 --- a/keyserver/src/database/migrations.js +++ b/keyserver/src/database/migrations.js @@ -1,88 +1,88 @@ // @flow import type { QueryResults } from 'mysql'; -import { isDev } from 'lib/utils/dev-utils'; -import { getMessageForException } from 'lib/utils/errors'; -import sleep from 'lib/utils/sleep'; +import { isDev } from 'lib/utils/dev-utils.js'; +import { getMessageForException } from 'lib/utils/errors.js'; +import sleep from 'lib/utils/sleep.js'; -import { dbQuery, SQL, setConnectionContext } from './database'; -import { fetchDBVersion, updateDBVersion } from './db-version'; -import { migrations } from './migration-config'; -import { setupDB } from './setup-db'; +import { dbQuery, SQL, setConnectionContext } from './database.js'; +import { fetchDBVersion, updateDBVersion } from './db-version.js'; +import { migrations } from './migration-config.js'; +import { setupDB } from './setup-db.js'; async function migrate(): Promise { if (isDev) { await sleep(5000); } let dbVersion = null; try { dbVersion = await setUpDBAndReturnVersion(); console.log(`(node:${process.pid}) DB version: ${dbVersion}`); } catch (e) { const dbVersionExceptionMessage = String(getMessageForException(e)); console.error(`(node:${process.pid}) ${dbVersionExceptionMessage}`); return false; } setConnectionContext({ migrationsActive: true }); for (const [idx, migration] of migrations.entries()) { if (idx <= dbVersion) { continue; } try { await startTransaction(); await migration(); await updateDBVersion(idx); await commitTransaction(); console.log(`(node:${process.pid}) migration ${idx} succeeded.`); } catch (e) { const transactionExceptionMessage = String(getMessageForException(e)); console.error(`(node:${process.pid}) migration ${idx} failed.`); console.error(transactionExceptionMessage); await rollbackTransaction(); return false; } } setConnectionContext({ migrationsActive: false }); return true; } const MYSQL_TABLE_DOESNT_EXIST_ERROR_CODE = 1146; async function setUpDBAndReturnVersion(): Promise { try { return await fetchDBVersion(); } catch (e) { if (e.errno !== MYSQL_TABLE_DOESNT_EXIST_ERROR_CODE) { throw e; } await setupDB(); return await fetchDBVersion(); } } async function startTransaction(): Promise { const beginTxnQuery = SQL` START TRANSACTION; `; return dbQuery(beginTxnQuery); } async function commitTransaction(): Promise { const endTxnQuery = SQL` COMMIT; `; return dbQuery(endTxnQuery); } async function rollbackTransaction(): Promise { const rollbackTxnQuery = SQL` ROLLBACK; `; return dbQuery(rollbackTxnQuery); } export { migrate }; diff --git a/keyserver/src/database/monitor.js b/keyserver/src/database/monitor.js index d2848efe0..11f983aba 100644 --- a/keyserver/src/database/monitor.js +++ b/keyserver/src/database/monitor.js @@ -1,67 +1,67 @@ // @flow -import { queryWarnTime } from './consts'; -import type { Pool } from './types'; +import { queryWarnTime } from './consts.js'; +import type { Pool } from './types.js'; function 
countDecimals(num: number) { return 1 + (num === 0 ? 0 : Math.floor(Math.log10(num))); } class DatabaseMonitor { pool: Pool; activeQueries: number = 0; lastDecimalCount: number = 1; constructor(pool: Pool) { this.pool = pool; pool.on('acquire', this.onAcquire); pool.on('release', this.onRelease); pool.on('enqueue', this.onEnqueue); } get queuedQueries(): number { return this.pool.pool._connectionQueue.length; } get outstandingQueries(): number { return this.activeQueries + this.queuedQueries; } countOutstandingQueries(): number { const count = this.outstandingQueries; const decimalCount = countDecimals(count); if (decimalCount > this.lastDecimalCount) { const lowerBound = Math.pow(10, this.lastDecimalCount); console.log(`more than ${lowerBound - 1} queries outstanding`); } else if (decimalCount < this.lastDecimalCount) { const upperBound = Math.pow(10, decimalCount); console.log(`fewer than ${upperBound} queries outstanding`); } this.lastDecimalCount = decimalCount; return count; } onAcquire: () => void = () => { this.activeQueries += 1; this.countOutstandingQueries(); }; onRelease: () => void = () => { this.activeQueries -= 1; this.countOutstandingQueries(); }; onEnqueue: () => void = () => { this.countOutstandingQueries(); }; reportLaggingQuery: (query: string) => void = query => { const count = this.countOutstandingQueries(); console.log( `a query is taking more than ${queryWarnTime}ms to execute. ` + `there are currently ${count} queries outstanding. query: ${query}`, ); }; } export default DatabaseMonitor; diff --git a/keyserver/src/database/setup-db.js b/keyserver/src/database/setup-db.js index 63a5f4051..7af2b3034 100644 --- a/keyserver/src/database/setup-db.js +++ b/keyserver/src/database/setup-db.js @@ -1,428 +1,428 @@ // @flow -import ashoat from 'lib/facts/ashoat'; -import bots from 'lib/facts/bots'; -import genesis from 'lib/facts/genesis'; -import { usernameMaxLength } from 'lib/shared/account-utils'; -import { sortIDs } from 'lib/shared/relationship-utils'; -import { undirectedStatus } from 'lib/types/relationship-types'; -import { threadTypes } from 'lib/types/thread-types'; - -import { createThread } from '../creators/thread-creator'; -import { dbQuery, SQL } from '../database/database'; -import { updateDBVersion } from '../database/db-version'; -import { newDatabaseVersion } from '../database/migration-config'; -import { createScriptViewer } from '../session/scripts'; +import ashoat from 'lib/facts/ashoat.js'; +import bots from 'lib/facts/bots.js'; +import genesis from 'lib/facts/genesis.js'; +import { usernameMaxLength } from 'lib/shared/account-utils.js'; +import { sortIDs } from 'lib/shared/relationship-utils.js'; +import { undirectedStatus } from 'lib/types/relationship-types.js'; +import { threadTypes } from 'lib/types/thread-types.js'; + +import { createThread } from '../creators/thread-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { updateDBVersion } from '../database/db-version.js'; +import { newDatabaseVersion } from '../database/migration-config.js'; +import { createScriptViewer } from '../session/scripts.js'; async function setupDB() { await createTables(); await createUsers(); await createThreads(); await setUpMetadataTable(); } async function createTables() { await dbQuery( SQL` CREATE TABLE cookies ( id bigint(20) NOT NULL, hash char(60) NOT NULL, user bigint(20) DEFAULT NULL, platform varchar(255) DEFAULT NULL, creation_time bigint(20) NOT NULL, last_used bigint(20) NOT NULL, device_token varchar(255) DEFAULT NULL, versions 
json DEFAULT NULL, device_id varchar(255) DEFAULT NULL, public_key varchar(255) DEFAULT NULL, social_proof mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL, \`primary\` TINYINT(1) DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE days ( id bigint(20) NOT NULL, date date NOT NULL, thread bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE entries ( id bigint(20) NOT NULL, day bigint(20) NOT NULL, text mediumtext COLLATE utf8mb4_bin NOT NULL, creator bigint(20) NOT NULL, creation_time bigint(20) NOT NULL, last_update bigint(20) NOT NULL, deleted tinyint(1) UNSIGNED NOT NULL, creation varchar(255) COLLATE utf8mb4_bin DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE focused ( user bigint(20) NOT NULL, session bigint(20) NOT NULL, thread bigint(20) NOT NULL, time bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE ids ( id bigint(20) NOT NULL, table_name varchar(255) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE memberships ( thread bigint(20) NOT NULL, user bigint(20) NOT NULL, role bigint(20) NOT NULL, permissions json DEFAULT NULL, permissions_for_children json DEFAULT NULL, creation_time bigint(20) NOT NULL, subscription json NOT NULL, last_message bigint(20) NOT NULL DEFAULT 0, last_read_message bigint(20) NOT NULL DEFAULT 0, sender tinyint(1) UNSIGNED NOT NULL DEFAULT 0 ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE messages ( id bigint(20) NOT NULL, thread bigint(20) NOT NULL, user bigint(20) NOT NULL, type tinyint(3) UNSIGNED NOT NULL, content mediumtext COLLATE utf8mb4_bin, time bigint(20) NOT NULL, creation varchar(255) COLLATE utf8mb4_bin DEFAULT NULL, target_message bigint(20) DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE notifications ( id bigint(20) NOT NULL, user bigint(20) NOT NULL, thread bigint(20) DEFAULT NULL, message bigint(20) DEFAULT NULL, collapse_key varchar(255) DEFAULT NULL, delivery json NOT NULL, rescinded tinyint(1) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE reports ( id bigint(20) NOT NULL, user bigint(20) NOT NULL, type tinyint(3) UNSIGNED NOT NULL, platform varchar(255) NOT NULL, report json NOT NULL, creation_time bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE revisions ( id bigint(20) NOT NULL, entry bigint(20) NOT NULL, author bigint(20) NOT NULL, text mediumtext COLLATE utf8mb4_bin NOT NULL, creation_time bigint(20) NOT NULL, session bigint(20) NOT NULL, last_update bigint(20) NOT NULL, deleted tinyint(1) UNSIGNED NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE roles ( id bigint(20) NOT NULL, thread bigint(20) NOT NULL, name varchar(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL, permissions json NOT NULL, creation_time bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE sessions ( id bigint(20) NOT NULL, user bigint(20) NOT NULL, cookie bigint(20) NOT NULL, query json NOT NULL, creation_time bigint(20) NOT NULL, last_update bigint(20) NOT NULL, last_validated bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE threads ( id bigint(20) NOT NULL, type tinyint(3) NOT NULL, name varchar(191) COLLATE utf8mb4_bin DEFAULT NULL, description mediumtext COLLATE utf8mb4_bin, parent_thread_id bigint(20) DEFAULT NULL, containing_thread_id bigint(20) DEFAULT NULL, community bigint(20) DEFAULT NULL, depth int UNSIGNED NOT NULL DEFAULT 0, default_role bigint(20) NOT NULL, 
creator bigint(20) NOT NULL, creation_time bigint(20) NOT NULL, color char(6) COLLATE utf8mb4_bin NOT NULL, source_message bigint(20) DEFAULT NULL UNIQUE, replies_count int UNSIGNED NOT NULL DEFAULT 0 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE updates ( id bigint(20) NOT NULL, user bigint(20) NOT NULL, type tinyint(3) UNSIGNED NOT NULL, \`key\` bigint(20) DEFAULT NULL, updater bigint(20) DEFAULT NULL, target bigint(20) DEFAULT NULL, content mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin, time bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE uploads ( id bigint(20) NOT NULL, uploader bigint(20) NOT NULL, container bigint(20) DEFAULT NULL, type varchar(255) NOT NULL, filename varchar(255) NOT NULL, mime varchar(255) NOT NULL, content longblob NOT NULL, secret varchar(255) NOT NULL, creation_time bigint(20) NOT NULL, extra json DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE users ( id bigint(20) NOT NULL, username varchar(${usernameMaxLength}) COLLATE utf8mb4_bin NOT NULL, hash char(60) COLLATE utf8mb4_bin DEFAULT NULL, avatar varchar(191) COLLATE utf8mb4_bin DEFAULT NULL, ethereum_address char(42) DEFAULT NULL, creation_time bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE relationships_undirected ( user1 bigint(20) NOT NULL, user2 bigint(20) NOT NULL, status tinyint(1) UNSIGNED NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE relationships_directed ( user1 bigint(20) NOT NULL, user2 bigint(20) NOT NULL, status tinyint(1) UNSIGNED NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; CREATE TABLE versions ( id bigint(20) NOT NULL, code_version int(11) NOT NULL, platform varchar(255) NOT NULL, creation_time bigint(20) NOT NULL, deploy_time bigint(20) DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE one_time_keys ( session bigint(20) NOT NULL, one_time_key char(43) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE user_messages ( recipient bigint(20) NOT NULL, thread bigint(20) NOT NULL, message bigint(20) NOT NULL, time bigint(20) NOT NULL, data mediumtext COLLATE utf8mb4_bin DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE settings ( user bigint(20) NOT NULL, name varchar(255) NOT NULL, data mediumtext COLLATE utf8mb4_bin DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; CREATE TABLE metadata ( name varchar(255) NOT NULL, data varchar(255) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE policy_acknowledgments ( user bigint(20) NOT NULL, policy varchar(255) NOT NULL, date bigint(20) NOT NULL, confirmed tinyint(1) UNSIGNED NOT NULL DEFAULT 0 ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE siwe_nonces ( nonce char(17) NOT NULL, creation_time bigint(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8; ALTER TABLE cookies ADD PRIMARY KEY (id), ADD UNIQUE KEY device_token (device_token), ADD KEY user_device_token (user,device_token); ALTER TABLE days ADD PRIMARY KEY (id), ADD UNIQUE KEY date_thread (date,thread) USING BTREE; ALTER TABLE entries ADD PRIMARY KEY (id), ADD UNIQUE KEY creator_creation (creator,creation), ADD KEY day (day); ALTER TABLE focused ADD UNIQUE KEY user_cookie_thread (user,session,thread), ADD KEY thread_user (thread,user); ALTER TABLE ids ADD PRIMARY KEY (id); ALTER TABLE memberships ADD UNIQUE KEY thread_user (thread,user) USING BTREE, ADD KEY role (role) USING BTREE; ALTER TABLE memberships ADD INDEX user (user); ALTER TABLE messages ADD PRIMARY KEY (id), 
ADD UNIQUE KEY user_creation (user,creation), ADD KEY thread (thread), ADD INDEX target_message (target_message); ALTER TABLE notifications ADD PRIMARY KEY (id), ADD KEY rescinded_user_collapse_key (rescinded,user,collapse_key) USING BTREE, ADD KEY thread (thread), ADD KEY rescinded_user_thread_message (rescinded,user,thread,message) USING BTREE; ALTER TABLE notifications ADD INDEX user (user); ALTER TABLE reports ADD PRIMARY KEY (id); ALTER TABLE revisions ADD PRIMARY KEY (id), ADD KEY entry (entry); ALTER TABLE roles ADD PRIMARY KEY (id), ADD KEY thread (thread); ALTER TABLE sessions ADD PRIMARY KEY (id), ADD KEY user (user); ALTER TABLE threads ADD PRIMARY KEY (id), ADD INDEX parent_thread_id (parent_thread_id), ADD INDEX containing_thread_id (containing_thread_id), ADD INDEX community (community); ALTER TABLE updates ADD PRIMARY KEY (id), ADD INDEX user_time (user,time), ADD INDEX target_time (target, time), ADD INDEX user_key_target_type_time (user, \`key\`, target, type, time), ADD INDEX user_key_type_time (user, \`key\`, type, time), ADD INDEX user_key_time (user, \`key\`, time); ALTER TABLE uploads ADD PRIMARY KEY (id), ADD INDEX container (container); ALTER TABLE users ADD PRIMARY KEY (id), ADD UNIQUE KEY username (username); ALTER TABLE relationships_undirected ADD UNIQUE KEY user1_user2 (user1,user2), ADD UNIQUE KEY user2_user1 (user2,user1); ALTER TABLE relationships_directed ADD UNIQUE KEY user1_user2 (user1,user2), ADD UNIQUE KEY user2_user1 (user2,user1); ALTER TABLE versions ADD PRIMARY KEY (id), ADD UNIQUE KEY code_version_platform (code_version,platform); ALTER TABLE one_time_keys ADD PRIMARY KEY (session, one_time_key); ALTER TABLE user_messages ADD INDEX recipient_time (recipient, time), ADD INDEX recipient_thread_time (recipient, thread, time), ADD INDEX thread (thread), ADD PRIMARY KEY (recipient, message); ALTER TABLE ids MODIFY id bigint(20) NOT NULL AUTO_INCREMENT; ALTER TABLE settings ADD PRIMARY KEY (user, name); ALTER TABLE metadata ADD PRIMARY KEY (name); ALTER TABLE policy_acknowledgments ADD PRIMARY KEY (user, policy); ALTER TABLE siwe_nonces ADD PRIMARY KEY (nonce); `, { multipleStatements: true }, ); } async function createUsers() { const [user1, user2] = sortIDs(bots.commbot.userID, ashoat.id); await dbQuery( SQL` INSERT INTO ids (id, table_name) VALUES (${bots.commbot.userID}, 'users'), (${ashoat.id}, 'users'); INSERT INTO users (id, username, hash, avatar, creation_time) VALUES (${bots.commbot.userID}, 'commbot', '', NULL, 1530049900980), (${ashoat.id}, 'ashoat', '', NULL, 1463588881886); INSERT INTO relationships_undirected (user1, user2, status) VALUES (${user1}, ${user2}, ${undirectedStatus.KNOW_OF}); `, { multipleStatements: true }, ); } const createThreadOptions = { forceAddMembers: true }; async function createThreads() { const insertIDsPromise = dbQuery(SQL` INSERT INTO ids (id, table_name) VALUES (${genesis.id}, 'threads'), (${bots.commbot.staffThreadID}, 'threads') `); const ashoatViewer = createScriptViewer(ashoat.id); const createGenesisPromise = createThread( ashoatViewer, { id: genesis.id, type: threadTypes.GENESIS, name: genesis.name, description: genesis.description, initialMemberIDs: [bots.commbot.userID], }, createThreadOptions, ); await Promise.all([insertIDsPromise, createGenesisPromise]); const commbotViewer = createScriptViewer(bots.commbot.userID); await createThread( commbotViewer, { id: bots.commbot.staffThreadID, type: threadTypes.COMMUNITY_SECRET_SUBTHREAD, initialMemberIDs: [ashoat.id], }, createThreadOptions, ); } async 
function setUpMetadataTable() { await updateDBVersion(newDatabaseVersion); } export { setupDB }; diff --git a/keyserver/src/deleters/account-deleters.js b/keyserver/src/deleters/account-deleters.js index bf7edc0ad..583982432 100644 --- a/keyserver/src/deleters/account-deleters.js +++ b/keyserver/src/deleters/account-deleters.js @@ -1,142 +1,142 @@ // @flow import bcrypt from 'twin-bcrypt'; import type { LogOutResponse, DeleteAccountRequest, -} from 'lib/types/account-types'; -import { updateTypes } from 'lib/types/update-types'; -import type { UserInfo } from 'lib/types/user-types'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; -import { promiseAll } from 'lib/utils/promises'; +} from 'lib/types/account-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import type { UserInfo } from 'lib/types/user-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; +import { promiseAll } from 'lib/utils/promises.js'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; -import { fetchKnownUserInfos } from '../fetchers/user-fetchers'; -import { rescindPushNotifs } from '../push/rescind'; -import { handleAsyncPromise } from '../responders/handlers'; -import { createNewAnonymousCookie } from '../session/cookies'; -import type { Viewer } from '../session/viewer'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchKnownUserInfos } from '../fetchers/user-fetchers.js'; +import { rescindPushNotifs } from '../push/rescind.js'; +import { handleAsyncPromise } from '../responders/handlers.js'; +import { createNewAnonymousCookie } from '../session/cookies.js'; +import type { Viewer } from '../session/viewer.js'; async function deleteAccount( viewer: Viewer, request?: DeleteAccountRequest, ): Promise { if (!viewer.loggedIn || (!request && !viewer.isScriptViewer)) { throw new ServerError('not_logged_in'); } if (request) { const hashQuery = SQL`SELECT hash FROM users WHERE id = ${viewer.userID}`; const [result] = await dbQuery(hashQuery); if (result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; const requestPasswordConsistentWithDB = !!row.hash === !!request.password; const shouldValidatePassword = !!row.hash; if ( !requestPasswordConsistentWithDB || (shouldValidatePassword && !bcrypt.compareSync(request.password, row.hash)) ) { throw new ServerError('invalid_credentials'); } } const deletedUserID = viewer.userID; await rescindPushNotifs(SQL`n.user = ${deletedUserID}`, SQL`NULL`); const knownUserInfos = await fetchKnownUserInfos(viewer); const usersToUpdate = values(knownUserInfos).filter( userID => userID !== deletedUserID, ); // TODO: if this results in any orphaned orgs, convert them to chats const deletionQuery = SQL` START TRANSACTION; DELETE FROM users WHERE id = ${deletedUserID}; DELETE FROM ids WHERE id = ${deletedUserID}; DELETE c, i FROM cookies c LEFT JOIN ids i ON i.id = c.id WHERE c.user = ${deletedUserID}; DELETE FROM memberships WHERE user = ${deletedUserID}; DELETE FROM focused WHERE user = ${deletedUserID}; DELETE n, i FROM notifications n LEFT JOIN ids i ON i.id = n.id WHERE n.user = ${deletedUserID}; DELETE u, i FROM updates u LEFT JOIN ids i ON i.id = u.id WHERE u.user = ${deletedUserID}; DELETE s, i FROM sessions s LEFT JOIN ids i ON i.id = s.id WHERE s.user = ${deletedUserID}; DELETE r, i FROM 
reports r LEFT JOIN ids i ON i.id = r.id WHERE r.user = ${deletedUserID}; DELETE FROM relationships_undirected WHERE user1 = ${deletedUserID}; DELETE FROM relationships_undirected WHERE user2 = ${deletedUserID}; DELETE FROM relationships_directed WHERE user1 = ${deletedUserID}; DELETE FROM relationships_directed WHERE user2 = ${deletedUserID}; COMMIT; `; const promises = {}; promises.deletion = dbQuery(deletionQuery, { multipleStatements: true }); if (request) { promises.anonymousViewerData = createNewAnonymousCookie({ platformDetails: viewer.platformDetails, deviceToken: viewer.deviceToken, }); } const { anonymousViewerData } = await promiseAll(promises); if (anonymousViewerData) { viewer.setNewCookie(anonymousViewerData); } const deletionUpdatesPromise = createAccountDeletionUpdates( usersToUpdate, deletedUserID, ); if (request) { handleAsyncPromise(deletionUpdatesPromise); } else { await deletionUpdatesPromise; } if (request) { return { currentUserInfo: { id: viewer.id, anonymous: true, }, }; } return null; } async function createAccountDeletionUpdates( knownUserInfos: $ReadOnlyArray, deletedUserID: string, ): Promise { const time = Date.now(); const updateDatas = []; for (const userInfo of knownUserInfos) { const { id: userID } = userInfo; updateDatas.push({ type: updateTypes.DELETE_ACCOUNT, userID, time, deletedUserID, }); } await createUpdates(updateDatas); } export { deleteAccount }; diff --git a/keyserver/src/deleters/activity-deleters.js b/keyserver/src/deleters/activity-deleters.js index f99a78bd8..a58778239 100644 --- a/keyserver/src/deleters/activity-deleters.js +++ b/keyserver/src/deleters/activity-deleters.js @@ -1,33 +1,33 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; -import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; +import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times.js'; async function deleteActivityForViewerSession( viewer: Viewer, beforeTime?: number, ): Promise { const query = SQL` DELETE FROM focused WHERE user = ${viewer.userID} AND session = ${viewer.session} `; if (beforeTime !== undefined) { query.append(SQL`AND time < ${beforeTime}`); } await dbQuery(query); } async function deleteOrphanedActivity(): Promise { const time = earliestFocusedTimeConsideredExpired(); await dbQuery(SQL` DELETE f FROM focused f LEFT JOIN threads t ON t.id = f.thread LEFT JOIN users u ON u.id = f.user LEFT JOIN sessions s ON s.id = f.session WHERE t.id IS NULL OR u.id IS NULL OR s.id IS NULL OR f.time <= ${time} `); } export { deleteActivityForViewerSession, deleteOrphanedActivity }; diff --git a/keyserver/src/deleters/cookie-deleters.js b/keyserver/src/deleters/cookie-deleters.js index 145c32bae..02b180d27 100644 --- a/keyserver/src/deleters/cookie-deleters.js +++ b/keyserver/src/deleters/cookie-deleters.js @@ -1,41 +1,41 @@ // @flow import invariant from 'invariant'; -import { cookieLifetime } from 'lib/types/session-types'; +import { cookieLifetime } from 'lib/types/session-types.js'; -import { dbQuery, SQL, mergeOrConditions } from '../database/database'; -import type { SQLStatementType } from '../database/types'; +import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; async function deleteCookiesByConditions( conditions: $ReadOnlyArray, ) { invariant(conditions.length 
> 0, 'no conditions specified'); const conditionClause = mergeOrConditions(conditions); const query = SQL` DELETE c, i, s, si, u, iu, fo FROM cookies c LEFT JOIN ids i ON i.id = c.id LEFT JOIN sessions s ON s.cookie = c.id LEFT JOIN ids si ON si.id = s.id LEFT JOIN updates u ON u.target = c.id OR u.target = s.id LEFT JOIN ids iu ON iu.id = u.id LEFT JOIN focused fo ON fo.session = c.id OR fo.session = s.id WHERE `; query.append(conditionClause); await dbQuery(query); } async function deleteCookie(cookieID: string): Promise { const condition = SQL`c.id = ${cookieID}`; await deleteCookiesByConditions([condition]); } async function deleteExpiredCookies(): Promise { const earliestInvalidLastUpdate = Date.now() - cookieLifetime; const condition = SQL`c.last_used <= ${earliestInvalidLastUpdate}`; await deleteCookiesByConditions([condition]); } export { deleteCookie, deleteExpiredCookies }; diff --git a/keyserver/src/deleters/day-deleters.js b/keyserver/src/deleters/day-deleters.js index 81d3e07fc..375ba18d9 100644 --- a/keyserver/src/deleters/day-deleters.js +++ b/keyserver/src/deleters/day-deleters.js @@ -1,16 +1,16 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedDays(): Promise { await dbQuery(SQL` DELETE d, i FROM days d LEFT JOIN ids i ON i.id = d.id LEFT JOIN entries e ON e.day = d.id LEFT JOIN threads t ON t.id = d.thread WHERE e.day IS NULL OR t.id IS NULL `); } export { deleteOrphanedDays }; diff --git a/keyserver/src/deleters/entry-deleters.js b/keyserver/src/deleters/entry-deleters.js index 01670c1a7..970c67c9f 100644 --- a/keyserver/src/deleters/entry-deleters.js +++ b/keyserver/src/deleters/entry-deleters.js @@ -1,275 +1,275 @@ // @flow import type { DeleteEntryRequest, DeleteEntryResponse, RestoreEntryRequest, RestoreEntryResponse, -} from 'lib/types/entry-types'; -import { messageTypes } from 'lib/types/message-types'; -import { threadPermissions } from 'lib/types/thread-types'; -import { dateString } from 'lib/utils/date-utils'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; +} from 'lib/types/entry-types.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { dateString } from 'lib/utils/date-utils.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; -import createIDs from '../creators/id-creator'; -import createMessages from '../creators/message-creator'; -import { dbQuery, SQL } from '../database/database'; -import { checkThreadPermissionForEntry } from '../fetchers/entry-fetchers'; -import { fetchMessageInfoForEntryAction } from '../fetchers/message-fetchers'; -import { fetchUpdateInfoForEntryUpdate } from '../fetchers/update-fetchers'; -import type { Viewer } from '../session/viewer'; -import { createUpdateDatasForChangedEntryInfo } from '../updaters/entry-updaters'; +import createIDs from '../creators/id-creator.js'; +import createMessages from '../creators/message-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { checkThreadPermissionForEntry } from '../fetchers/entry-fetchers.js'; +import { fetchMessageInfoForEntryAction } from '../fetchers/message-fetchers.js'; +import { fetchUpdateInfoForEntryUpdate } from '../fetchers/update-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import { createUpdateDatasForChangedEntryInfo } from 
'../updaters/entry-updaters.js'; const lastRevisionQuery = (entryID: string) => SQL` SELECT r.id, r.author, r.text, r.session, r.last_update, r.deleted, DAY(d.date) AS day, MONTH(d.date) AS month, YEAR(d.date) AS year, d.thread, d.date, e.creation_time, e.creator FROM revisions r LEFT JOIN entries e ON e.id = r.entry LEFT JOIN days d ON d.id = e.day WHERE r.entry = ${entryID} ORDER BY r.last_update DESC LIMIT 1 `; async function deleteEntry( viewer: Viewer, request: DeleteEntryRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const [hasPermission, [lastRevisionResult]] = await Promise.all([ checkThreadPermissionForEntry( viewer, request.entryID, threadPermissions.EDIT_ENTRIES, ), dbQuery(lastRevisionQuery(request.entryID)), ]); if (!hasPermission) { throw new ServerError('invalid_credentials'); } if (lastRevisionResult.length === 0) { throw new ServerError('unknown_error'); } const lastRevisionRow = lastRevisionResult[0]; const threadID = lastRevisionRow.thread.toString(); if (lastRevisionRow.deleted) { const [rawMessageInfo, fetchUpdatesResult] = await Promise.all([ fetchMessageInfoForEntryAction( viewer, messageTypes.DELETE_ENTRY, request.entryID, threadID, ), fetchUpdateInfoForEntryUpdate(viewer, request.entryID), ]); return { threadID, newMessageInfos: rawMessageInfo ? [rawMessageInfo] : [], updatesResult: { viewerUpdates: fetchUpdatesResult.updateInfos, userInfos: values(fetchUpdatesResult.userInfos), }, }; } const text = lastRevisionRow.text; const viewerID = viewer.userID; if (viewer.session !== lastRevisionRow.session && request.prevText !== text) { throw new ServerError('concurrent_modification', { db: text, ui: request.prevText, }); } else if (lastRevisionRow.last_update >= request.timestamp) { throw new ServerError('old_timestamp', { oldTime: lastRevisionRow.last_update, newTime: request.timestamp, }); } const dbPromises = []; dbPromises.push( dbQuery(SQL` UPDATE entries SET deleted = 1 WHERE id = ${request.entryID} `), ); const [revisionID] = await createIDs('revisions', 1); const revisionRow = [ revisionID, request.entryID, viewerID, text, request.timestamp, viewer.session, request.timestamp, 1, ]; dbPromises.push( dbQuery(SQL` INSERT INTO revisions(id, entry, author, text, creation_time, session, last_update, deleted) VALUES ${[revisionRow]} `), ); const messageData = { type: messageTypes.DELETE_ENTRY, threadID, creatorID: viewerID, time: Date.now(), entryID: request.entryID.toString(), date: dateString(lastRevisionRow.date), text, }; const oldEntryInfo = { id: request.entryID, threadID, text, year: lastRevisionRow.year, month: lastRevisionRow.month, day: lastRevisionRow.day, creationTime: lastRevisionRow.creation_time, creatorID: lastRevisionRow.creator.toString(), deleted: false, }; const newEntryInfo = { ...oldEntryInfo, deleted: true, }; const [newMessageInfos, updatesResult] = await Promise.all([ createMessages(viewer, [messageData]), createUpdateDatasForChangedEntryInfo( viewer, oldEntryInfo, newEntryInfo, request.calendarQuery, ), Promise.all(dbPromises), ]); return { threadID, newMessageInfos, updatesResult }; } async function restoreEntry( viewer: Viewer, request: RestoreEntryRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const [hasPermission, [lastRevisionResult]] = await Promise.all([ checkThreadPermissionForEntry( viewer, request.entryID, threadPermissions.EDIT_ENTRIES, ), dbQuery(lastRevisionQuery(request.entryID)), ]); if (!hasPermission) { throw new 
ServerError('invalid_credentials'); } if (lastRevisionResult.length === 0) { throw new ServerError('unknown_error'); } const lastRevisionRow = lastRevisionResult[0]; const oldEntryInfo = { id: request.entryID, threadID: lastRevisionRow.thread.toString(), text: lastRevisionRow.text, year: lastRevisionRow.year, month: lastRevisionRow.month, day: lastRevisionRow.day, creationTime: lastRevisionRow.creation_time, creatorID: lastRevisionRow.creator.toString(), deleted: !!lastRevisionRow.deleted, }; if (!oldEntryInfo.deleted) { const [rawMessageInfo, fetchUpdatesResult] = await Promise.all([ fetchMessageInfoForEntryAction( viewer, messageTypes.RESTORE_ENTRY, request.entryID, oldEntryInfo.threadID, ), fetchUpdateInfoForEntryUpdate(viewer, request.entryID), ]); return { newMessageInfos: rawMessageInfo ? [rawMessageInfo] : [], updatesResult: { viewerUpdates: fetchUpdatesResult.updateInfos, userInfos: values(fetchUpdatesResult.userInfos), }, }; } const viewerID = viewer.userID; const dbPromises = []; dbPromises.push( dbQuery(SQL` UPDATE entries SET deleted = 0 WHERE id = ${request.entryID} `), ); const [revisionID] = await createIDs('revisions', 1); const revisionRow = [ revisionID, request.entryID, viewerID, oldEntryInfo.text, request.timestamp, viewer.session, request.timestamp, 0, ]; dbPromises.push( dbQuery(SQL` INSERT INTO revisions(id, entry, author, text, creation_time, session, last_update, deleted) VALUES ${[revisionRow]} `), ); const messageData = { type: messageTypes.RESTORE_ENTRY, threadID: oldEntryInfo.threadID, creatorID: viewerID, time: Date.now(), entryID: request.entryID.toString(), date: dateString(lastRevisionRow.date), text: oldEntryInfo.text, }; const newEntryInfo = { ...oldEntryInfo, deleted: false, }; const [newMessageInfos, updatesResult] = await Promise.all([ createMessages(viewer, [messageData]), createUpdateDatasForChangedEntryInfo( viewer, oldEntryInfo, newEntryInfo, request.calendarQuery, ), Promise.all(dbPromises), ]); return { newMessageInfos, updatesResult }; } async function deleteOrphanedEntries(): Promise { await dbQuery(SQL` DELETE e, i FROM entries e LEFT JOIN ids i ON i.id = e.id LEFT JOIN days d ON d.id = e.day WHERE d.id IS NULL `); } export { deleteEntry, restoreEntry, deleteOrphanedEntries }; diff --git a/keyserver/src/deleters/membership-deleters.js b/keyserver/src/deleters/membership-deleters.js index 39b8de39f..a905ab576 100644 --- a/keyserver/src/deleters/membership-deleters.js +++ b/keyserver/src/deleters/membership-deleters.js @@ -1,14 +1,14 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedMemberships(): Promise { await dbQuery(SQL` DELETE m FROM memberships m LEFT JOIN threads t ON t.id = m.thread WHERE t.id IS NULL `); } export { deleteOrphanedMemberships }; diff --git a/keyserver/src/deleters/message-deleters.js b/keyserver/src/deleters/message-deleters.js index efeacf6d6..72648b2f3 100644 --- a/keyserver/src/deleters/message-deleters.js +++ b/keyserver/src/deleters/message-deleters.js @@ -1,17 +1,17 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedMessages(): Promise { await dbQuery(SQL` DELETE m, i, up, iu FROM messages m LEFT JOIN ids i ON i.id = m.id LEFT JOIN threads t ON t.id = m.thread LEFT JOIN uploads up ON up.container = m.id LEFT JOIN ids iu ON iu.id = up.id WHERE t.id IS NULL `); } export { deleteOrphanedMessages }; diff --git 
a/keyserver/src/deleters/notif-deleters.js b/keyserver/src/deleters/notif-deleters.js index 9ecf62274..48e6c2a3b 100644 --- a/keyserver/src/deleters/notif-deleters.js +++ b/keyserver/src/deleters/notif-deleters.js @@ -1,15 +1,15 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedNotifs(): Promise { await dbQuery(SQL` DELETE n, i FROM notifications n LEFT JOIN ids i ON i.id = n.id LEFT JOIN threads t ON t.id = n.thread WHERE t.id IS NULL AND n.rescinded = 1 `); } export { deleteOrphanedNotifs }; diff --git a/keyserver/src/deleters/one-time-key-deleters.js b/keyserver/src/deleters/one-time-key-deleters.js index f489fc6c7..54b972830 100644 --- a/keyserver/src/deleters/one-time-key-deleters.js +++ b/keyserver/src/deleters/one-time-key-deleters.js @@ -1,16 +1,16 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOneTimeKey( session: string, oneTimeKey: string, ): Promise { await dbQuery(SQL` DELETE FROM one_time_keys WHERE session = ${session} AND one_time_key = ${oneTimeKey} `); } export { deleteOneTimeKey }; diff --git a/keyserver/src/deleters/revision-deleters.js b/keyserver/src/deleters/revision-deleters.js index 6ef0a5ae0..d3eb80c49 100644 --- a/keyserver/src/deleters/revision-deleters.js +++ b/keyserver/src/deleters/revision-deleters.js @@ -1,15 +1,15 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedRevisions(): Promise { await dbQuery(SQL` DELETE r, i FROM revisions r LEFT JOIN ids i ON i.id = r.id LEFT JOIN entries e ON e.id = r.entry WHERE e.id IS NULL `); } export { deleteOrphanedRevisions }; diff --git a/keyserver/src/deleters/role-deleters.js b/keyserver/src/deleters/role-deleters.js index de58619ca..07016d37d 100644 --- a/keyserver/src/deleters/role-deleters.js +++ b/keyserver/src/deleters/role-deleters.js @@ -1,15 +1,15 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedRoles(): Promise { await dbQuery(SQL` DELETE r, i FROM roles r LEFT JOIN ids i ON i.id = r.id LEFT JOIN threads t ON t.id = r.thread WHERE t.id IS NULL `); } export { deleteOrphanedRoles }; diff --git a/keyserver/src/deleters/session-deleters.js b/keyserver/src/deleters/session-deleters.js index 66fd21959..b7fde9355 100644 --- a/keyserver/src/deleters/session-deleters.js +++ b/keyserver/src/deleters/session-deleters.js @@ -1,35 +1,35 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function deleteOrphanedSessions(): Promise { await dbQuery(SQL` DELETE s, i, f, up, iup FROM sessions s LEFT JOIN ids i ON i.id = s.id LEFT JOIN focused f ON f.session = s.id LEFT JOIN updates up ON up.target = s.id LEFT JOIN ids iup ON iup.id = up.id LEFT JOIN users u ON u.id = s.user LEFT JOIN cookies c ON c.id = s.cookie WHERE c.id IS NULL OR u.id IS NULL `); } const maxWebSessionAge = 3 * 24 * 60 * 60 * 1000; async function deleteOldWebSessions(): Promise { const oldestWebSessionToKeep = Date.now() - maxWebSessionAge; await dbQuery(SQL` DELETE s, i, f, up, iup FROM sessions s LEFT JOIN ids i ON i.id = s.id LEFT JOIN focused f ON f.session = s.id LEFT JOIN updates up ON up.target = s.id LEFT JOIN ids iup ON iup.id = up.id WHERE s.id != s.cookie AND s.last_update < 
${oldestWebSessionToKeep} AND s.last_validated < ${oldestWebSessionToKeep} `); } export { deleteOrphanedSessions, deleteOldWebSessions }; diff --git a/keyserver/src/deleters/siwe-nonce-deleters.js b/keyserver/src/deleters/siwe-nonce-deleters.js index 981e14d2a..724ad6b61 100644 --- a/keyserver/src/deleters/siwe-nonce-deleters.js +++ b/keyserver/src/deleters/siwe-nonce-deleters.js @@ -1,29 +1,29 @@ // @flow -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; // 30 minutes = 30min * 60sec * 1000ms export const nonceLifetime = 30 * 60 * 1000; async function deleteStaleSIWENonceEntries(): Promise { const earliestValidCreationTime = Date.now() - nonceLifetime; const query = SQL` DELETE FROM siwe_nonces WHERE creation_time < ${earliestValidCreationTime} `; await dbQuery(query); } async function checkAndInvalidateSIWENonceEntry( nonce: string, ): Promise { const earliestValidCreationTime = Date.now() - nonceLifetime; const query = SQL` DELETE FROM siwe_nonces WHERE nonce = ${nonce} AND creation_time > ${earliestValidCreationTime} `; const [result] = await dbQuery(query); return result.affectedRows && result.affectedRows > 0; } export { deleteStaleSIWENonceEntries, checkAndInvalidateSIWENonceEntry }; diff --git a/keyserver/src/deleters/thread-deleters.js b/keyserver/src/deleters/thread-deleters.js index d15d82d43..3de251379 100644 --- a/keyserver/src/deleters/thread-deleters.js +++ b/keyserver/src/deleters/thread-deleters.js @@ -1,158 +1,158 @@ // @flow -import { permissionLookup } from 'lib/permissions/thread-permissions'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; +import { permissionLookup } from 'lib/permissions/thread-permissions.js'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import { type ThreadDeletionRequest, type LeaveThreadResult, threadPermissions, -} from 'lib/types/thread-types'; -import { updateTypes } from 'lib/types/update-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/thread-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; import { fetchThreadInfos, fetchServerThreadInfos, -} from '../fetchers/thread-fetchers'; -import { fetchThreadPermissionsBlob } from '../fetchers/thread-permission-fetchers'; -import { fetchUpdateInfoForThreadDeletion } from '../fetchers/update-fetchers'; -import { rescindPushNotifs } from '../push/rescind'; -import type { Viewer } from '../session/viewer'; +} from '../fetchers/thread-fetchers.js'; +import { fetchThreadPermissionsBlob } from '../fetchers/thread-permission-fetchers.js'; +import { fetchUpdateInfoForThreadDeletion } from '../fetchers/update-fetchers.js'; +import { rescindPushNotifs } from '../push/rescind.js'; +import type { Viewer } from '../session/viewer.js'; async function deleteThread( viewer: Viewer, threadDeletionRequest: ThreadDeletionRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const { threadID } = threadDeletionRequest; const [ permissionsBlob, { threadInfos: serverThreadInfos }, ] = await Promise.all([ fetchThreadPermissionsBlob(viewer, threadID), fetchServerThreadInfos(SQL`t.id = ${threadID}`), ]); if (!permissionsBlob) { // This should only occur if the first 
request goes through but the client // never receives the response const [{ updateInfos }, fetchThreadInfoResult] = await Promise.all([ fetchUpdateInfoForThreadDeletion(viewer, threadID), hasMinCodeVersion(viewer.platformDetails, 62) ? undefined : fetchThreadInfos(viewer), ]); if (fetchThreadInfoResult) { const { threadInfos } = fetchThreadInfoResult; return { threadInfos, updatesResult: { newUpdates: updateInfos } }; } return { updatesResult: { newUpdates: updateInfos } }; } const hasPermission = permissionLookup( permissionsBlob, threadPermissions.DELETE_THREAD, ); if (!hasPermission) { throw new ServerError('invalid_credentials'); } await rescindPushNotifs( SQL`n.thread = ${threadID}`, SQL`IF(m.thread = ${threadID}, NULL, m.thread)`, ); // TODO: if org, delete all descendant threads as well. make sure to warn user // TODO: handle descendant thread permission update correctly. // thread-permission-updaters should be used for descendant threads. const query = SQL` DELETE t, ic, d, id, e, ie, re, ire, mm, r, ir, ms, im, up, iu, f, n, ino FROM threads t LEFT JOIN ids ic ON ic.id = t.id LEFT JOIN days d ON d.thread = t.id LEFT JOIN ids id ON id.id = d.id LEFT JOIN entries e ON e.day = d.id LEFT JOIN ids ie ON ie.id = e.id LEFT JOIN revisions re ON re.entry = e.id LEFT JOIN ids ire ON ire.id = re.id LEFT JOIN memberships mm ON mm.thread = t.id LEFT JOIN roles r ON r.thread = t.id LEFT JOIN ids ir ON ir.id = r.id LEFT JOIN messages ms ON ms.thread = t.id LEFT JOIN ids im ON im.id = ms.id LEFT JOIN uploads up ON up.container = ms.id LEFT JOIN ids iu ON iu.id = up.id LEFT JOIN focused f ON f.thread = t.id LEFT JOIN notifications n ON n.thread = t.id LEFT JOIN ids ino ON ino.id = n.id WHERE t.id = ${threadID} `; const serverThreadInfo = serverThreadInfos[threadID]; const time = Date.now(); const updateDatas = []; for (const memberInfo of serverThreadInfo.members) { updateDatas.push({ type: updateTypes.DELETE_THREAD, userID: memberInfo.id, time, threadID, }); } const [{ viewerUpdates }] = await Promise.all([ createUpdates(updateDatas, { viewer, updatesForCurrentSession: 'return' }), dbQuery(query), ]); if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { updatesResult: { newUpdates: viewerUpdates } }; } const { threadInfos } = await fetchThreadInfos(viewer); return { threadInfos, updatesResult: { newUpdates: viewerUpdates, }, }; } async function deleteInaccessibleThreads(): Promise { // A thread is considered "inaccessible" if it has no membership rows. Note // that membership rows exist whenever a user can see a thread, even if they // are not technically a member (in which case role=0). For now, we're also // excluding threads with children, since to properly delete those we would // need to update their parent_thread_id, and possibly change their type. 
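// As a minimal sketch of the "inaccessible" criteria described above, scoped
// to a single thread: the helper below is only an illustration and is not
// part of this patch; the name isThreadInaccessible is hypothetical, though
// dbQuery/SQL and the memberships/threads columns mirror the query that
// follows.
async function isThreadInaccessible(threadID: string): Promise<boolean> {
  const [result] = await dbQuery(SQL`
    SELECT
      t.id,
      COUNT(DISTINCT m.user) AS membershipRows,
      COUNT(DISTINCT c.id) AS childThreads
    FROM threads t
    LEFT JOIN memberships m ON m.thread = t.id AND m.role > -1
    LEFT JOIN threads c ON c.parent_thread_id = t.id
    WHERE t.id = ${threadID}
    GROUP BY t.id
  `);
  if (result.length === 0) {
    // Thread row doesn't exist at all
    return false;
  }
  // "Inaccessible" here means nobody has a membership row for the thread and
  // no other thread points at it as a parent
  return result[0].membershipRows === 0 && result[0].childThreads === 0;
}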
await dbQuery(SQL` DELETE t, i, m2, d, id, e, ie, re, ire, r, ir, ms, im, up, iu, f, n, ino FROM threads t LEFT JOIN ids i ON i.id = t.id LEFT JOIN memberships m1 ON m1.thread = t.id AND m1.role > -1 LEFT JOIN threads c ON c.parent_thread_id = t.id LEFT JOIN memberships m2 ON m2.thread = t.id LEFT JOIN days d ON d.thread = t.id LEFT JOIN ids id ON id.id = d.id LEFT JOIN entries e ON e.day = d.id LEFT JOIN ids ie ON ie.id = e.id LEFT JOIN revisions re ON re.entry = e.id LEFT JOIN ids ire ON ire.id = re.id LEFT JOIN roles r ON r.thread = t.id LEFT JOIN ids ir ON ir.id = r.id LEFT JOIN messages ms ON ms.thread = t.id LEFT JOIN ids im ON im.id = ms.id LEFT JOIN uploads up ON up.container = ms.id LEFT JOIN ids iu ON iu.id = up.id LEFT JOIN focused f ON f.thread = t.id LEFT JOIN notifications n ON n.thread = t.id LEFT JOIN ids ino ON ino.id = n.id WHERE m1.thread IS NULL AND c.id IS NULL `); } export { deleteThread, deleteInaccessibleThreads }; diff --git a/keyserver/src/deleters/update-deleters.js b/keyserver/src/deleters/update-deleters.js index 47f55adca..da4b7d048 100644 --- a/keyserver/src/deleters/update-deleters.js +++ b/keyserver/src/deleters/update-deleters.js @@ -1,52 +1,52 @@ // @flow import invariant from 'invariant'; -import { dbQuery, SQL, mergeOrConditions } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import type { Viewer } from '../session/viewer.js'; async function deleteUpdatesByConditions( conditions: $ReadOnlyArray, ) { invariant(conditions.length > 0, 'no conditions specified'); const conditionClause = mergeOrConditions(conditions); const query = SQL` DELETE u, i FROM updates u LEFT JOIN ids i ON i.id = u.id WHERE `; query.append(conditionClause); await dbQuery(query); } async function deleteExpiredUpdates(): Promise { await dbQuery(SQL` DELETE u, i FROM updates u LEFT JOIN ids i ON i.id = u.id LEFT JOIN ( SELECT u.id AS user, COALESCE(MIN(s.last_update), 99999999999999) AS oldest_last_update FROM sessions s RIGHT JOIN users u ON u.id = s.user GROUP BY u.id ) o ON o.user = u.user WHERE o.user IS NULL OR u.time < o.oldest_last_update `); } async function deleteUpdatesBeforeTimeTargetingSession( viewer: Viewer, beforeTime: number, ): Promise { const condition = SQL`u.target = ${viewer.session} AND u.time <= ${beforeTime}`; await deleteUpdatesByConditions([condition]); } export { deleteExpiredUpdates, deleteUpdatesByConditions, deleteUpdatesBeforeTimeTargetingSession, }; diff --git a/keyserver/src/deleters/upload-deleters.js b/keyserver/src/deleters/upload-deleters.js index 72e45ae7c..0625cb1a4 100644 --- a/keyserver/src/deleters/upload-deleters.js +++ b/keyserver/src/deleters/upload-deleters.js @@ -1,51 +1,51 @@ // @flow -import { ServerError } from 'lib/utils/errors'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function deleteUpload(viewer: Viewer, id: string): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const fetchQuery = SQL` SELECT uploader, container FROM uploads WHERE id = ${id} `; const [result] = await dbQuery(fetchQuery); if (result.length === 0) { throw new 
ServerError('invalid_parameters'); } const [row] = result; const { uploader, container } = row; if (uploader.toString() !== viewer.userID || container !== null) { throw new ServerError('invalid_parameters'); } const deleteQuery = SQL` DELETE u, i FROM uploads u LEFT JOIN ids i ON i.id = u.id WHERE u.id = ${id} `; await dbQuery(deleteQuery); } const maxUnassignedUploadAge = 24 * 60 * 60 * 1000; async function deleteUnassignedUploads(): Promise { const oldestUnassignedUploadToKeep = Date.now() - maxUnassignedUploadAge; await dbQuery(SQL` DELETE u, i FROM uploads u LEFT JOIN ids i ON i.id = u.id WHERE u.container IS NULL AND creation_time < ${oldestUnassignedUploadToKeep} `); } export { deleteUpload, deleteUnassignedUploads }; diff --git a/keyserver/src/emails/sendmail.js b/keyserver/src/emails/sendmail.js index 1b1a51076..464aaaa85 100644 --- a/keyserver/src/emails/sendmail.js +++ b/keyserver/src/emails/sendmail.js @@ -1,55 +1,55 @@ // @flow import invariant from 'invariant'; import nodemailer from 'nodemailer'; -import { isDev } from 'lib/utils/dev-utils'; +import { isDev } from 'lib/utils/dev-utils.js'; import { importJSON } from '../utils/import-json.js'; type MailInfo = { +from: string, +to: string, +subject: string, +html: string, ... }; type Transport = { +sendMail: (info: MailInfo) => Promise, ... }; type PostmarkConfig = { +apiToken: string, }; let cachedTransport: ?Transport; async function getSendmail(): Promise { if (cachedTransport) { return cachedTransport; } const postmark: ?PostmarkConfig = await importJSON({ folder: 'secrets', name: 'postmark', }); if (isDev && !postmark) { cachedTransport = nodemailer.createTransport({ sendmail: true }); return cachedTransport; } invariant(postmark, 'Postmark config missing'); cachedTransport = nodemailer.createTransport({ host: 'smtp.postmarkapp.com', port: 587, secure: false, auth: { user: postmark.apiToken, pass: postmark.apiToken, }, requireTLS: true, }); return cachedTransport; } export default getSendmail; diff --git a/keyserver/src/emails/subscribe-email-updates.js b/keyserver/src/emails/subscribe-email-updates.js index d1b317eae..4ad89cf0e 100644 --- a/keyserver/src/emails/subscribe-email-updates.js +++ b/keyserver/src/emails/subscribe-email-updates.js @@ -1,34 +1,34 @@ // @flow import * as React from 'react'; import { Item, Span, renderEmail } from 'react-html-email'; -import ashoat from 'lib/facts/ashoat'; -import type { EmailSubscriptionRequest } from 'lib/types/account-types'; +import ashoat from 'lib/facts/ashoat.js'; +import type { EmailSubscriptionRequest } from 'lib/types/account-types.js'; -import getSendmail from './sendmail'; -import Template from './template.react'; +import getSendmail from './sendmail.js'; +import Template from './template.react.js'; async function sendEmailSubscriptionRequestToAshoat( request: EmailSubscriptionRequest, ): Promise { const title = 'Somebody wants to learn more about Comm!'; const email = ( ); const html = renderEmail(email); const sendmail = await getSendmail(); await sendmail.sendMail({ from: 'no-reply@comm.app', to: ashoat.landing_email, subject: title, html, }); } export { sendEmailSubscriptionRequestToAshoat }; diff --git a/keyserver/src/endpoints.js b/keyserver/src/endpoints.js index 7ae4f3690..ffe0f005b 100644 --- a/keyserver/src/endpoints.js +++ b/keyserver/src/endpoints.js @@ -1,242 +1,242 @@ // @flow import { baseLegalPolicies } from 'lib/facts/policies.js'; -import type { Endpoint } from 'lib/types/endpoints'; +import type { Endpoint } from 'lib/types/endpoints.js'; import { 
updateActivityResponder, threadSetUnreadStatusResponder, -} from './responders/activity-responders'; -import { deviceTokenUpdateResponder } from './responders/device-responders'; +} from './responders/activity-responders.js'; +import { deviceTokenUpdateResponder } from './responders/device-responders.js'; import { entryFetchResponder, entryRevisionFetchResponder, entryCreationResponder, entryUpdateResponder, entryDeletionResponder, entryRestorationResponder, calendarQueryUpdateResponder, -} from './responders/entry-responders'; -import type { JSONResponder } from './responders/handlers'; -import { getSessionPublicKeysResponder } from './responders/keys-responders'; -import { messageReportCreationResponder } from './responders/message-report-responder'; +} from './responders/entry-responders.js'; +import type { JSONResponder } from './responders/handlers.js'; +import { getSessionPublicKeysResponder } from './responders/keys-responders.js'; +import { messageReportCreationResponder } from './responders/message-report-responder.js'; import { textMessageCreationResponder, messageFetchResponder, multimediaMessageCreationResponder, reactionMessageCreationResponder, -} from './responders/message-responders'; -import { updateRelationshipsResponder } from './responders/relationship-responders'; +} from './responders/message-responders.js'; +import { updateRelationshipsResponder } from './responders/relationship-responders.js'; import { reportCreationResponder, reportMultiCreationResponder, errorReportFetchInfosResponder, -} from './responders/report-responders'; -import { userSearchResponder } from './responders/search-responders'; -import { siweNonceResponder } from './responders/siwe-nonce-responders'; +} from './responders/report-responders.js'; +import { userSearchResponder } from './responders/search-responders.js'; +import { siweNonceResponder } from './responders/siwe-nonce-responders.js'; import { threadDeletionResponder, roleUpdateResponder, memberRemovalResponder, threadLeaveResponder, threadUpdateResponder, threadCreationResponder, threadJoinResponder, -} from './responders/thread-responders'; +} from './responders/thread-responders.js'; import { userSubscriptionUpdateResponder, passwordUpdateResponder, sendVerificationEmailResponder, sendPasswordResetEmailResponder, logOutResponder, accountDeletionResponder, accountCreationResponder, logInResponder, siweAuthResponder, oldPasswordUpdateResponder, updateUserSettingsResponder, policyAcknowledgmentResponder, -} from './responders/user-responders'; -import { codeVerificationResponder } from './responders/verification-responders'; -import { uploadDeletionResponder } from './uploads/uploads'; +} from './responders/user-responders.js'; +import { codeVerificationResponder } from './responders/verification-responders.js'; +import { uploadDeletionResponder } from './uploads/uploads.js'; const jsonEndpoints: { [id: Endpoint]: JSONResponder } = { create_account: { responder: accountCreationResponder, requiredPolicies: [], }, create_entry: { responder: entryCreationResponder, requiredPolicies: baseLegalPolicies, }, create_error_report: { responder: reportCreationResponder, requiredPolicies: [], }, create_message_report: { responder: messageReportCreationResponder, requiredPolicies: baseLegalPolicies, }, create_multimedia_message: { responder: multimediaMessageCreationResponder, requiredPolicies: baseLegalPolicies, }, create_reaction_message: { responder: reactionMessageCreationResponder, requiredPolicies: baseLegalPolicies, }, create_report: { 
responder: reportCreationResponder, requiredPolicies: [], }, create_reports: { responder: reportMultiCreationResponder, requiredPolicies: [], }, create_text_message: { responder: textMessageCreationResponder, requiredPolicies: baseLegalPolicies, }, create_thread: { responder: threadCreationResponder, requiredPolicies: baseLegalPolicies, }, delete_account: { responder: accountDeletionResponder, requiredPolicies: [], }, delete_entry: { responder: entryDeletionResponder, requiredPolicies: baseLegalPolicies, }, delete_thread: { responder: threadDeletionResponder, requiredPolicies: baseLegalPolicies, }, delete_upload: { responder: uploadDeletionResponder, requiredPolicies: baseLegalPolicies, }, fetch_entries: { responder: entryFetchResponder, requiredPolicies: baseLegalPolicies, }, fetch_entry_revisions: { responder: entryRevisionFetchResponder, requiredPolicies: baseLegalPolicies, }, fetch_error_report_infos: { responder: errorReportFetchInfosResponder, requiredPolicies: baseLegalPolicies, }, fetch_messages: { responder: messageFetchResponder, requiredPolicies: baseLegalPolicies, }, get_session_public_keys: { responder: getSessionPublicKeysResponder, requiredPolicies: baseLegalPolicies, }, join_thread: { responder: threadJoinResponder, requiredPolicies: baseLegalPolicies, }, leave_thread: { responder: threadLeaveResponder, requiredPolicies: baseLegalPolicies, }, log_in: { responder: logInResponder, requiredPolicies: [], }, log_out: { responder: logOutResponder, requiredPolicies: [], }, policy_acknowledgment: { responder: policyAcknowledgmentResponder, requiredPolicies: [], }, remove_members: { responder: memberRemovalResponder, requiredPolicies: baseLegalPolicies, }, restore_entry: { responder: entryRestorationResponder, requiredPolicies: baseLegalPolicies, }, search_users: { responder: userSearchResponder, requiredPolicies: baseLegalPolicies, }, send_password_reset_email: { responder: sendPasswordResetEmailResponder, requiredPolicies: [], }, send_verification_email: { responder: sendVerificationEmailResponder, requiredPolicies: [], }, set_thread_unread_status: { responder: threadSetUnreadStatusResponder, requiredPolicies: baseLegalPolicies, }, update_account: { responder: passwordUpdateResponder, requiredPolicies: baseLegalPolicies, }, update_activity: { responder: updateActivityResponder, requiredPolicies: baseLegalPolicies, }, update_calendar_query: { responder: calendarQueryUpdateResponder, requiredPolicies: baseLegalPolicies, }, update_user_settings: { responder: updateUserSettingsResponder, requiredPolicies: baseLegalPolicies, }, update_device_token: { responder: deviceTokenUpdateResponder, requiredPolicies: [], }, update_entry: { responder: entryUpdateResponder, requiredPolicies: baseLegalPolicies, }, update_password: { responder: oldPasswordUpdateResponder, requiredPolicies: baseLegalPolicies, }, update_relationships: { responder: updateRelationshipsResponder, requiredPolicies: baseLegalPolicies, }, update_role: { responder: roleUpdateResponder, requiredPolicies: baseLegalPolicies, }, update_thread: { responder: threadUpdateResponder, requiredPolicies: baseLegalPolicies, }, update_user_subscription: { responder: userSubscriptionUpdateResponder, requiredPolicies: baseLegalPolicies, }, verify_code: { responder: codeVerificationResponder, requiredPolicies: baseLegalPolicies, }, siwe_nonce: { responder: siweNonceResponder, requiredPolicies: [], }, siwe_auth: { responder: siweAuthResponder, requiredPolicies: [], }, }; export { jsonEndpoints }; diff --git 
a/keyserver/src/fetchers/entry-fetchers.js b/keyserver/src/fetchers/entry-fetchers.js index c8730a5ec..fbac1375a 100644 --- a/keyserver/src/fetchers/entry-fetchers.js +++ b/keyserver/src/fetchers/entry-fetchers.js @@ -1,353 +1,353 @@ // @flow import invariant from 'invariant'; -import { permissionLookup } from 'lib/permissions/thread-permissions'; +import { permissionLookup } from 'lib/permissions/thread-permissions.js'; import { filteredThreadIDs, filterExists, nonExcludeDeletedCalendarFilters, -} from 'lib/selectors/calendar-filter-selectors'; -import { rawEntryInfoWithinCalendarQuery } from 'lib/shared/entry-utils'; +} from 'lib/selectors/calendar-filter-selectors.js'; +import { rawEntryInfoWithinCalendarQuery } from 'lib/shared/entry-utils.js'; import type { CalendarQuery, FetchEntryInfosBase, DeltaEntryInfosResponse, RawEntryInfo, -} from 'lib/types/entry-types'; -import { calendarThreadFilterTypes } from 'lib/types/filter-types'; -import type { HistoryRevisionInfo } from 'lib/types/history-types'; +} from 'lib/types/entry-types.js'; +import { calendarThreadFilterTypes } from 'lib/types/filter-types.js'; +import type { HistoryRevisionInfo } from 'lib/types/history-types.js'; import { threadPermissions, type ThreadPermission, -} from 'lib/types/thread-types'; -import { dateString } from 'lib/utils/date-utils'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/thread-types.js'; +import { dateString } from 'lib/utils/date-utils.js'; +import { ServerError } from 'lib/utils/errors.js'; import { dbQuery, SQL, mergeAndConditions, mergeOrConditions, -} from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import type { Viewer } from '../session/viewer'; -import { creationString } from '../utils/idempotent'; -import { checkIfThreadIsBlocked } from './thread-permission-fetchers'; +} from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import type { Viewer } from '../session/viewer.js'; +import { creationString } from '../utils/idempotent.js'; +import { checkIfThreadIsBlocked } from './thread-permission-fetchers.js'; async function fetchEntryInfo( viewer: Viewer, entryID: string, ): Promise { const results = await fetchEntryInfosByID(viewer, [entryID]); if (results.length === 0) { return null; } return results[0]; } function rawEntryInfoFromRow(row: Object): RawEntryInfo { return { id: row.id.toString(), threadID: row.threadID.toString(), text: row.text, year: row.year, month: row.month, day: row.day, creationTime: row.creationTime, creatorID: row.creatorID.toString(), deleted: !!row.deleted, }; } const visPermissionExtractString = `$.${threadPermissions.VISIBLE}.value`; async function fetchEntryInfosByID( viewer: Viewer, entryIDs: $ReadOnlyArray, ): Promise { if (entryIDs.length === 0) { return []; } const viewerID = viewer.id; const query = SQL` SELECT DAY(d.date) AS day, MONTH(d.date) AS month, YEAR(d.date) AS year, e.id, e.text, e.creation_time AS creationTime, d.thread AS threadID, e.deleted, e.creator AS creatorID FROM entries e LEFT JOIN days d ON d.id = e.day LEFT JOIN memberships m ON m.thread = d.thread AND m.user = ${viewerID} WHERE e.id IN (${entryIDs}) AND JSON_EXTRACT(m.permissions, ${visPermissionExtractString}) IS TRUE `; const [result] = await dbQuery(query); return result.map(rawEntryInfoFromRow); } function sqlConditionForCalendarQuery( calendarQuery: CalendarQuery, ): ?SQLStatementType { const { filters, startDate, endDate } = calendarQuery; const conditions = []; 
conditions.push(SQL`d.date BETWEEN ${startDate} AND ${endDate}`); const filterToThreadIDs = filteredThreadIDs(filters); if (filterToThreadIDs && filterToThreadIDs.size > 0) { conditions.push(SQL`d.thread IN (${[...filterToThreadIDs]})`); } else if (filterToThreadIDs) { // Filter to empty set means the result is empty return null; } else { conditions.push(SQL`m.role > 0`); } if (filterExists(filters, calendarThreadFilterTypes.NOT_DELETED)) { conditions.push(SQL`e.deleted = 0`); } return mergeAndConditions(conditions); } async function fetchEntryInfos( viewer: Viewer, calendarQueries: $ReadOnlyArray, ): Promise { const queryConditions = calendarQueries .map(sqlConditionForCalendarQuery) .filter(Boolean); if (queryConditions.length === 0) { return { rawEntryInfos: [] }; } const queryCondition = mergeOrConditions(queryConditions); const viewerID = viewer.id; const query = SQL` SELECT DAY(d.date) AS day, MONTH(d.date) AS month, YEAR(d.date) AS year, e.id, e.text, e.creation_time AS creationTime, d.thread AS threadID, e.deleted, e.creator AS creatorID FROM entries e LEFT JOIN days d ON d.id = e.day LEFT JOIN memberships m ON m.thread = d.thread AND m.user = ${viewerID} WHERE JSON_EXTRACT(m.permissions, ${visPermissionExtractString}) IS TRUE AND `; query.append(queryCondition); query.append(SQL`ORDER BY e.creation_time DESC`); const [result] = await dbQuery(query); const rawEntryInfos = []; for (const row of result) { rawEntryInfos.push(rawEntryInfoFromRow(row)); } return { rawEntryInfos }; } async function checkThreadPermissionForEntry( viewer: Viewer, entryID: string, permission: ThreadPermission, ): Promise { const viewerID = viewer.id; const query = SQL` SELECT m.permissions, t.id FROM entries e LEFT JOIN days d ON d.id = e.day LEFT JOIN threads t ON t.id = d.thread LEFT JOIN memberships m ON m.thread = t.id AND m.user = ${viewerID} WHERE e.id = ${entryID} `; const [result] = await dbQuery(query); if (result.length === 0) { return false; } const row = result[0]; if (row.id === null) { return false; } const threadIsBlocked = await checkIfThreadIsBlocked( viewer, row.id.toString(), permission, ); if (threadIsBlocked) { return false; } const permissions = JSON.parse(row.permissions); return permissionLookup(permissions, permission); } async function fetchEntryRevisionInfo( viewer: Viewer, entryID: string, ): Promise<$ReadOnlyArray> { const hasPermission = await checkThreadPermissionForEntry( viewer, entryID, threadPermissions.VISIBLE, ); if (!hasPermission) { throw new ServerError('invalid_credentials'); } const query = SQL` SELECT r.id, r.author AS authorID, r.text, r.last_update AS lastUpdate, r.deleted, d.thread AS threadID, r.entry AS entryID FROM revisions r LEFT JOIN entries e ON e.id = r.entry LEFT JOIN days d ON d.id = e.day WHERE r.entry = ${entryID} ORDER BY r.last_update DESC `; const [result] = await dbQuery(query); const revisions = []; for (const row of result) { revisions.push({ id: row.id.toString(), authorID: row.authorID.toString(), text: row.text, lastUpdate: row.lastUpdate, deleted: !!row.deleted, threadID: row.threadID.toString(), entryID: row.entryID.toString(), }); } return revisions; } // calendarQueries are the "difference" queries we get from subtracting the old // CalendarQuery from the new one. See calendarQueryDifference. // oldCalendarQuery is the old CalendarQuery. We make sure none of the returned // RawEntryInfos match the old CalendarQuery, so that only the difference is // returned. 
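// For reference, a CalendarQuery (lib/types/entry-types.js) pairs a date range
// with calendar filters. The value below mirrors the shape constructed in
// fetchEntryInfosForThreadThisWeek later in this file; the literal dates and
// thread ID are placeholders for illustration only and appear nowhere in this
// patch.
const exampleCalendarQuery: CalendarQuery = {
  startDate: '2023-01-01',
  endDate: '2023-01-07',
  filters: [
    { type: 'not_deleted' },
    { type: 'threads', threadIDs: ['12345'] },
  ],
};
// fetchEntriesForSession below receives an array of such "difference" queries
// along with the old CalendarQuery they were subtracted from.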
async function fetchEntriesForSession( viewer: Viewer, calendarQueries: $ReadOnlyArray, oldCalendarQuery: CalendarQuery, ): Promise { // If we're not including deleted entries, we will try and set deletedEntryIDs // so that the client can catch possibly stale deleted entryInfos let filterDeleted = null; for (const calendarQuery of calendarQueries) { const notDeletedFilterExists = filterExists( calendarQuery.filters, calendarThreadFilterTypes.NOT_DELETED, ); if (filterDeleted === null) { filterDeleted = notDeletedFilterExists; } else { invariant( filterDeleted === notDeletedFilterExists, 'one of the CalendarQueries returned by calendarQueryDifference has ' + 'a NOT_DELETED filter but another does not: ' + JSON.stringify(calendarQueries), ); } } let calendarQueriesForFetch = calendarQueries; if (filterDeleted) { // Because in the filterDeleted case we still need the deleted RawEntryInfos // in order to construct deletedEntryIDs, we get rid of the NOT_DELETED // filters before passing the CalendarQueries to fetchEntryInfos. We will // filter out the deleted RawEntryInfos in a later step. calendarQueriesForFetch = calendarQueriesForFetch.map(calendarQuery => ({ ...calendarQuery, filters: nonExcludeDeletedCalendarFilters(calendarQuery.filters), })); } const { rawEntryInfos } = await fetchEntryInfos( viewer, calendarQueriesForFetch, ); const entryInfosNotInOldQuery = rawEntryInfos.filter( rawEntryInfo => !rawEntryInfoWithinCalendarQuery(rawEntryInfo, oldCalendarQuery), ); let filteredRawEntryInfos = entryInfosNotInOldQuery; let deletedEntryIDs = []; if (filterDeleted) { filteredRawEntryInfos = entryInfosNotInOldQuery.filter( rawEntryInfo => !rawEntryInfo.deleted, ); deletedEntryIDs = entryInfosNotInOldQuery .filter(rawEntryInfo => rawEntryInfo.deleted) .map(rawEntryInfo => { const { id } = rawEntryInfo; invariant( id !== null && id !== undefined, 'serverID should be set in fetchEntryInfos result', ); return id; }); } return { rawEntryInfos: filteredRawEntryInfos, deletedEntryIDs, }; } async function fetchEntryInfoForLocalID( viewer: Viewer, localID: ?string, ): Promise { if (!localID || !viewer.hasSessionInfo) { return null; } const creation = creationString(viewer, localID); const viewerID = viewer.id; const query = SQL` SELECT DAY(d.date) AS day, MONTH(d.date) AS month, YEAR(d.date) AS year, e.id, e.text, e.creation_time AS creationTime, d.thread AS threadID, e.deleted, e.creator AS creatorID FROM entries e LEFT JOIN days d ON d.id = e.day LEFT JOIN memberships m ON m.thread = d.thread AND m.user = ${viewerID} WHERE e.creator = ${viewerID} AND e.creation = ${creation} AND JSON_EXTRACT(m.permissions, ${visPermissionExtractString}) IS TRUE `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } return rawEntryInfoFromRow(result[0]); } function getSunday(weeksFromLastSunday: number) { const date = new Date(); const today = date.getDate(); const currentDay = date.getDay(); const newDate = date.setDate(today - currentDay + 7 * weeksFromLastSunday); return new Date(newDate); } async function fetchEntryInfosForThreadThisWeek( viewer: Viewer, threadID: string, ): Promise<$ReadOnlyArray> { const startDate = dateString(getSunday(0)); const endDate = dateString(getSunday(1)); const filters = [ { type: 'not_deleted' }, { type: 'threads', threadIDs: [threadID] }, ]; const { rawEntryInfos } = await fetchEntryInfos(viewer, [ { startDate, endDate, filters }, ]); return rawEntryInfos; } export { fetchEntryInfo, fetchEntryInfosByID, fetchEntryInfos, checkThreadPermissionForEntry, 
fetchEntryRevisionInfo, fetchEntriesForSession, fetchEntryInfoForLocalID, fetchEntryInfosForThreadThisWeek, }; diff --git a/keyserver/src/fetchers/key-fetchers.js b/keyserver/src/fetchers/key-fetchers.js index b853ff119..181965aaa 100644 --- a/keyserver/src/fetchers/key-fetchers.js +++ b/keyserver/src/fetchers/key-fetchers.js @@ -1,53 +1,53 @@ // @flow -import type { SessionPublicKeys } from 'lib/types/session-types'; -import { minimumOneTimeKeysRequired } from 'lib/utils/crypto-utils'; -import { ServerError } from 'lib/utils/errors'; +import type { SessionPublicKeys } from 'lib/types/session-types.js'; +import { minimumOneTimeKeysRequired } from 'lib/utils/crypto-utils.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import { deleteOneTimeKey } from '../deleters/one-time-key-deleters'; +import { dbQuery, SQL } from '../database/database.js'; +import { deleteOneTimeKey } from '../deleters/one-time-key-deleters.js'; async function checkIfSessionHasEnoughOneTimeKeys( session: string, ): Promise { const query = SQL` SELECT COUNT(*) AS count FROM one_time_keys WHERE session = ${session} `; const [queryResult] = await dbQuery(query); if (!queryResult.length || queryResult[0].count === undefined) { throw new ServerError('internal_error'); } const [{ count }] = queryResult; return count >= minimumOneTimeKeysRequired; } async function fetchSessionPublicKeys( session: string, ): Promise { const query = SQL` SELECT s.public_key, otk.one_time_key FROM sessions s LEFT JOIN one_time_keys otk ON otk.session = s.id WHERE s.id = ${session} LIMIT 1 `; const [queryResult] = await dbQuery(query); if (!queryResult.length) { return null; } const [result] = queryResult; if (!result.public_key) { return null; } const oneTimeKey = result.one_time_key; const identityKey = result.public_key; await deleteOneTimeKey(session, oneTimeKey); return { identityKey, oneTimeKey }; } export { fetchSessionPublicKeys, checkIfSessionHasEnoughOneTimeKeys }; diff --git a/keyserver/src/fetchers/message-fetchers.js b/keyserver/src/fetchers/message-fetchers.js index 3ee014e92..3d59f46fd 100644 --- a/keyserver/src/fetchers/message-fetchers.js +++ b/keyserver/src/fetchers/message-fetchers.js @@ -1,717 +1,720 @@ // @flow import invariant from 'invariant'; import { sortMessageInfoList, shimUnsupportedRawMessageInfos, -} from 'lib/shared/message-utils'; -import { messageSpecs } from 'lib/shared/messages/message-specs'; -import { notifCollapseKeyForRawMessageInfo } from 'lib/shared/notif-utils'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; +} from 'lib/shared/message-utils.js'; +import { messageSpecs } from 'lib/shared/messages/message-specs.js'; +import { notifCollapseKeyForRawMessageInfo } from 'lib/shared/notif-utils.js'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import { type RawMessageInfo, type RawComposableMessageInfo, type RawRobotextMessageInfo, messageTypes, type MessageType, assertMessageType, type MessageSelectionCriteria, type MessageTruncationStatus, messageTruncationStatus, type FetchMessageInfosResult, defaultMaxMessageAge, -} from 'lib/types/message-types'; -import { threadPermissions } from 'lib/types/thread-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/message-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { ServerError } from 'lib/utils/errors.js'; import { dbQuery, SQL, mergeOrConditions, mergeAndConditions, -} from '../database/database'; -import 
type { SQLStatementType } from '../database/types'; -import type { PushInfo } from '../push/send'; -import type { Viewer } from '../session/viewer'; -import { creationString, localIDFromCreationString } from '../utils/idempotent'; +} from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import type { PushInfo } from '../push/send.js'; +import type { Viewer } from '../session/viewer.js'; +import { + creationString, + localIDFromCreationString, +} from '../utils/idempotent.js'; import { constructMediaFromMediaMessageContentsAndUploadRows, mediaFromRow, -} from './upload-fetchers'; +} from './upload-fetchers.js'; export type CollapsableNotifInfo = { collapseKey: ?string, existingMessageInfos: RawMessageInfo[], newMessageInfos: RawMessageInfo[], }; export type FetchCollapsableNotifsResult = { [userID: string]: CollapsableNotifInfo[], }; const visibleExtractString = `$.${threadPermissions.VISIBLE}.value`; // This function doesn't filter RawMessageInfos based on what messageTypes the // client supports, since each user can have multiple clients. The caller must // handle this filtering. async function fetchCollapsableNotifs( pushInfo: PushInfo, ): Promise { // First, we need to fetch any notifications that should be collapsed const usersToCollapseKeysToInfo = {}; const usersToCollapsableNotifInfo = {}; for (const userID in pushInfo) { usersToCollapseKeysToInfo[userID] = {}; usersToCollapsableNotifInfo[userID] = []; for (const rawMessageInfo of pushInfo[userID].messageInfos) { const collapseKey = notifCollapseKeyForRawMessageInfo(rawMessageInfo); if (!collapseKey) { const collapsableNotifInfo = { collapseKey, existingMessageInfos: [], newMessageInfos: [rawMessageInfo], }; usersToCollapsableNotifInfo[userID].push(collapsableNotifInfo); continue; } if (!usersToCollapseKeysToInfo[userID][collapseKey]) { usersToCollapseKeysToInfo[userID][collapseKey] = { collapseKey, existingMessageInfos: [], newMessageInfos: [], }; } usersToCollapseKeysToInfo[userID][collapseKey].newMessageInfos.push( rawMessageInfo, ); } } const sqlTuples = []; for (const userID in usersToCollapseKeysToInfo) { const collapseKeysToInfo = usersToCollapseKeysToInfo[userID]; for (const collapseKey in collapseKeysToInfo) { sqlTuples.push( SQL`(n.user = ${userID} AND n.collapse_key = ${collapseKey})`, ); } } if (sqlTuples.length === 0) { return usersToCollapsableNotifInfo; } const collapseQuery = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, n.user, n.collapse_key, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM notifications n LEFT JOIN messages m ON m.id = n.message LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = n.user LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = n.user WHERE n.rescinded = 0 AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE AND `; collapseQuery.append(mergeOrConditions(sqlTuples)); collapseQuery.append(SQL`ORDER BY m.time DESC, m.id DESC`); const [collapseResult] = await dbQuery(collapseQuery); const rowsByUser = new Map(); for (const row of collapseResult) { const user = row.user.toString(); const currentRowsForUser = rowsByUser.get(user); if (currentRowsForUser) { currentRowsForUser.push(row); } else { rowsByUser.set(user, [row]); } } const derivedMessages = await 
fetchDerivedMessages(collapseResult); for (const userRows of rowsByUser.values()) { const messages = parseMessageSQLResult(userRows, derivedMessages); for (const message of messages) { const { rawMessageInfo, rows } = message; const [row] = rows; const info = usersToCollapseKeysToInfo[row.user][row.collapse_key]; info.existingMessageInfos.push(rawMessageInfo); } } for (const userID in usersToCollapseKeysToInfo) { const collapseKeysToInfo = usersToCollapseKeysToInfo[userID]; for (const collapseKey in collapseKeysToInfo) { const info = collapseKeysToInfo[collapseKey]; usersToCollapsableNotifInfo[userID].push({ collapseKey: info.collapseKey, existingMessageInfos: sortMessageInfoList(info.existingMessageInfos), newMessageInfos: sortMessageInfoList(info.newMessageInfos), }); } } return usersToCollapsableNotifInfo; } type MessageSQLResult = $ReadOnlyArray<{ rawMessageInfo: RawMessageInfo, rows: $ReadOnlyArray, }>; function parseMessageSQLResult( rows: $ReadOnlyArray, derivedMessages: $ReadOnlyMap< string, RawComposableMessageInfo | RawRobotextMessageInfo, >, viewer?: Viewer, ): MessageSQLResult { const rowsByID = new Map(); for (const row of rows) { const id = row.id.toString(); const currentRowsForID = rowsByID.get(id); if (currentRowsForID) { currentRowsForID.push(row); } else { rowsByID.set(id, [row]); } } const messages = []; for (const messageRows of rowsByID.values()) { const rawMessageInfo = rawMessageInfoFromRows( messageRows, viewer, derivedMessages, ); if (rawMessageInfo) { messages.push({ rawMessageInfo, rows: messageRows }); } } return messages; } function assertSingleRow(rows: $ReadOnlyArray): Object { if (rows.length === 0) { throw new Error('expected single row, but none present!'); } else if (rows.length !== 1) { const messageIDs = rows.map(row => row.id.toString()); console.warn( `expected single row, but there are multiple! 
${messageIDs.join(', ')}`, ); } return rows[0]; } function mostRecentRowType(rows: $ReadOnlyArray): MessageType { if (rows.length === 0) { throw new Error('expected row, but none present!'); } return assertMessageType(rows[0].type); } function rawMessageInfoFromRows( rawRows: $ReadOnlyArray, viewer?: Viewer, derivedMessages: $ReadOnlyMap< string, RawComposableMessageInfo | RawRobotextMessageInfo, >, ): ?RawMessageInfo { const rows = rawRows.map(row => ({ ...row, subthread_permissions: JSON.parse(row.subthread_permissions), })); const type = mostRecentRowType(rows); const messageSpec = messageSpecs[type]; if (type === messageTypes.IMAGES || type === messageTypes.MULTIMEDIA) { let media; if (type === messageTypes.MULTIMEDIA) { const mediaMessageContents = JSON.parse(rows[0].content); media = constructMediaFromMediaMessageContentsAndUploadRows( mediaMessageContents, rows, ); } else { media = rows.filter(row => row.uploadID).map(mediaFromRow); } const [row] = rows; const localID = localIDFromCreationString(viewer, row.creation); invariant( messageSpec.rawMessageInfoFromServerDBRow, `multimedia message spec should have rawMessageInfoFromServerDBRow`, ); return messageSpec.rawMessageInfoFromServerDBRow(row, { media, derivedMessages, localID, }); } const row = assertSingleRow(rows); const localID = localIDFromCreationString(viewer, row.creation); invariant( messageSpec.rawMessageInfoFromServerDBRow, `message spec ${type} should have rawMessageInfoFromServerDBRow`, ); return messageSpec.rawMessageInfoFromServerDBRow(row, { derivedMessages, localID, }); } async function fetchMessageInfos( viewer: Viewer, criteria: MessageSelectionCriteria, numberPerThread: number, ): Promise { const { sqlClause: selectionClause, timeFilterData, } = parseMessageSelectionCriteria(viewer, criteria); const truncationStatuses = {}; const viewerID = viewer.id; const query = SQL` WITH thread_window AS ( SELECT m.id, m.thread AS threadID, m.user AS creatorID, m.target_message as targetMessageID, m.content, m.time, m.type, m.creation, stm.permissions AS subthread_permissions, ROW_NUMBER() OVER ( PARTITION BY threadID ORDER BY m.time DESC, m.id DESC ) n FROM messages m LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewerID} WHERE JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE AND `; query.append(selectionClause); query.append(SQL` ) SELECT tw.*, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM thread_window tw LEFT JOIN uploads up ON up.container = tw.id WHERE tw.n <= ${numberPerThread} ORDER BY tw.threadID, tw.time DESC, tw.id DESC `); const [result] = await dbQuery(query); const derivedMessages = await fetchDerivedMessages(result, viewer); const messages = await parseMessageSQLResult(result, derivedMessages, viewer); const rawMessageInfos = []; const threadToMessageCount = new Map(); for (const message of messages) { const { rawMessageInfo } = message; rawMessageInfos.push(rawMessageInfo); const { threadID } = rawMessageInfo; const currentCountValue = threadToMessageCount.get(threadID); const currentCount = currentCountValue ? currentCountValue : 0; threadToMessageCount.set(threadID, currentCount + 1); } for (const [threadID, messageCount] of threadToMessageCount) { // If we matched the exact amount we limited to, we're probably truncating // our result set. 
By setting TRUNCATED here, we tell the client that the // result set might not be continguous with what's already in their // MessageStore. More details about TRUNCATED can be found in // lib/types/message-types.js if (messageCount >= numberPerThread) { // We won't set TRUNCATED if a cursor was specified for a given thread, // since then the result is guaranteed to be contiguous with what the // client has if (criteria.threadCursors && criteria.threadCursors[threadID]) { truncationStatuses[threadID] = messageTruncationStatus.UNCHANGED; } else { truncationStatuses[threadID] = messageTruncationStatus.TRUNCATED; } continue; } const hasTimeFilter = hasTimeFilterForThread(timeFilterData, threadID); if (!hasTimeFilter) { // If there is no time filter for a given thread, and there are fewer // messages returned than the max we queried for a given thread, we can // conclude that our result set includes all messages for that thread truncationStatuses[threadID] = messageTruncationStatus.EXHAUSTIVE; } } for (const rawMessageInfo of rawMessageInfos) { if (messageSpecs[rawMessageInfo.type].startsThread) { truncationStatuses[rawMessageInfo.threadID] = messageTruncationStatus.EXHAUSTIVE; } } for (const threadID in criteria.threadCursors) { const truncationStatus = truncationStatuses[threadID]; if (truncationStatus !== null && truncationStatus !== undefined) { continue; } const hasTimeFilter = hasTimeFilterForThread(timeFilterData, threadID); if (!hasTimeFilter) { // If there is no time filter for a given thread, and zero messages were // returned, we can conclude that this thread has zero messages. This is // a case of database corruption that should not be possible, but likely // we have some threads like this on prod (either due to some transient // issues or due to old buggy code) truncationStatuses[threadID] = messageTruncationStatus.EXHAUSTIVE; } else { // If this thread was explicitly queried for, and we got no results, but // we can't conclude that it's EXHAUSTIVE, then we'll set to UNCHANGED. 
truncationStatuses[threadID] = messageTruncationStatus.UNCHANGED; } } const shimmedRawMessageInfos = shimUnsupportedRawMessageInfos( rawMessageInfos, viewer.platformDetails, ); return { rawMessageInfos: shimmedRawMessageInfos, truncationStatuses, }; } function hasTimeFilterForThread( timeFilterData: TimeFilterData, threadID: string, ) { if (timeFilterData.timeFilter === 'ALL') { return true; } else if (timeFilterData.timeFilter === 'NONE') { return false; } else if (timeFilterData.timeFilter === 'ALL_EXCEPT_EXCLUDED') { return !timeFilterData.excludedFromTimeFilter.has(threadID); } else { invariant( false, `unrecognized timeFilter type ${timeFilterData.timeFilter}`, ); } } type TimeFilterData = | { +timeFilter: 'ALL' | 'NONE' } | { +timeFilter: 'ALL_EXCEPT_EXCLUDED', +excludedFromTimeFilter: $ReadOnlySet, }; type ParsedMessageSelectionCriteria = { +sqlClause: SQLStatementType, +timeFilterData: TimeFilterData, }; function parseMessageSelectionCriteria( viewer: Viewer, criteria: MessageSelectionCriteria, ): ParsedMessageSelectionCriteria { const minMessageTime = Date.now() - defaultMaxMessageAge; const shouldApplyTimeFilter = hasMinCodeVersion(viewer.platformDetails, 130); let globalTimeFilter; if (criteria.newerThan) { globalTimeFilter = SQL`m.time > ${criteria.newerThan}`; } else if (!criteria.threadCursors && shouldApplyTimeFilter) { globalTimeFilter = SQL`m.time > ${minMessageTime}`; } const threadConditions = []; if ( criteria.joinedThreads === true && shouldApplyTimeFilter && !globalTimeFilter ) { threadConditions.push(SQL`(mm.role > 0 AND m.time > ${minMessageTime})`); } else if (criteria.joinedThreads === true) { threadConditions.push(SQL`mm.role > 0`); } if (criteria.threadCursors) { for (const threadID in criteria.threadCursors) { const cursor = criteria.threadCursors[threadID]; if (cursor) { threadConditions.push( SQL`(m.thread = ${threadID} AND m.id < ${cursor})`, ); } else { threadConditions.push(SQL`m.thread = ${threadID}`); } } } if (threadConditions.length === 0) { throw new ServerError('internal_error'); } const threadClause = mergeOrConditions(threadConditions); let timeFilterData; if (globalTimeFilter) { timeFilterData = { timeFilter: 'ALL' }; } else if (!shouldApplyTimeFilter) { timeFilterData = { timeFilter: 'NONE' }; } else { invariant( criteria.threadCursors, 'ALL_EXCEPT_EXCLUDED should correspond to threadCursors being set', ); const excludedFromTimeFilter = new Set(Object.keys(criteria.threadCursors)); timeFilterData = { timeFilter: 'ALL_EXCEPT_EXCLUDED', excludedFromTimeFilter, }; } const conditions = [globalTimeFilter, threadClause].filter(Boolean); const sqlClause = mergeAndConditions(conditions); return { sqlClause, timeFilterData }; } function messageSelectionCriteriaToInitialTruncationStatuses( criteria: MessageSelectionCriteria, defaultTruncationStatus: MessageTruncationStatus, ) { const truncationStatuses = {}; if (criteria.threadCursors) { for (const threadID in criteria.threadCursors) { truncationStatuses[threadID] = defaultTruncationStatus; } } return truncationStatuses; } async function fetchMessageInfosSince( viewer: Viewer, criteria: MessageSelectionCriteria, maxNumberPerThread: number, ): Promise { const { sqlClause: selectionClause } = parseMessageSelectionCriteria( viewer, criteria, ); const truncationStatuses = messageSelectionCriteriaToInitialTruncationStatuses( criteria, messageTruncationStatus.UNCHANGED, ); const viewerID = viewer.id; const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, 
m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewerID} WHERE JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE AND `; query.append(selectionClause); query.append(SQL` ORDER BY m.thread, m.time DESC, m.id DESC `); const [result] = await dbQuery(query); const derivedMessages = await fetchDerivedMessages(result, viewer); const messages = await parseMessageSQLResult(result, derivedMessages, viewer); const rawMessageInfos = []; let currentThreadID = null; let numMessagesForCurrentThreadID = 0; for (const message of messages) { const { rawMessageInfo } = message; const { threadID } = rawMessageInfo; if (threadID !== currentThreadID) { currentThreadID = threadID; numMessagesForCurrentThreadID = 1; truncationStatuses[threadID] = messageTruncationStatus.UNCHANGED; } else { numMessagesForCurrentThreadID++; } if (numMessagesForCurrentThreadID <= maxNumberPerThread) { if (messageSpecs[rawMessageInfo.type].startsThread) { truncationStatuses[threadID] = messageTruncationStatus.EXHAUSTIVE; } rawMessageInfos.push(rawMessageInfo); } else if (numMessagesForCurrentThreadID === maxNumberPerThread + 1) { truncationStatuses[threadID] = messageTruncationStatus.TRUNCATED; } } const shimmedRawMessageInfos = shimUnsupportedRawMessageInfos( rawMessageInfos, viewer.platformDetails, ); return { rawMessageInfos: shimmedRawMessageInfos, truncationStatuses, }; } function getMessageFetchResultFromRedisMessages( viewer: Viewer, rawMessageInfos: $ReadOnlyArray, ): FetchMessageInfosResult { const truncationStatuses = {}; for (const rawMessageInfo of rawMessageInfos) { truncationStatuses[rawMessageInfo.threadID] = messageTruncationStatus.UNCHANGED; } const shimmedRawMessageInfos = shimUnsupportedRawMessageInfos( rawMessageInfos, viewer.platformDetails, ); return { rawMessageInfos: shimmedRawMessageInfos, truncationStatuses, }; } async function fetchMessageInfoForLocalID( viewer: Viewer, localID: ?string, ): Promise { if (!localID || !viewer.hasSessionInfo) { return null; } const creation = creationString(viewer, localID); const viewerID = viewer.id; const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewerID} WHERE m.user = ${viewerID} AND m.creation = ${creation} AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const derivedMessages = await fetchDerivedMessages(result, viewer); return rawMessageInfoFromRows(result, viewer, derivedMessages); } const entryIDExtractString = '$.entryID'; async function fetchMessageInfoForEntryAction( viewer: Viewer, messageType: MessageType, entryID: string, threadID: string, ): Promise { const viewerID = viewer.id; const query = SQL` 
SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships mm ON mm.thread = m.thread AND mm.user = ${viewerID} WHERE m.user = ${viewerID} AND m.thread = ${threadID} AND m.type = ${messageType} AND JSON_EXTRACT(m.content, ${entryIDExtractString}) = ${entryID} AND JSON_EXTRACT(mm.permissions, ${visibleExtractString}) IS TRUE `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const derivedMessages = await fetchDerivedMessages(result, viewer); return rawMessageInfoFromRows(result, viewer, derivedMessages); } async function fetchMessageRowsByIDs(messageIDs: $ReadOnlyArray) { const query = SQL` SELECT m.id, m.thread AS threadID, m.content, m.time, m.type, m.creation, m.user AS creatorID, m.target_message as targetMessageID, stm.permissions AS subthread_permissions, up.id AS uploadID, up.type AS uploadType, up.secret AS uploadSecret, up.extra AS uploadExtra FROM messages m LEFT JOIN uploads up ON up.container = m.id LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = m.user WHERE m.id IN (${messageIDs}) `; const [result] = await dbQuery(query); return result; } async function fetchDerivedMessages( rows: $ReadOnlyArray, viewer?: Viewer, ): Promise< $ReadOnlyMap, > { const requiredIDs = new Set(); for (const row of rows) { if (row.type === messageTypes.SIDEBAR_SOURCE) { const content = JSON.parse(row.content); requiredIDs.add(content.sourceMessageID); } } const messagesByID = new Map< string, RawComposableMessageInfo | RawRobotextMessageInfo, >(); if (requiredIDs.size === 0) { return messagesByID; } const result = await fetchMessageRowsByIDs([...requiredIDs]); const messages = await parseMessageSQLResult(result, new Map(), viewer); for (const message of messages) { const { rawMessageInfo } = message; if (rawMessageInfo.id) { invariant( rawMessageInfo.type !== messageTypes.SIDEBAR_SOURCE && rawMessageInfo.type !== messageTypes.REACTION, 'SIDEBAR_SOURCE should not point to a SIDEBAR_SOURCE or REACTION', ); messagesByID.set(rawMessageInfo.id, rawMessageInfo); } } return messagesByID; } async function fetchMessageInfoByID( viewer?: Viewer, messageID: string, ): Promise { const result = await fetchMessageRowsByIDs([messageID]); if (result.length === 0) { return null; } const derivedMessages = await fetchDerivedMessages(result, viewer); return rawMessageInfoFromRows(result, viewer, derivedMessages); } export { fetchCollapsableNotifs, fetchMessageInfos, fetchMessageInfosSince, getMessageFetchResultFromRedisMessages, fetchMessageInfoForLocalID, fetchMessageInfoForEntryAction, fetchMessageInfoByID, }; diff --git a/keyserver/src/fetchers/relationship-fetchers.js b/keyserver/src/fetchers/relationship-fetchers.js index 11b5474fb..a27582b25 100644 --- a/keyserver/src/fetchers/relationship-fetchers.js +++ b/keyserver/src/fetchers/relationship-fetchers.js @@ -1,104 +1,107 @@ // @flow -import _groupBy from 'lodash/fp/groupBy'; +import _groupBy from 'lodash/fp/groupBy.js'; -import type { RelationshipErrors } from 'lib/types/relationship-types'; -import { undirectedStatus, directedStatus } from 'lib/types/relationship-types'; +import type { RelationshipErrors } from 'lib/types/relationship-types.js'; +import { + undirectedStatus, + directedStatus, +} from 
'lib/types/relationship-types.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; type RelationshipOperation = | 'delete_directed' | 'friend' | 'pending_friend' | 'know_of'; type UserRelationshipOperations = { [string]: $ReadOnlyArray, }; type UserRelationshipOperationsResult = { +errors: RelationshipErrors, +userRelationshipOperations: UserRelationshipOperations, }; async function fetchFriendRequestRelationshipOperations( viewer: Viewer, userIDs: string[], ): Promise { const query = SQL` SELECT user1, user2, status FROM relationships_directed WHERE (user1 IN (${userIDs}) AND user2 = ${viewer.userID}) OR (user1 = ${viewer.userID} AND user2 IN (${userIDs})) UNION SELECT user1, user2, status FROM relationships_undirected WHERE (user1 = ${viewer.userID} AND user2 IN (${userIDs})) OR (user1 IN (${userIDs}) AND user2 = ${viewer.userID}) `; const [result] = await dbQuery(query); const relationshipsByUserId = _groupBy( ({ user1, user2 }) => (user1.toString() === viewer.userID ? user2 : user1), result, ); const errors: RelationshipErrors = {}; const userRelationshipOperations: UserRelationshipOperations = {}; for (const userID in relationshipsByUserId) { const relationships = relationshipsByUserId[userID]; const viewerBlockedTarget = relationships.some( relationship => relationship.status === directedStatus.BLOCKED && relationship.user1.toString() === viewer.userID, ); const targetBlockedViewer = relationships.some( relationship => relationship.status === directedStatus.BLOCKED && relationship.user2.toString() === viewer.userID, ); const friendshipExists = relationships.some( relationship => relationship.status === undirectedStatus.FRIEND, ); const viewerRequestedTargetFriendship = relationships.some( relationship => relationship.status === directedStatus.PENDING_FRIEND && relationship.user1.toString() === viewer.userID, ); const targetRequestedViewerFriendship = relationships.some( relationship => relationship.status === directedStatus.PENDING_FRIEND && relationship.user2.toString() === viewer.userID, ); const operations = []; if (targetBlockedViewer) { if (viewerBlockedTarget) { operations.push('delete_directed'); } const user_blocked = errors.user_blocked || []; errors.user_blocked = [...user_blocked, userID]; } else if (friendshipExists) { const already_friends = errors.already_friends || []; errors.already_friends = [...already_friends, userID]; } else if (targetRequestedViewerFriendship) { operations.push('friend', 'delete_directed'); } else if (!viewerRequestedTargetFriendship) { operations.push('pending_friend'); } userRelationshipOperations[userID] = operations; } for (const userID of userIDs) { if (!(userID in userRelationshipOperations)) { userRelationshipOperations[userID] = ['know_of', 'pending_friend']; } } return { errors, userRelationshipOperations }; } export { fetchFriendRequestRelationshipOperations }; diff --git a/keyserver/src/fetchers/report-fetchers.js b/keyserver/src/fetchers/report-fetchers.js index 5e65e6332..636984507 100644 --- a/keyserver/src/fetchers/report-fetchers.js +++ b/keyserver/src/fetchers/report-fetchers.js @@ -1,107 +1,107 @@ // @flow -import { isStaff } from 'lib/shared/user-utils'; +import { isStaff } from 'lib/shared/user-utils.js'; import { type FetchErrorReportInfosResponse, type FetchErrorReportInfosRequest, type ReduxToolsImport, reportTypes, -} from 'lib/types/report-types'; -import { 
ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; +} from 'lib/types/report-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function fetchErrorReportInfos( viewer: Viewer, request: FetchErrorReportInfosRequest, ): Promise { if (!viewer.loggedIn || !isStaff(viewer.userID)) { throw new ServerError('invalid_credentials'); } const query = SQL` SELECT r.id, r.user, r.platform, r.report, r.creation_time, u.username FROM reports r LEFT JOIN users u ON u.id = r.user `; if (request.cursor) { query.append(SQL`WHERE r.id < ${request.cursor} `); } query.append(SQL`ORDER BY r.id DESC`); const [result] = await dbQuery(query); const reports = []; const userInfos = {}; for (const row of result) { const viewerID = row.user.toString(); const report = JSON.parse(row.report); let { platformDetails } = report; if (!platformDetails) { platformDetails = { platform: row.platform, codeVersion: report.codeVersion, stateVersion: report.stateVersion, }; } reports.push({ id: row.id.toString(), viewerID, platformDetails, creationTime: row.creation_time, }); if (row.username) { userInfos[viewerID] = { id: viewerID, username: row.username, }; } } return { reports, userInfos: values(userInfos) }; } async function fetchReduxToolsImport( viewer: Viewer, id: string, ): Promise { if (!viewer.loggedIn || !isStaff(viewer.userID)) { throw new ServerError('invalid_credentials'); } const query = SQL` SELECT user, report, creation_time FROM reports WHERE id = ${id} AND type = ${reportTypes.ERROR} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('invalid_parameters'); } const row = result[0]; const report = JSON.parse(row.report); const _persist = report.preloadedState._persist ? report.preloadedState._persist : {}; const navState = report.currentState && report.currentState.navState ? 
report.currentState.navState : undefined; return { preloadedState: { ...report.preloadedState, _persist: { ..._persist, // Setting this to false disables redux-persist rehydrated: false, }, navState, frozen: true, }, payload: report.actions, }; } export { fetchErrorReportInfos, fetchReduxToolsImport }; diff --git a/keyserver/src/fetchers/role-fetchers.js b/keyserver/src/fetchers/role-fetchers.js index 43288e57a..f83a23c26 100644 --- a/keyserver/src/fetchers/role-fetchers.js +++ b/keyserver/src/fetchers/role-fetchers.js @@ -1,28 +1,28 @@ // @flow -import type { RoleInfo } from 'lib/types/thread-types'; +import type { RoleInfo } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; async function fetchRoles(threadID: string): Promise { const query = SQL` SELECT r.id, r.name, r.permissions, r.id = t.default_role AS is_default FROM roles r LEFT JOIN threads t ON t.id = r.thread WHERE r.thread = ${threadID} `; const [result] = await dbQuery(query); const roles = []; for (const row of result) { roles.push({ id: row.id.toString(), name: row.name, permissions: JSON.parse(row.permissions), isDefault: Boolean(row.is_default), }); } return roles; } export { fetchRoles }; diff --git a/keyserver/src/fetchers/session-fetchers.js b/keyserver/src/fetchers/session-fetchers.js index b3d77bc89..cc48c63c8 100644 --- a/keyserver/src/fetchers/session-fetchers.js +++ b/keyserver/src/fetchers/session-fetchers.js @@ -1,45 +1,45 @@ // @flow -import type { CalendarQuery } from 'lib/types/entry-types'; +import type { CalendarQuery } from 'lib/types/entry-types.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; type CalendarSessionResult = { userID: string, session: string, calendarQuery: CalendarQuery, }; async function fetchActiveSessionsForThread( threadID: string, ): Promise { const query = SQL` SELECT s.id, s.user, s.query FROM memberships m LEFT JOIN sessions s ON s.user = m.user WHERE m.thread = ${threadID} AND m.role > 0 AND s.query IS NOT NULL `; const [result] = await dbQuery(query); const filters = []; for (const row of result) { filters.push({ userID: row.user.toString(), session: row.id.toString(), calendarQuery: JSON.parse(row.query), }); } return filters; } async function fetchOtherSessionsForViewer(viewer: Viewer): Promise { const query = SQL` SELECT id FROM sessions WHERE user = ${viewer.userID} AND id != ${viewer.session} `; const [result] = await dbQuery(query); return result.map(row => row.id.toString()); } export { fetchActiveSessionsForThread, fetchOtherSessionsForViewer }; diff --git a/keyserver/src/fetchers/thread-fetchers.js b/keyserver/src/fetchers/thread-fetchers.js index 1a36e099d..7f2779de6 100644 --- a/keyserver/src/fetchers/thread-fetchers.js +++ b/keyserver/src/fetchers/thread-fetchers.js @@ -1,262 +1,262 @@ // @flow -import { getAllThreadPermissions } from 'lib/permissions/thread-permissions'; +import { getAllThreadPermissions } from 'lib/permissions/thread-permissions.js'; import { rawThreadInfoFromServerThreadInfo, getContainingThreadID, getCommunity, -} from 'lib/shared/thread-utils'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; -import type { RawMessageInfo, MessageInfo } from 'lib/types/message-types'; +} from 'lib/shared/thread-utils.js'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; 
+import type { RawMessageInfo, MessageInfo } from 'lib/types/message-types.js'; import { threadTypes, type ThreadType, type RawThreadInfo, type ServerThreadInfo, -} from 'lib/types/thread-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/thread-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import type { Viewer } from '../session/viewer.js'; type FetchServerThreadInfosResult = { +threadInfos: { +[id: string]: ServerThreadInfo }, }; async function fetchServerThreadInfos( condition?: SQLStatementType, ): Promise { const whereClause = condition ? SQL`WHERE `.append(condition) : ''; const query = SQL` SELECT t.id, t.name, t.parent_thread_id, t.containing_thread_id, t.community, t.depth, t.color, t.description, t.type, t.creation_time, t.default_role, t.source_message, t.replies_count, r.id AS role, r.name AS role_name, r.permissions AS role_permissions, m.user, m.permissions, m.subscription, m.last_read_message < m.last_message AS unread, m.sender FROM threads t LEFT JOIN ( SELECT thread, id, name, permissions FROM roles UNION SELECT id AS thread, 0 AS id, NULL AS name, NULL AS permissions FROM threads ) r ON r.thread = t.id LEFT JOIN memberships m ON m.role = r.id AND m.thread = t.id AND m.role >= 0 ` .append(whereClause) .append(SQL` ORDER BY m.user ASC`); const [result] = await dbQuery(query); const threadInfos = {}; for (const row of result) { const threadID = row.id.toString(); if (!threadInfos[threadID]) { threadInfos[threadID] = { id: threadID, type: row.type, name: row.name ? row.name : '', description: row.description ? row.description : '', color: row.color, creationTime: row.creation_time, parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, containingThreadID: row.containing_thread_id ? row.containing_thread_id.toString() : null, depth: row.depth, community: row.community ? row.community.toString() : null, members: [], roles: {}, repliesCount: row.replies_count, }; } const sourceMessageID = row.source_message?.toString(); if (sourceMessageID) { threadInfos[threadID].sourceMessageID = sourceMessageID; } const role = row.role.toString(); if (row.role && !threadInfos[threadID].roles[role]) { threadInfos[threadID].roles[role] = { id: role, name: row.role_name, permissions: JSON.parse(row.role_permissions), isDefault: role === row.default_role.toString(), }; } if (row.user) { const userID = row.user.toString(); const allPermissions = getAllThreadPermissions( JSON.parse(row.permissions), threadID, ); threadInfos[threadID].members.push({ id: userID, permissions: allPermissions, role: row.role ? role : null, subscription: JSON.parse(row.subscription), unread: row.role ? 
!!row.unread : null, isSender: !!row.sender, }); } } return { threadInfos }; } export type FetchThreadInfosResult = { +threadInfos: { +[id: string]: RawThreadInfo }, }; async function fetchThreadInfos( viewer: Viewer, condition?: SQLStatementType, ): Promise { const serverResult = await fetchServerThreadInfos(condition); return rawThreadInfosFromServerThreadInfos(viewer, serverResult); } const shimCommunityRoot = { [threadTypes.COMMUNITY_ROOT]: threadTypes.COMMUNITY_SECRET_SUBTHREAD, [threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT]: threadTypes.COMMUNITY_SECRET_SUBTHREAD, [threadTypes.COMMUNITY_OPEN_ANNOUNCEMENT_SUBTHREAD]: threadTypes.COMMUNITY_OPEN_SUBTHREAD, [threadTypes.COMMUNITY_SECRET_ANNOUNCEMENT_SUBTHREAD]: threadTypes.COMMUNITY_SECRET_SUBTHREAD, [threadTypes.GENESIS]: threadTypes.COMMUNITY_SECRET_SUBTHREAD, }; function rawThreadInfosFromServerThreadInfos( viewer: Viewer, serverResult: FetchServerThreadInfosResult, ): FetchThreadInfosResult { const viewerID = viewer.id; const hasCodeVersionBelow70 = !hasMinCodeVersion(viewer.platformDetails, 70); const hasCodeVersionBelow87 = !hasMinCodeVersion(viewer.platformDetails, 87); const hasCodeVersionBelow102 = !hasMinCodeVersion( viewer.platformDetails, 102, ); const hasCodeVersionBelow104 = !hasMinCodeVersion( viewer.platformDetails, 104, ); const threadInfos = {}; for (const threadID in serverResult.threadInfos) { const serverThreadInfo = serverResult.threadInfos[threadID]; const threadInfo = rawThreadInfoFromServerThreadInfo( serverThreadInfo, viewerID, { includeVisibilityRules: hasCodeVersionBelow70, filterMemberList: hasCodeVersionBelow70, shimThreadTypes: hasCodeVersionBelow87 ? shimCommunityRoot : null, hideThreadStructure: hasCodeVersionBelow102, filterDetailedThreadEditPermissions: hasCodeVersionBelow104, }, ); if (threadInfo) { threadInfos[threadID] = threadInfo; } } return { threadInfos }; } async function verifyThreadIDs( threadIDs: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { if (threadIDs.length === 0) { return []; } const query = SQL`SELECT id FROM threads WHERE id IN (${threadIDs})`; const [result] = await dbQuery(query); const verified = []; for (const row of result) { verified.push(row.id.toString()); } return verified; } async function verifyThreadID(threadID: string): Promise { const result = await verifyThreadIDs([threadID]); return result.length !== 0; } type ThreadAncestry = { +containingThreadID: ?string, +community: ?string, +depth: number, }; async function determineThreadAncestry( parentThreadID: ?string, threadType: ThreadType, ): Promise { if (!parentThreadID) { return { containingThreadID: null, community: null, depth: 0 }; } const parentThreadInfos = await fetchServerThreadInfos( SQL`t.id = ${parentThreadID}`, ); const parentThreadInfo = parentThreadInfos.threadInfos[parentThreadID]; if (!parentThreadInfo) { throw new ServerError('invalid_parameters'); } const containingThreadID = getContainingThreadID( parentThreadInfo, threadType, ); const community = getCommunity(parentThreadInfo); const depth = parentThreadInfo.depth + 1; return { containingThreadID, community, depth }; } function personalThreadQuery( firstMemberID: string, secondMemberID: string, ): SQLStatementType { return SQL` SELECT t.id FROM threads t INNER JOIN memberships m1 ON m1.thread = t.id AND m1.user = ${firstMemberID} INNER JOIN memberships m2 ON m2.thread = t.id AND m2.user = ${secondMemberID} WHERE t.type = ${threadTypes.PERSONAL} AND m1.role > 0 AND m2.role > 0 `; } async function fetchPersonalThreadID( viewerID: string, otherMemberID: string, 
): Promise { const query = personalThreadQuery(viewerID, otherMemberID); const [threads] = await dbQuery(query); return threads[0]?.id.toString(); } async function serverThreadInfoFromMessageInfo( message: RawMessageInfo | MessageInfo, ): Promise { const threadID = message.threadID; const threads = await fetchServerThreadInfos(SQL`t.id = ${threadID}`); return threads.threadInfos[threadID]; } export { fetchServerThreadInfos, fetchThreadInfos, rawThreadInfosFromServerThreadInfos, verifyThreadIDs, verifyThreadID, determineThreadAncestry, personalThreadQuery, fetchPersonalThreadID, serverThreadInfoFromMessageInfo, }; diff --git a/keyserver/src/fetchers/thread-permission-fetchers.js b/keyserver/src/fetchers/thread-permission-fetchers.js index 025dc585d..14528efea 100644 --- a/keyserver/src/fetchers/thread-permission-fetchers.js +++ b/keyserver/src/fetchers/thread-permission-fetchers.js @@ -1,366 +1,366 @@ // @flow import { permissionLookup, makePermissionsBlob, getRoleForPermissions, -} from 'lib/permissions/thread-permissions'; -import { relationshipBlockedInEitherDirection } from 'lib/shared/relationship-utils'; +} from 'lib/permissions/thread-permissions.js'; +import { relationshipBlockedInEitherDirection } from 'lib/shared/relationship-utils.js'; import { threadFrozenDueToBlock, permissionsDisabledByBlock, -} from 'lib/shared/thread-utils'; -import { userRelationshipStatus } from 'lib/types/relationship-types'; +} from 'lib/shared/thread-utils.js'; +import { userRelationshipStatus } from 'lib/types/relationship-types.js'; import type { ThreadType, ThreadPermission, ThreadPermissionsBlob, ThreadRolePermissionsBlob, -} from 'lib/types/thread-types'; +} from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; -import { fetchThreadInfos } from './thread-fetchers'; -import { fetchKnownUserInfos } from './user-fetchers'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; +import { fetchThreadInfos } from './thread-fetchers.js'; +import { fetchKnownUserInfos } from './user-fetchers.js'; // Note that it's risky to verify permissions by inspecting the blob directly. // There are other factors that can override permissions in the permissions // blob, such as when one user blocks another. It's always better to go through // checkThreads and friends, or by looking at the ThreadInfo through // threadHasPermission. 
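// Illustrative sketch (not part of this patch): per the note above, callers
// should prefer the check helpers defined below over reading a membership
// row's permissions blob themselves. Assuming a `viewer` and `threadID` are
// in scope and `threadPermissions` is imported from
// 'lib/types/thread-types.js', a permission check would look roughly like:
//
//   const canSeeThread = await checkThreadPermission(
//     viewer,
//     threadID,
//     threadPermissions.VISIBLE,
//   );
//
// checkThreadPermission routes through checkThread/checkThreads, which also
// account for factors (such as one user blocking another) that a raw read of
// the permissions blob would miss.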
async function fetchThreadPermissionsBlob( viewer: Viewer, threadID: string, ): Promise { const viewerID = viewer.id; const query = SQL` SELECT permissions FROM memberships WHERE thread = ${threadID} AND user = ${viewerID} `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const row = result[0]; return JSON.parse(row.permissions); } function checkThreadPermission( viewer: Viewer, threadID: string, permission: ThreadPermission, ): Promise { return checkThread(viewer, threadID, [{ check: 'permission', permission }]); } function viewerIsMember(viewer: Viewer, threadID: string): Promise { return checkThread(viewer, threadID, [{ check: 'is_member' }]); } type Check = | { +check: 'is_member' } | { +check: 'permission', +permission: ThreadPermission }; function isThreadValid( permissions: ?ThreadPermissionsBlob, role: number, checks: $ReadOnlyArray, ): boolean { for (const check of checks) { if (check.check === 'is_member') { if (role <= 0) { return false; } } else if (check.check === 'permission') { if (!permissionLookup(permissions, check.permission)) { return false; } } } return true; } async function checkThreads( viewer: Viewer, threadIDs: $ReadOnlyArray, checks: $ReadOnlyArray, ): Promise> { if (viewer.isScriptViewer) { // script viewers are all-powerful return new Set(threadIDs); } const threadRows = await getValidThreads(viewer, threadIDs, checks); return new Set(threadRows.map(row => row.threadID)); } type PartialMembershipRow = { +threadID: string, +role: number, +permissions: ThreadPermissionsBlob, }; async function getValidThreads( viewer: Viewer, threadIDs: $ReadOnlyArray, checks: $ReadOnlyArray, ): Promise { const query = SQL` SELECT thread AS threadID, permissions, role FROM memberships WHERE thread IN (${threadIDs}) AND user = ${viewer.userID} `; const permissionsToCheck = []; for (const check of checks) { if (check.check === 'permission') { permissionsToCheck.push(check.permission); } } const [[result], disabledThreadIDs] = await Promise.all([ dbQuery(query), checkThreadsFrozen(viewer, permissionsToCheck, threadIDs), ]); return result .map(row => ({ ...row, threadID: row.threadID.toString(), permissions: JSON.parse(row.permissions), })) .filter( row => isThreadValid(row.permissions, row.role, checks) && !disabledThreadIDs.has(row.threadID), ); } async function checkThreadsFrozen( viewer: Viewer, permissionsToCheck: $ReadOnlyArray, threadIDs: $ReadOnlyArray, ) { const threadIDsWithDisabledPermissions = new Set(); const permissionMightBeDisabled = permissionsToCheck.some(permission => permissionsDisabledByBlock.has(permission), ); if (!permissionMightBeDisabled) { return threadIDsWithDisabledPermissions; } const [{ threadInfos }, userInfos] = await Promise.all([ fetchThreadInfos(viewer, SQL`t.id IN (${[...threadIDs]})`), fetchKnownUserInfos(viewer), ]); for (const threadID in threadInfos) { const blockedThread = threadFrozenDueToBlock( threadInfos[threadID], viewer.id, userInfos, ); if (blockedThread) { threadIDsWithDisabledPermissions.add(threadID); } } return threadIDsWithDisabledPermissions; } async function checkIfThreadIsBlocked( viewer: Viewer, threadID: string, permission: ThreadPermission, ): Promise { const disabledThreadIDs = await checkThreadsFrozen( viewer, [permission], [threadID], ); return disabledThreadIDs.has(threadID); } async function checkThread( viewer: Viewer, threadID: string, checks: $ReadOnlyArray, ): Promise { const validThreads = await checkThreads(viewer, [threadID], checks); return validThreads.has(threadID); } // We 
pass this into getRoleForPermissions in order to check if a hypothetical // permissions blob would block membership by returning a non-positive result. // It doesn't matter what value we pass in, as long as it's positive. const arbitraryPositiveRole = '1'; type CandidateMembers = { +[key: string]: ?$ReadOnlyArray, ... }; type ValidateCandidateMembersParams = { +threadType: ThreadType, +parentThreadID: ?string, +containingThreadID: ?string, +defaultRolePermissions: ThreadRolePermissionsBlob, }; type ValidateCandidateMembersOptions = { +requireRelationship?: boolean }; async function validateCandidateMembers( viewer: Viewer, candidates: CandidateMembers, params: ValidateCandidateMembersParams, options?: ValidateCandidateMembersOptions, ): Promise { const requireRelationship = options?.requireRelationship ?? true; const allCandidatesSet = new Set(); for (const key in candidates) { const candidateGroup = candidates[key]; if (!candidateGroup) { continue; } for (const candidate of candidateGroup) { allCandidatesSet.add(candidate); } } const allCandidates = [...allCandidatesSet]; const fetchMembersPromise = fetchKnownUserInfos(viewer, allCandidates); const parentPermissionsPromise = (async () => { const parentPermissions = {}; if (!params.parentThreadID || allCandidates.length === 0) { return parentPermissions; } const parentPermissionsQuery = SQL` SELECT user, permissions FROM memberships WHERE thread = ${params.parentThreadID} AND user IN (${allCandidates}) `; const [result] = await dbQuery(parentPermissionsQuery); for (const row of result) { parentPermissions[row.user.toString()] = JSON.parse(row.permissions); } return parentPermissions; })(); const memberOfContainingThreadPromise: Promise< Map, > = (async () => { const results = new Map(); if (allCandidates.length === 0) { return results; } if (!params.containingThreadID) { for (const userID of allCandidates) { results.set(userID, 'no-containing-thread'); } return results; } const memberOfContainingThreadQuery = SQL` SELECT user, role AS containing_role FROM memberships WHERE thread = ${params.containingThreadID} AND user IN (${allCandidates}) `; const [result] = await dbQuery(memberOfContainingThreadQuery); for (const row of result) { results.set( row.user.toString(), row.containing_role > 0 ? 
'member' : 'non-member', ); } return results; })(); const [ fetchedMembers, parentPermissions, memberOfContainingThread, ] = await Promise.all([ fetchMembersPromise, parentPermissionsPromise, memberOfContainingThreadPromise, ]); const ignoreMembers = new Set(); for (const memberID of allCandidates) { const member = fetchedMembers[memberID]; if (!member && requireRelationship) { ignoreMembers.add(memberID); continue; } const relationshipStatus = member?.relationshipStatus; const memberRelationshipHasBlock = !!( relationshipStatus && relationshipBlockedInEitherDirection(relationshipStatus) ); if (memberRelationshipHasBlock) { ignoreMembers.add(memberID); continue; } const permissionsFromParent = parentPermissions[memberID]; if (memberOfContainingThread.get(memberID) === 'non-member') { ignoreMembers.add(memberID); continue; } if ( memberOfContainingThread.get(memberID) === 'no-containing-thread' && relationshipStatus !== userRelationshipStatus.FRIEND && requireRelationship ) { ignoreMembers.add(memberID); continue; } const permissions = makePermissionsBlob( params.defaultRolePermissions, permissionsFromParent, '-1', params.threadType, ); if (!permissions) { ignoreMembers.add(memberID); continue; } const targetRole = getRoleForPermissions( arbitraryPositiveRole, permissions, ); if (Number(targetRole) <= 0) { ignoreMembers.add(memberID); continue; } } if (ignoreMembers.size === 0) { return candidates; } const result = {}; for (const key in candidates) { const candidateGroup = candidates[key]; if (!candidateGroup) { result[key] = candidates[key]; continue; } result[key] = []; for (const candidate of candidateGroup) { if (!ignoreMembers.has(candidate)) { result[key].push(candidate); } } } return result; } export { fetchThreadPermissionsBlob, checkThreadPermission, viewerIsMember, checkThreads, getValidThreads, checkThread, checkIfThreadIsBlocked, validateCandidateMembers, }; diff --git a/keyserver/src/fetchers/update-fetchers.js b/keyserver/src/fetchers/update-fetchers.js index c2e89886a..0d33e07f7 100644 --- a/keyserver/src/fetchers/update-fetchers.js +++ b/keyserver/src/fetchers/update-fetchers.js @@ -1,173 +1,173 @@ // @flow import invariant from 'invariant'; -import type { CalendarQuery } from 'lib/types/entry-types'; +import type { CalendarQuery } from 'lib/types/entry-types.js'; import { type RawUpdateInfo, updateTypes, assertUpdateType, -} from 'lib/types/update-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/update-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import type { ViewerInfo } from '../creators/update-creator'; +import type { ViewerInfo } from '../creators/update-creator.js'; import { type FetchUpdatesResult, fetchUpdateInfosWithRawUpdateInfos, -} from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import type { Viewer } from '../session/viewer'; +} from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import type { Viewer } from '../session/viewer.js'; const defaultUpdateFetchResult = { updateInfos: [], userInfos: {} }; async function fetchUpdateInfosWithQuery( viewerInfo: ViewerInfo, query: SQLStatementType, ): Promise { if (!viewerInfo.viewer.loggedIn) { throw new ServerError('not_logged_in'); } if (viewerInfo.viewer.isScriptViewer) { return defaultUpdateFetchResult; } const [result] = await dbQuery(query); const rawUpdateInfos = 
[]; for (const row of result) { rawUpdateInfos.push(rawUpdateInfoFromRow(row)); } return await fetchUpdateInfosWithRawUpdateInfos(rawUpdateInfos, viewerInfo); } function fetchUpdateInfos( viewer: Viewer, currentAsOf: number, calendarQuery: CalendarQuery, ): Promise { const query = SQL` SELECT id, type, content, time FROM updates WHERE user = ${viewer.id} AND time > ${currentAsOf} AND (updater IS NULL OR updater != ${viewer.session}) AND (target IS NULL OR target = ${viewer.session}) ORDER BY time ASC `; return fetchUpdateInfosWithQuery({ viewer, calendarQuery }, query); } function rawUpdateInfoFromRow(row: Object): RawUpdateInfo { const type = assertUpdateType(row.type); if (type === updateTypes.DELETE_ACCOUNT) { const content = JSON.parse(row.content); return { type: updateTypes.DELETE_ACCOUNT, id: row.id.toString(), time: row.time, deletedUserID: content.deletedUserID, }; } else if (type === updateTypes.UPDATE_THREAD) { const { threadID } = JSON.parse(row.content); return { type: updateTypes.UPDATE_THREAD, id: row.id.toString(), time: row.time, threadID, }; } else if (type === updateTypes.UPDATE_THREAD_READ_STATUS) { const { threadID, unread } = JSON.parse(row.content); return { type: updateTypes.UPDATE_THREAD_READ_STATUS, id: row.id.toString(), time: row.time, threadID, unread, }; } else if (type === updateTypes.DELETE_THREAD) { const { threadID } = JSON.parse(row.content); return { type: updateTypes.DELETE_THREAD, id: row.id.toString(), time: row.time, threadID, }; } else if (type === updateTypes.JOIN_THREAD) { const { threadID } = JSON.parse(row.content); return { type: updateTypes.JOIN_THREAD, id: row.id.toString(), time: row.time, threadID, }; } else if (type === updateTypes.BAD_DEVICE_TOKEN) { const { deviceToken } = JSON.parse(row.content); return { type: updateTypes.BAD_DEVICE_TOKEN, id: row.id.toString(), time: row.time, deviceToken, }; } else if (type === updateTypes.UPDATE_ENTRY) { const { entryID } = JSON.parse(row.content); return { type: updateTypes.UPDATE_ENTRY, id: row.id.toString(), time: row.time, entryID, }; } else if (type === updateTypes.UPDATE_CURRENT_USER) { return { type: updateTypes.UPDATE_CURRENT_USER, id: row.id.toString(), time: row.time, }; } else if (type === updateTypes.UPDATE_USER) { const content = JSON.parse(row.content); return { type: updateTypes.UPDATE_USER, id: row.id.toString(), time: row.time, updatedUserID: content.updatedUserID, }; } invariant(false, `unrecognized updateType ${type}`); } const entryIDExtractString = '$.entryID'; function fetchUpdateInfoForEntryUpdate( viewer: Viewer, entryID: string, ): Promise { const query = SQL` SELECT id, type, content, time FROM updates WHERE user = ${viewer.id} AND type = ${updateTypes.UPDATE_ENTRY} AND JSON_EXTRACT(content, ${entryIDExtractString}) = ${entryID} ORDER BY time DESC LIMIT 1 `; return fetchUpdateInfosWithQuery({ viewer }, query); } const threadIDExtractString = '$.threadID'; function fetchUpdateInfoForThreadDeletion( viewer: Viewer, threadID: string, ): Promise { const query = SQL` SELECT id, type, content, time FROM updates WHERE user = ${viewer.id} AND type = ${updateTypes.DELETE_THREAD} AND JSON_EXTRACT(content, ${threadIDExtractString}) = ${threadID} ORDER BY time DESC LIMIT 1 `; return fetchUpdateInfosWithQuery({ viewer }, query); } export { fetchUpdateInfos, fetchUpdateInfoForEntryUpdate, fetchUpdateInfoForThreadDeletion, }; diff --git a/keyserver/src/fetchers/upload-fetchers.js b/keyserver/src/fetchers/upload-fetchers.js index 68382740d..6920b7216 100644 --- 
a/keyserver/src/fetchers/upload-fetchers.js +++ b/keyserver/src/fetchers/upload-fetchers.js @@ -1,209 +1,209 @@ // @flow -import _keyBy from 'lodash/fp/keyBy'; +import _keyBy from 'lodash/fp/keyBy.js'; -import type { Media } from 'lib/types/media-types'; +import type { Media } from 'lib/types/media-types.js'; import type { MediaMessageServerDBContent } from 'lib/types/messages/media.js'; import { getUploadIDsFromMediaMessageServerDBContents } from 'lib/types/messages/media.js'; -import { ServerError } from 'lib/utils/errors'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; -import { getAndAssertCommAppURLFacts } from '../utils/urls'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; +import { getAndAssertCommAppURLFacts } from '../utils/urls.js'; type UploadInfo = { content: Buffer, mime: string, }; async function fetchUpload( viewer: Viewer, id: string, secret: string, ): Promise { const query = SQL` SELECT content, mime FROM uploads WHERE id = ${id} AND secret = ${secret} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('invalid_parameters'); } const [row] = result; const { content, mime } = row; return { content, mime }; } async function fetchUploadChunk( id: string, secret: string, pos: number, len: number, ): Promise { // We use pos + 1 because SQL is 1-indexed whereas js is 0-indexed const query = SQL` SELECT SUBSTRING(content, ${pos + 1}, ${len}) AS content, mime FROM uploads WHERE id = ${id} AND secret = ${secret} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('invalid_parameters'); } const [row] = result; const { content, mime } = row; return { content, mime, }; } // Returns total size in bytes. 
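// Illustrative sketch (not part of this patch): getUploadSize below, together
// with fetchUploadChunk above, is presumably what allows a download responder
// to serve partial content. Assuming hypothetical `id`/`secret` values and a
// request for "bytes=0-1023":
//
//   const totalBytes = await getUploadSize(id, secret);
//   const start = 0; // 0-indexed, as in JS
//   const end = Math.min(1023, totalBytes - 1);
//   const { content, mime } = await fetchUploadChunk(
//     id,
//     secret,
//     start,
//     end - start + 1,
//   );
//   // internally this runs SUBSTRING(content, start + 1, len),
//   // since SQL is 1-indexed
//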
async function getUploadSize(id: string, secret: string): Promise { const query = SQL` SELECT LENGTH(content) AS length FROM uploads WHERE id = ${id} AND secret = ${secret} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('invalid_parameters'); } const [row] = result; const { length } = row; return length; } function getUploadURL(id: string, secret: string): string { const { baseDomain, basePath } = getAndAssertCommAppURLFacts(); return `${baseDomain}${basePath}upload/${id}/${secret}`; } function mediaFromRow(row: Object): Media { const uploadExtra = JSON.parse(row.uploadExtra); const { width, height, loop } = uploadExtra; const { uploadType: type, uploadSecret: secret } = row; const id = row.uploadID.toString(); const dimensions = { width, height }; const uri = getUploadURL(id, secret); if (type === 'photo') { return { id, type: 'photo', uri, dimensions }; } else if (loop) { // $FlowFixMe add thumbnailID, thumbnailURI once they're in DB return { id, type: 'video', uri, dimensions, loop }; } else { // $FlowFixMe add thumbnailID, thumbnailURI once they're in DB return { id, type: 'video', uri, dimensions }; } } async function fetchMedia( viewer: Viewer, mediaIDs: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const query = SQL` SELECT id AS uploadID, secret AS uploadSecret, type AS uploadType, extra AS uploadExtra FROM uploads WHERE id IN (${mediaIDs}) AND uploader = ${viewer.id} AND container IS NULL `; const [result] = await dbQuery(query); return result.map(mediaFromRow); } async function fetchUploadsForMessage( viewer: Viewer, mediaMessageContents: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const uploadIDs = getUploadIDsFromMediaMessageServerDBContents( mediaMessageContents, ); const query = SQL` SELECT id AS uploadID, secret AS uploadSecret, type AS uploadType, extra AS uploadExtra FROM uploads WHERE id IN (${uploadIDs}) AND uploader = ${viewer.id} AND container IS NULL `; const [uploads] = await dbQuery(query); return uploads; } async function fetchMediaFromMediaMessageContent( viewer: Viewer, mediaMessageContents: $ReadOnlyArray, ): Promise<$ReadOnlyArray> { const uploads = await fetchUploadsForMessage(viewer, mediaMessageContents); return constructMediaFromMediaMessageContentsAndUploadRows( mediaMessageContents, uploads, ); } function constructMediaFromMediaMessageContentsAndUploadRows( mediaMessageContents: $ReadOnlyArray, uploadRows: $ReadOnlyArray, ): $ReadOnlyArray { const uploadMap = _keyBy('uploadID')(uploadRows); const media: Media[] = []; for (const mediaMessageContent of mediaMessageContents) { const primaryUploadID = mediaMessageContent.uploadID; const primaryUpload = uploadMap[primaryUploadID]; const primaryUploadSecret = primaryUpload.uploadSecret; const primaryUploadURI = getUploadURL(primaryUploadID, primaryUploadSecret); const uploadExtra = JSON.parse(primaryUpload.uploadExtra); const { width, height, loop } = uploadExtra; const dimensions = { width, height }; if (mediaMessageContent.type === 'photo') { media.push({ type: 'photo', id: primaryUploadID, uri: primaryUploadURI, dimensions, }); continue; } const thumbnailUploadID = mediaMessageContent.thumbnailUploadID; const thumbnailUpload = uploadMap[thumbnailUploadID]; const thumbnailUploadSecret = thumbnailUpload.uploadSecret; const thumbnailUploadURI = getUploadURL( thumbnailUploadID, thumbnailUploadSecret, ); const video = { type: 'video', id: primaryUploadID, uri: primaryUploadURI, dimensions, thumbnailID: thumbnailUploadID, thumbnailURI: thumbnailUploadURI, }; 
media.push(loop ? { ...video, loop } : video); } return media; } export { fetchUpload, fetchUploadChunk, getUploadSize, getUploadURL, mediaFromRow, fetchMedia, fetchMediaFromMediaMessageContent, constructMediaFromMediaMessageContentsAndUploadRows, }; diff --git a/keyserver/src/fetchers/user-fetchers.js b/keyserver/src/fetchers/user-fetchers.js index 3d0bf9c78..c132e018e 100644 --- a/keyserver/src/fetchers/user-fetchers.js +++ b/keyserver/src/fetchers/user-fetchers.js @@ -1,316 +1,316 @@ // @flow -import { hasMinCodeVersion } from 'lib/shared/version-utils'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import { undirectedStatus, directedStatus, userRelationshipStatus, -} from 'lib/types/relationship-types'; +} from 'lib/types/relationship-types.js'; import { communityThreadTypes, threadPermissions, -} from 'lib/types/thread-types'; +} from 'lib/types/thread-types.js'; import type { UserInfos, CurrentUserInfo, OldCurrentUserInfo, LoggedInUserInfo, OldLoggedInUserInfo, GlobalUserInfo, -} from 'lib/types/user-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/user-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function fetchUserInfos( userIDs: string[], ): Promise<{ [id: string]: GlobalUserInfo }> { if (userIDs.length <= 0) { return {}; } const query = SQL` SELECT id, username FROM users WHERE id IN (${userIDs}) `; const [result] = await dbQuery(query); const userInfos = {}; for (const row of result) { const id = row.id.toString(); userInfos[id] = { id, username: row.username, }; } for (const userID of userIDs) { if (!userInfos[userID]) { userInfos[userID] = { id: userID, username: null, }; } } return userInfos; } async function fetchKnownUserInfos( viewer: Viewer, userIDs?: $ReadOnlyArray, ): Promise { if (!viewer.loggedIn) { return {}; } if (userIDs && userIDs.length === 0) { return {}; } const query = SQL` SELECT ru.user1, ru.user2, u.username, ru.status AS undirected_status, rd1.status AS user1_directed_status, rd2.status AS user2_directed_status FROM relationships_undirected ru LEFT JOIN relationships_directed rd1 ON rd1.user1 = ru.user1 AND rd1.user2 = ru.user2 LEFT JOIN relationships_directed rd2 ON rd2.user1 = ru.user2 AND rd2.user2 = ru.user1 LEFT JOIN users u ON u.id != ${viewer.userID} AND (u.id = ru.user1 OR u.id = ru.user2) `; if (userIDs) { query.append(SQL` WHERE (ru.user1 = ${viewer.userID} AND ru.user2 IN (${userIDs})) OR (ru.user1 IN (${userIDs}) AND ru.user2 = ${viewer.userID}) `); } else { query.append(SQL` WHERE ru.user1 = ${viewer.userID} OR ru.user2 = ${viewer.userID} `); } query.append(SQL` UNION SELECT id AS user1, NULL AS user2, username, CAST(NULL AS UNSIGNED) AS undirected_status, CAST(NULL AS UNSIGNED) AS user1_directed_status, CAST(NULL AS UNSIGNED) AS user2_directed_status FROM users WHERE id = ${viewer.userID} `); const [result] = await dbQuery(query); const userInfos = {}; for (const row of result) { const user1 = row.user1.toString(); const user2 = row.user2 ? row.user2.toString() : null; const id = user1 === viewer.userID && user2 ? 
user2 : user1; const userInfo = { id, username: row.username, }; if (!user2) { userInfos[id] = userInfo; continue; } let viewerDirectedStatus; let targetDirectedStatus; if (user1 === viewer.userID) { viewerDirectedStatus = row.user1_directed_status; targetDirectedStatus = row.user2_directed_status; } else { viewerDirectedStatus = row.user2_directed_status; targetDirectedStatus = row.user1_directed_status; } const viewerBlockedTarget = viewerDirectedStatus === directedStatus.BLOCKED; const targetBlockedViewer = targetDirectedStatus === directedStatus.BLOCKED; const friendshipExists = row.undirected_status === undirectedStatus.FRIEND; const viewerRequestedTargetFriendship = viewerDirectedStatus === directedStatus.PENDING_FRIEND; const targetRequestedViewerFriendship = targetDirectedStatus === directedStatus.PENDING_FRIEND; let relationshipStatus; if (viewerBlockedTarget && targetBlockedViewer) { relationshipStatus = userRelationshipStatus.BOTH_BLOCKED; } else if (targetBlockedViewer) { relationshipStatus = userRelationshipStatus.BLOCKED_VIEWER; } else if (viewerBlockedTarget) { relationshipStatus = userRelationshipStatus.BLOCKED_BY_VIEWER; } else if (friendshipExists) { relationshipStatus = userRelationshipStatus.FRIEND; } else if (targetRequestedViewerFriendship) { relationshipStatus = userRelationshipStatus.REQUEST_RECEIVED; } else if (viewerRequestedTargetFriendship) { relationshipStatus = userRelationshipStatus.REQUEST_SENT; } userInfos[id] = userInfo; if (relationshipStatus) { userInfos[id].relationshipStatus = relationshipStatus; } if (relationshipStatus && !row.username) { console.warn( `user ${viewer.userID} has ${relationshipStatus} relationship with ` + `anonymous user ${id}`, ); } } return userInfos; } async function verifyUserIDs( userIDs: $ReadOnlyArray, ): Promise { if (userIDs.length === 0) { return []; } const query = SQL`SELECT id FROM users WHERE id IN (${userIDs})`; const [result] = await dbQuery(query); return result.map(row => row.id.toString()); } async function verifyUserOrCookieIDs( ids: $ReadOnlyArray, ): Promise { if (ids.length === 0) { return []; } const query = SQL` SELECT id FROM users WHERE id IN (${ids}) UNION SELECT id FROM cookies WHERE id IN (${ids}) `; const [result] = await dbQuery(query); return result.map(row => row.id.toString()); } async function fetchCurrentUserInfo( viewer: Viewer, ): Promise { if (!viewer.loggedIn) { return ({ id: viewer.cookieID, anonymous: true }: CurrentUserInfo); } const currentUserInfo = await fetchLoggedInUserInfo(viewer); return currentUserInfo; } async function fetchLoggedInUserInfo( viewer: Viewer, ): Promise { const userQuery = SQL` SELECT id, username FROM users WHERE id = ${viewer.userID} `; const settingsQuery = SQL` SELECT name, data FROM settings WHERE user = ${viewer.userID} `; const [[userResult], [settingsResult]] = await Promise.all([ dbQuery(userQuery), dbQuery(settingsQuery), ]); const [userRow] = userResult; const stillExpectsEmailFields = !hasMinCodeVersion( viewer.platformDetails, 87, ); if (!userRow) { throw new ServerError('unknown_error'); } const id = userRow.id.toString(); const { username } = userRow; if (stillExpectsEmailFields) { return { id, username, email: 'removed from DB', emailVerified: true, }; } const featureGateSettings = !hasMinCodeVersion(viewer.platformDetails, 1000); if (featureGateSettings) { return { id, username }; } const settings = settingsResult.reduce((prev, curr) => { prev[curr.name] = curr.data; return prev; }, {}); return { id, username, settings }; } async function 
fetchAllUserIDs(): Promise { const query = SQL`SELECT id FROM users`; const [result] = await dbQuery(query); return result.map(row => row.id.toString()); } async function fetchUsername(id: string): Promise { const query = SQL`SELECT username FROM users WHERE id = ${id}`; const [result] = await dbQuery(query); if (result.length === 0) { return null; } const row = result[0]; return row.username; } async function fetchKeyserverAdminID(): Promise { const changeRoleExtractString = `$.${threadPermissions.CHANGE_ROLE}`; const query = SQL` SELECT m.user FROM memberships m INNER JOIN roles r ON m.role = r.id INNER JOIN threads t ON r.thread = t.id WHERE r.name = "Admins" AND t.type IN (${communityThreadTypes}) AND JSON_EXTRACT(r.permissions, ${changeRoleExtractString}) IS TRUE `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } if (result.length > 1) { console.warn('more than one community admin found'); } return result[0].user; } async function fetchUserIDForEthereumAddress( address: string, ): Promise { const query = SQL` SELECT id FROM users WHERE LCASE(ethereum_address) = LCASE(${address}) `; const [result] = await dbQuery(query); return result.length === 0 ? null : result[0].id.toString(); } export { fetchUserInfos, fetchLoggedInUserInfo, verifyUserIDs, verifyUserOrCookieIDs, fetchCurrentUserInfo, fetchAllUserIDs, fetchUsername, fetchKnownUserInfos, fetchKeyserverAdminID, fetchUserIDForEthereumAddress, }; diff --git a/keyserver/src/keyserver.js b/keyserver/src/keyserver.js index 5dec1ca71..11eb5c4aa 100644 --- a/keyserver/src/keyserver.js +++ b/keyserver/src/keyserver.js @@ -1,175 +1,175 @@ // @flow import cluster from 'cluster'; import cookieParser from 'cookie-parser'; import express from 'express'; import expressWs from 'express-ws'; import os from 'os'; -import './cron/cron'; -import { migrate } from './database/migrations'; -import { jsonEndpoints } from './endpoints'; -import { emailSubscriptionResponder } from './responders/comm-landing-responders'; +import './cron/cron.js'; +import { migrate } from './database/migrations.js'; +import { jsonEndpoints } from './endpoints.js'; +import { emailSubscriptionResponder } from './responders/comm-landing-responders.js'; import { jsonHandler, httpGetHandler, downloadHandler, htmlHandler, uploadHandler, -} from './responders/handlers'; -import landingHandler from './responders/landing-handler'; -import { errorReportDownloadResponder } from './responders/report-responders'; +} from './responders/handlers.js'; +import landingHandler from './responders/landing-handler.js'; +import { errorReportDownloadResponder } from './responders/report-responders.js'; import { createNewVersionResponder, markVersionDeployedResponder, -} from './responders/version-responders'; -import { websiteResponder } from './responders/website-responders'; -import { onConnection } from './socket/socket'; +} from './responders/version-responders.js'; +import { websiteResponder } from './responders/website-responders.js'; +import { onConnection } from './socket/socket.js'; import { multerProcessor, multimediaUploadResponder, uploadDownloadResponder, -} from './uploads/uploads'; +} from './uploads/uploads.js'; import { prefetchAllURLFacts, getSquadCalURLFacts, getLandingURLFacts, getCommAppURLFacts, -} from './utils/urls'; +} from './utils/urls.js'; (async () => { await prefetchAllURLFacts(); const squadCalBaseRoutePath = getSquadCalURLFacts()?.baseRoutePath; const landingBaseRoutePath = getLandingURLFacts()?.baseRoutePath; const commAppBaseRoutePath 
= getCommAppURLFacts()?.baseRoutePath; const compiledFolderOptions = process.env.NODE_ENV === 'development' ? undefined : { maxAge: '1y', immutable: true }; if (cluster.isMaster) { const didMigrationsSucceed: boolean = await migrate(); if (!didMigrationsSucceed) { // The following line uses exit code 2 to ensure nodemon exits // in a dev environment, instead of restarting. Context provided // in https://github.com/remy/nodemon/issues/751 process.exit(2); } const cpuCount = os.cpus().length; for (let i = 0; i < cpuCount; i++) { cluster.fork(); } cluster.on('exit', () => cluster.fork()); } else { const server = express(); expressWs(server); server.use(express.json({ limit: '250mb' })); server.use(cookieParser()); const setupAppRouter = router => { router.use('/images', express.static('images')); router.use('/fonts', express.static('fonts')); router.use('/misc', express.static('misc')); router.use( '/.well-known', express.static( '.well-known', // Necessary for apple-app-site-association file { setHeaders: res => res.setHeader('Content-Type', 'application/json'), }, ), ); router.use( '/compiled', express.static('app_compiled', compiledFolderOptions), ); router.use('/', express.static('icons')); for (const endpoint in jsonEndpoints) { // $FlowFixMe Flow thinks endpoint is string const responder = jsonEndpoints[endpoint]; const expectCookieInvalidation = endpoint === 'log_out'; router.post( `/${endpoint}`, jsonHandler(responder, expectCookieInvalidation), ); } router.get( '/create_version/:deviceType/:codeVersion', httpGetHandler(createNewVersionResponder), ); router.get( '/mark_version_deployed/:deviceType/:codeVersion', httpGetHandler(markVersionDeployedResponder), ); router.get( '/download_error_report/:reportID', downloadHandler(errorReportDownloadResponder), ); router.get( '/upload/:uploadID/:secret', downloadHandler(uploadDownloadResponder), ); // $FlowFixMe express-ws has side effects that can't be typed router.ws('/ws', onConnection); router.get('*', htmlHandler(websiteResponder)); router.post( '/upload_multimedia', multerProcessor, uploadHandler(multimediaUploadResponder), ); }; // Note - the order of router declarations matters. On prod we have // squadCalBaseRoutePath configured to '/', which means it's a catch-all. If // we call server.use on squadCalRouter first, it will catch all requests // and prevent commAppRouter and landingRouter from working correctly. 
So we // make sure that squadCalRouter goes last if (landingBaseRoutePath) { const landingRouter = express.Router(); landingRouter.use('/images', express.static('images')); landingRouter.use('/fonts', express.static('fonts')); landingRouter.use( '/compiled', express.static('landing_compiled', compiledFolderOptions), ); landingRouter.use('/', express.static('landing_icons')); landingRouter.post('/subscribe_email', emailSubscriptionResponder); landingRouter.get('*', landingHandler); server.use(landingBaseRoutePath, landingRouter); } if (commAppBaseRoutePath) { const commAppRouter = express.Router(); setupAppRouter(commAppRouter); server.use(commAppBaseRoutePath, commAppRouter); } if (squadCalBaseRoutePath) { const squadCalRouter = express.Router(); setupAppRouter(squadCalRouter); server.use(squadCalBaseRoutePath, squadCalRouter); } const listenAddress = (() => { if (process.env.COMM_LISTEN_ADDR) { return process.env.COMM_LISTEN_ADDR; } else if (process.env.NODE_ENV === 'development') { return undefined; } else { return 'localhost'; } })(); server.listen(parseInt(process.env.PORT, 10) || 3000, listenAddress); } })(); diff --git a/keyserver/src/push/providers.js b/keyserver/src/push/providers.js index ff5191326..1c5ec81fd 100644 --- a/keyserver/src/push/providers.js +++ b/keyserver/src/push/providers.js @@ -1,91 +1,91 @@ // @flow import apn from '@parse/node-apn'; import type { Provider as APNProvider } from '@parse/node-apn'; import fcmAdmin from 'firebase-admin'; import type { FirebaseApp } from 'firebase-admin'; import invariant from 'invariant'; -import { importJSON } from '../utils/import-json'; +import { importJSON } from '../utils/import-json.js'; type APNPushProfile = 'apn_config' | 'comm_apn_config'; function getAPNPushProfileForCodeVersion(codeVersion: ?number): APNPushProfile { return codeVersion && codeVersion >= 87 ? 'comm_apn_config' : 'apn_config'; } type FCMPushProfile = 'fcm_config' | 'comm_fcm_config'; function getFCMPushProfileForCodeVersion(codeVersion: ?number): FCMPushProfile { return codeVersion && codeVersion >= 87 ? 
'comm_fcm_config' : 'fcm_config'; } const cachedAPNProviders = new Map(); async function getAPNProvider(profile: APNPushProfile): Promise { const provider = cachedAPNProviders.get(profile); if (provider !== undefined) { return provider; } try { const apnConfig = await importJSON({ folder: 'secrets', name: profile }); invariant(apnConfig, `APN config missing for ${profile}`); if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, new apn.Provider(apnConfig)); } } catch { if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, null); } } return cachedAPNProviders.get(profile); } const cachedFCMProviders = new Map(); async function getFCMProvider(profile: FCMPushProfile): Promise { const provider = cachedFCMProviders.get(profile); if (provider !== undefined) { return provider; } try { const fcmConfig = await importJSON({ folder: 'secrets', name: profile }); invariant(fcmConfig, `FCM config missed for ${profile}`); if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set( profile, fcmAdmin.initializeApp( { credential: fcmAdmin.credential.cert(fcmConfig), }, profile, ), ); } } catch { if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set(profile, null); } } return cachedFCMProviders.get(profile); } function endFirebase() { fcmAdmin.apps?.forEach(app => app?.delete()); } function endAPNs() { for (const provider of cachedAPNProviders.values()) { provider?.shutdown(); } } function getAPNsNotificationTopic(codeVersion: ?number): string { return codeVersion && codeVersion >= 87 ? 'app.comm' : 'org.squadcal.app'; } export { getAPNPushProfileForCodeVersion, getFCMPushProfileForCodeVersion, getAPNProvider, getFCMProvider, endFirebase, endAPNs, getAPNsNotificationTopic, }; diff --git a/keyserver/src/push/rescind.js b/keyserver/src/push/rescind.js index 5e737e24e..c6cdec721 100644 --- a/keyserver/src/push/rescind.js +++ b/keyserver/src/push/rescind.js @@ -1,201 +1,201 @@ // @flow import apn from '@parse/node-apn'; import invariant from 'invariant'; -import { threadSubscriptions } from 'lib/types/subscription-types'; -import { threadPermissions } from 'lib/types/thread-types'; -import { promiseAll } from 'lib/utils/promises'; +import { threadSubscriptions } from 'lib/types/subscription-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { promiseAll } from 'lib/utils/promises.js'; -import createIDs from '../creators/id-creator'; -import { dbQuery, SQL } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import { getAPNsNotificationTopic } from './providers'; -import { apnPush, fcmPush } from './utils'; +import createIDs from '../creators/id-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import { getAPNsNotificationTopic } from './providers.js'; +import { apnPush, fcmPush } from './utils.js'; async function rescindPushNotifs( notifCondition: SQLStatementType, inputCountCondition?: SQLStatementType, ) { const notificationExtractString = `$.${threadSubscriptions.home}`; const visPermissionExtractString = `$.${threadPermissions.VISIBLE}.value`; const fetchQuery = SQL` SELECT n.id, n.user, n.thread, n.message, n.delivery, n.collapse_key, COUNT( `; fetchQuery.append(inputCountCondition ? 
inputCountCondition : SQL`m.thread`); fetchQuery.append(SQL` ) AS unread_count FROM notifications n LEFT JOIN memberships m ON m.user = n.user AND m.last_message > m.last_read_message AND m.role > 0 AND JSON_EXTRACT(subscription, ${notificationExtractString}) AND JSON_EXTRACT(permissions, ${visPermissionExtractString}) WHERE n.rescinded = 0 AND `); fetchQuery.append(notifCondition); fetchQuery.append(SQL` GROUP BY n.id, m.user`); const [fetchResult] = await dbQuery(fetchQuery); const deliveryPromises = {}; const notifInfo = {}; const rescindedIDs = []; for (const row of fetchResult) { const rawDelivery = JSON.parse(row.delivery); const deliveries = Array.isArray(rawDelivery) ? rawDelivery : [rawDelivery]; const id = row.id.toString(); const threadID = row.thread.toString(); notifInfo[id] = { userID: row.user.toString(), threadID, messageID: row.message.toString(), }; for (const delivery of deliveries) { if (delivery.iosID && delivery.iosDeviceTokens) { // Old iOS const notification = prepareIOSNotification( delivery.iosID, row.unread_count, threadID, ); deliveryPromises[id] = apnPush({ notification, deviceTokens: delivery.iosDeviceTokens, codeVersion: null, }); } else if (delivery.androidID) { // Old Android const notification = prepareAndroidNotification( row.collapse_key ? row.collapse_key : id, row.unread_count, threadID, ); deliveryPromises[id] = fcmPush({ notification, deviceTokens: delivery.androidDeviceTokens, codeVersion: null, }); } else if (delivery.deviceType === 'ios') { // New iOS const { iosID, deviceTokens, codeVersion } = delivery; const notification = prepareIOSNotification( iosID, row.unread_count, threadID, codeVersion, ); deliveryPromises[id] = apnPush({ notification, deviceTokens, codeVersion, }); } else if (delivery.deviceType === 'android') { // New Android const { deviceTokens, codeVersion } = delivery; const notification = prepareAndroidNotification( row.collapse_key ? 
row.collapse_key : id, row.unread_count, threadID, ); deliveryPromises[id] = fcmPush({ notification, deviceTokens, codeVersion, }); } } rescindedIDs.push(row.id); } const numRescinds = Object.keys(deliveryPromises).length; const promises = [promiseAll(deliveryPromises)]; if (numRescinds > 0) { promises.push(createIDs('notifications', numRescinds)); } if (rescindedIDs.length > 0) { const rescindQuery = SQL` UPDATE notifications SET rescinded = 1 WHERE id IN (${rescindedIDs}) `; promises.push(dbQuery(rescindQuery)); } const [deliveryResults, dbIDs] = await Promise.all(promises); const newNotifRows = []; if (numRescinds > 0) { invariant(dbIDs, 'dbIDs should be set'); for (const rescindedID in deliveryResults) { const delivery = {}; delivery.source = 'rescind'; delivery.rescindedID = rescindedID; const { errors } = deliveryResults[rescindedID]; if (errors) { delivery.errors = errors; } const dbID = dbIDs.shift(); const { userID, threadID, messageID } = notifInfo[rescindedID]; newNotifRows.push([ dbID, userID, threadID, messageID, null, JSON.stringify([delivery]), 1, ]); } } if (newNotifRows.length > 0) { const insertQuery = SQL` INSERT INTO notifications (id, user, thread, message, collapse_key, delivery, rescinded) VALUES ${newNotifRows} `; await dbQuery(insertQuery); } } function prepareIOSNotification( iosID: string, unreadCount: number, threadID: string, codeVersion: ?number, ): apn.Notification { const notification = new apn.Notification(); notification.contentAvailable = true; notification.topic = getAPNsNotificationTopic(codeVersion); notification.priority = 5; notification.pushType = 'background'; notification.payload = codeVersion && codeVersion > 135 ? { backgroundNotifType: 'CLEAR', notificationId: iosID, setUnreadStatus: true, threadID, } : { managedAps: { action: 'CLEAR', notificationId: iosID, }, }; return notification; } function prepareAndroidNotification( notifID: string, unreadCount: number, threadID: string, ): Object { return { data: { badge: unreadCount.toString(), rescind: 'true', rescindID: notifID, setUnreadStatus: 'true', threadID, }, }; } export { rescindPushNotifs }; diff --git a/keyserver/src/push/send.js b/keyserver/src/push/send.js index f93d1336b..369449042 100644 --- a/keyserver/src/push/send.js +++ b/keyserver/src/push/send.js @@ -1,844 +1,844 @@ // @flow import apn from '@parse/node-apn'; import type { ResponseFailure } from '@parse/node-apn'; import invariant from 'invariant'; -import _cloneDeep from 'lodash/fp/cloneDeep'; -import _flow from 'lodash/fp/flow'; -import _mapValues from 'lodash/fp/mapValues'; -import _pickBy from 'lodash/fp/pickBy'; -import uuidv4 from 'uuid/v4'; +import _cloneDeep from 'lodash/fp/cloneDeep.js'; +import _flow from 'lodash/fp/flow.js'; +import _mapValues from 'lodash/fp/mapValues.js'; +import _pickBy from 'lodash/fp/pickBy.js'; +import uuidv4 from 'uuid/v4.js'; -import { oldValidUsernameRegex } from 'lib/shared/account-utils'; +import { oldValidUsernameRegex } from 'lib/shared/account-utils.js'; import { createMessageInfo, sortMessageInfoList, shimUnsupportedRawMessageInfos, -} from 'lib/shared/message-utils'; -import { messageSpecs } from 'lib/shared/messages/message-specs'; -import { notifTextsForMessageInfo } from 'lib/shared/notif-utils'; +} from 'lib/shared/message-utils.js'; +import { messageSpecs } from 'lib/shared/messages/message-specs.js'; +import { notifTextsForMessageInfo } from 'lib/shared/notif-utils.js'; import { rawThreadInfoFromServerThreadInfo, threadInfoFromRawThreadInfo, -} from 'lib/shared/thread-utils'; 
-import type { DeviceType } from 'lib/types/device-types'; +} from 'lib/shared/thread-utils.js'; +import type { DeviceType } from 'lib/types/device-types.js'; import { type RawMessageInfo, type MessageInfo, messageTypes, -} from 'lib/types/message-types'; -import type { ServerThreadInfo, ThreadInfo } from 'lib/types/thread-types'; -import { updateTypes } from 'lib/types/update-types'; -import { promiseAll } from 'lib/utils/promises'; - -import createIDs from '../creators/id-creator'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL, mergeOrConditions } from '../database/database'; -import type { CollapsableNotifInfo } from '../fetchers/message-fetchers'; -import { fetchCollapsableNotifs } from '../fetchers/message-fetchers'; -import { fetchServerThreadInfos } from '../fetchers/thread-fetchers'; -import { fetchUserInfos } from '../fetchers/user-fetchers'; -import type { Viewer } from '../session/viewer'; -import { getENSNames } from '../utils/ens-cache'; -import { getAPNsNotificationTopic } from './providers'; +} from 'lib/types/message-types.js'; +import type { ServerThreadInfo, ThreadInfo } from 'lib/types/thread-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import { promiseAll } from 'lib/utils/promises.js'; + +import createIDs from '../creators/id-creator.js'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; +import type { CollapsableNotifInfo } from '../fetchers/message-fetchers.js'; +import { fetchCollapsableNotifs } from '../fetchers/message-fetchers.js'; +import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; +import { fetchUserInfos } from '../fetchers/user-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import { getENSNames } from '../utils/ens-cache.js'; +import { getAPNsNotificationTopic } from './providers.js'; import { apnPush, fcmPush, getUnreadCounts, apnMaxNotificationPayloadByteSize, fcmMaxNotificationPayloadByteSize, -} from './utils'; +} from './utils.js'; type Device = { +deviceType: DeviceType, +deviceToken: string, +codeVersion: ?number, }; type PushUserInfo = { +devices: Device[], +messageInfos: RawMessageInfo[], }; type Delivery = IOSDelivery | AndroidDelivery | { collapsedInto: string }; type NotificationRow = { +dbID: string, +userID: string, +threadID?: ?string, +messageID?: ?string, +collapseKey?: ?string, +deliveries: Delivery[], }; export type PushInfo = { [userID: string]: PushUserInfo }; async function sendPushNotifs(pushInfo: PushInfo) { if (Object.keys(pushInfo).length === 0) { return; } const [ unreadCounts, { usersToCollapsableNotifInfo, serverThreadInfos, userInfos }, dbIDs, ] = await Promise.all([ getUnreadCounts(Object.keys(pushInfo)), fetchInfos(pushInfo), createDBIDs(pushInfo), ]); const deliveryPromises = []; const notifications: Map = new Map(); for (const userID in usersToCollapsableNotifInfo) { const threadInfos = _flow( _mapValues((serverThreadInfo: ServerThreadInfo) => { const rawThreadInfo = rawThreadInfoFromServerThreadInfo( serverThreadInfo, userID, ); if (!rawThreadInfo) { return null; } return threadInfoFromRawThreadInfo(rawThreadInfo, userID, userInfos); }), _pickBy(threadInfo => threadInfo), )(serverThreadInfos); for (const notifInfo of usersToCollapsableNotifInfo[userID]) { const hydrateMessageInfo = (rawMessageInfo: RawMessageInfo) => createMessageInfo(rawMessageInfo, userID, userInfos, threadInfos); const newMessageInfos = []; const 
newRawMessageInfos = []; for (const newRawMessageInfo of notifInfo.newMessageInfos) { const newMessageInfo = hydrateMessageInfo(newRawMessageInfo); if (newMessageInfo) { newMessageInfos.push(newMessageInfo); newRawMessageInfos.push(newRawMessageInfo); } } if (newMessageInfos.length === 0) { continue; } const existingMessageInfos = notifInfo.existingMessageInfos .map(hydrateMessageInfo) .filter(Boolean); const allMessageInfos = sortMessageInfoList([ ...newMessageInfos, ...existingMessageInfos, ]); const [ firstNewMessageInfo, ...remainingNewMessageInfos ] = newMessageInfos; const threadID = firstNewMessageInfo.threadID; const threadInfo = threadInfos[threadID]; const updateBadge = threadInfo.currentUser.subscription.home; const displayBanner = threadInfo.currentUser.subscription.pushNotifs; const username = userInfos[userID] && userInfos[userID].username; const userWasMentioned = username && threadInfo.currentUser.role && oldValidUsernameRegex.test(username) && firstNewMessageInfo.type === messageTypes.TEXT && new RegExp(`\\B@${username}\\b`, 'i').test(firstNewMessageInfo.text); if (!updateBadge && !displayBanner && !userWasMentioned) { continue; } const badgeOnly = !displayBanner && !userWasMentioned; const dbID = dbIDs.shift(); invariant(dbID, 'should have sufficient DB IDs'); const byDeviceType = getDevicesByDeviceType(pushInfo[userID].devices); const firstMessageID = firstNewMessageInfo.id; invariant(firstMessageID, 'RawMessageInfo.id should be set on server'); const notificationInfo = { source: 'new_message', dbID, userID, threadID, messageID: firstMessageID, collapseKey: notifInfo.collapseKey, }; const iosVersionsToTokens = byDeviceType.get('ios'); if (iosVersionsToTokens) { for (const [codeVer, deviceTokens] of iosVersionsToTokens) { const codeVersion = parseInt(codeVer, 10); // only for Flow const shimmedNewRawMessageInfos = shimUnsupportedRawMessageInfos( newRawMessageInfos, { platform: 'ios', codeVersion }, ); const deliveryPromise = (async () => { const notification = await prepareIOSNotification( allMessageInfos, shimmedNewRawMessageInfos, threadInfo, notifInfo.collapseKey, badgeOnly, unreadCounts[userID], codeVersion, ); return await sendIOSNotification(notification, [...deviceTokens], { ...notificationInfo, codeVersion, }); })(); deliveryPromises.push(deliveryPromise); } } const androidVersionsToTokens = byDeviceType.get('android'); if (androidVersionsToTokens) { for (const [codeVer, deviceTokens] of androidVersionsToTokens) { const codeVersion = parseInt(codeVer, 10); // only for Flow const shimmedNewRawMessageInfos = shimUnsupportedRawMessageInfos( newRawMessageInfos, { platform: 'android', codeVersion }, ); const deliveryPromise = (async () => { const notification = await prepareAndroidNotification( allMessageInfos, shimmedNewRawMessageInfos, threadInfo, notifInfo.collapseKey, badgeOnly, unreadCounts[userID], dbID, codeVersion, ); return await sendAndroidNotification( notification, [...deviceTokens], { ...notificationInfo, codeVersion, }, ); })(); deliveryPromises.push(deliveryPromise); } } for (const newMessageInfo of remainingNewMessageInfos) { const newDBID = dbIDs.shift(); invariant(newDBID, 'should have sufficient DB IDs'); const messageID = newMessageInfo.id; invariant(messageID, 'RawMessageInfo.id should be set on server'); notifications.set(newDBID, { dbID: newDBID, userID, threadID: newMessageInfo.threadID, messageID, collapseKey: notifInfo.collapseKey, deliveries: [{ collapsedInto: dbID }], }); } } } const cleanUpPromises = []; if (dbIDs.length > 0) { const query 
= SQL`DELETE FROM ids WHERE id IN (${dbIDs})`; cleanUpPromises.push(dbQuery(query)); } const [deliveryResults] = await Promise.all([ Promise.all(deliveryPromises), Promise.all(cleanUpPromises), ]); await saveNotifResults(deliveryResults, notifications, true); } // The results in deliveryResults will be combined with the rows // in rowsToSave and then written to the notifications table async function saveNotifResults( deliveryResults: $ReadOnlyArray, inputRowsToSave: Map, rescindable: boolean, ) { const rowsToSave = new Map(inputRowsToSave); const allInvalidTokens = []; for (const deliveryResult of deliveryResults) { const { info, delivery, invalidTokens } = deliveryResult; const { dbID, userID } = info; const curNotifRow = rowsToSave.get(dbID); if (curNotifRow) { curNotifRow.deliveries.push(delivery); } else { // Ternary expressions for Flow const threadID = info.threadID ? info.threadID : null; const messageID = info.messageID ? info.messageID : null; const collapseKey = info.collapseKey ? info.collapseKey : null; rowsToSave.set(dbID, { dbID, userID, threadID, messageID, collapseKey, deliveries: [delivery], }); } if (invalidTokens) { allInvalidTokens.push({ userID, tokens: invalidTokens, }); } } const notificationRows = []; for (const notification of rowsToSave.values()) { notificationRows.push([ notification.dbID, notification.userID, notification.threadID, notification.messageID, notification.collapseKey, JSON.stringify(notification.deliveries), Number(!rescindable), ]); } const dbPromises = []; if (allInvalidTokens.length > 0) { dbPromises.push(removeInvalidTokens(allInvalidTokens)); } if (notificationRows.length > 0) { const query = SQL` INSERT INTO notifications (id, user, thread, message, collapse_key, delivery, rescinded) VALUES ${notificationRows} `; dbPromises.push(dbQuery(query)); } if (dbPromises.length > 0) { await Promise.all(dbPromises); } } async function fetchInfos(pushInfo: PushInfo) { const usersToCollapsableNotifInfo = await fetchCollapsableNotifs(pushInfo); const threadIDs = new Set(); const threadWithChangedNamesToMessages = new Map(); const addThreadIDsFromMessageInfos = (rawMessageInfo: RawMessageInfo) => { const threadID = rawMessageInfo.threadID; threadIDs.add(threadID); const messageSpec = messageSpecs[rawMessageInfo.type]; if (messageSpec.threadIDs) { for (const id of messageSpec.threadIDs(rawMessageInfo)) { threadIDs.add(id); } } if ( rawMessageInfo.type === messageTypes.CHANGE_SETTINGS && rawMessageInfo.field === 'name' ) { const messages = threadWithChangedNamesToMessages.get(threadID); if (messages) { messages.push(rawMessageInfo.id); } else { threadWithChangedNamesToMessages.set(threadID, [rawMessageInfo.id]); } } }; for (const userID in usersToCollapsableNotifInfo) { for (const notifInfo of usersToCollapsableNotifInfo[userID]) { for (const rawMessageInfo of notifInfo.existingMessageInfos) { addThreadIDsFromMessageInfos(rawMessageInfo); } for (const rawMessageInfo of notifInfo.newMessageInfos) { addThreadIDsFromMessageInfos(rawMessageInfo); } } } const promises = {}; // These threadInfos won't have currentUser set promises.threadResult = fetchServerThreadInfos( SQL`t.id IN (${[...threadIDs]})`, ); if (threadWithChangedNamesToMessages.size > 0) { const typesThatAffectName = [ messageTypes.CHANGE_SETTINGS, messageTypes.CREATE_THREAD, ]; const oldNameQuery = SQL` SELECT IF( JSON_TYPE(JSON_EXTRACT(m.content, "$.name")) = 'NULL', "", JSON_UNQUOTE(JSON_EXTRACT(m.content, "$.name")) ) AS name, m.thread FROM ( SELECT MAX(id) AS id FROM messages WHERE type IN 
(${typesThatAffectName}) AND JSON_EXTRACT(content, "$.name") IS NOT NULL AND`; const threadClauses = []; for (const [threadID, messages] of threadWithChangedNamesToMessages) { threadClauses.push( SQL`(thread = ${threadID} AND id NOT IN (${messages}))`, ); } oldNameQuery.append(mergeOrConditions(threadClauses)); oldNameQuery.append(SQL` GROUP BY thread ) x LEFT JOIN messages m ON m.id = x.id `); promises.oldNames = dbQuery(oldNameQuery); } const { threadResult, oldNames } = await promiseAll(promises); const serverThreadInfos = threadResult.threadInfos; if (oldNames) { const [result] = oldNames; for (const row of result) { const threadID = row.thread.toString(); serverThreadInfos[threadID].name = row.name; } } const userInfos = await fetchNotifUserInfos( serverThreadInfos, usersToCollapsableNotifInfo, ); return { usersToCollapsableNotifInfo, serverThreadInfos, userInfos }; } async function fetchNotifUserInfos( serverThreadInfos: { +[threadID: string]: ServerThreadInfo }, usersToCollapsableNotifInfo: { +[userID: string]: CollapsableNotifInfo[] }, ) { const missingUserIDs = new Set(); for (const threadID in serverThreadInfos) { const serverThreadInfo = serverThreadInfos[threadID]; for (const member of serverThreadInfo.members) { missingUserIDs.add(member.id); } } const addUserIDsFromMessageInfos = (rawMessageInfo: RawMessageInfo) => { missingUserIDs.add(rawMessageInfo.creatorID); const userIDs = messageSpecs[rawMessageInfo.type].userIDs?.(rawMessageInfo) ?? []; for (const userID of userIDs) { missingUserIDs.add(userID); } }; for (const userID in usersToCollapsableNotifInfo) { for (const notifInfo of usersToCollapsableNotifInfo[userID]) { for (const rawMessageInfo of notifInfo.existingMessageInfos) { addUserIDsFromMessageInfos(rawMessageInfo); } for (const rawMessageInfo of notifInfo.newMessageInfos) { addUserIDsFromMessageInfos(rawMessageInfo); } } } return await fetchUserInfos([...missingUserIDs]); } async function createDBIDs(pushInfo: PushInfo): Promise { let numIDsNeeded = 0; for (const userID in pushInfo) { numIDsNeeded += pushInfo[userID].messageInfos.length; } return await createIDs('notifications', numIDsNeeded); } function getDevicesByDeviceType( devices: Device[], ): Map>> { const byDeviceType = new Map(); for (const device of devices) { let innerMap = byDeviceType.get(device.deviceType); if (!innerMap) { innerMap = new Map(); byDeviceType.set(device.deviceType, innerMap); } const codeVersion: number = device.codeVersion !== null && device.codeVersion !== undefined ? 
device.codeVersion : -1; let innerMostSet = innerMap.get(codeVersion); if (!innerMostSet) { innerMostSet = new Set(); innerMap.set(codeVersion, innerMostSet); } innerMostSet.add(device.deviceToken); } return byDeviceType; } async function prepareIOSNotification( allMessageInfos: MessageInfo[], newRawMessageInfos: RawMessageInfo[], threadInfo: ThreadInfo, collapseKey: ?string, badgeOnly: boolean, unreadCount: number, codeVersion: number, ): Promise { const uniqueID = uuidv4(); const notification = new apn.Notification(); notification.topic = getAPNsNotificationTopic(codeVersion); const { merged, ...rest } = await notifTextsForMessageInfo( allMessageInfos, threadInfo, getENSNames, ); if (!badgeOnly) { notification.body = merged; notification.sound = 'default'; } notification.payload = { ...notification.payload, ...rest, }; notification.badge = unreadCount; notification.threadId = threadInfo.id; notification.id = uniqueID; notification.pushType = 'alert'; notification.payload.id = uniqueID; notification.payload.threadID = threadInfo.id; if (codeVersion > 1000) { notification.mutableContent = true; } if (collapseKey) { notification.collapseId = collapseKey; } const messageInfos = JSON.stringify(newRawMessageInfos); // We make a copy before checking notification's length, because calling // length compiles the notification and makes it immutable. Further // changes to its properties won't be reflected in the final plaintext // data that is sent. const copyWithMessageInfos = _cloneDeep(notification); copyWithMessageInfos.payload = { ...copyWithMessageInfos.payload, messageInfos, }; if (copyWithMessageInfos.length() <= apnMaxNotificationPayloadByteSize) { notification.payload.messageInfos = messageInfos; return notification; } const notificationCopy = _cloneDeep(notification); if (notificationCopy.length() > apnMaxNotificationPayloadByteSize) { console.warn( `iOS notification ${uniqueID} exceeds size limit, even with messageInfos omitted`, ); } return notification; } async function prepareAndroidNotification( allMessageInfos: MessageInfo[], newRawMessageInfos: RawMessageInfo[], threadInfo: ThreadInfo, collapseKey: ?string, badgeOnly: boolean, unreadCount: number, dbID: string, codeVersion: number, ): Promise { const notifID = collapseKey ? collapseKey : dbID; const { merged, ...rest } = await notifTextsForMessageInfo( allMessageInfos, threadInfo, getENSNames, ); const notification = { data: { badge: unreadCount.toString(), ...rest, threadID: threadInfo.id, }, }; // The reason we only include `badgeOnly` for newer clients is because older // clients don't know how to parse it. The reason we only include `id` for // newer clients is that if the older clients see that field, they assume // the notif has a full payload, and then crash when trying to parse it. // By skipping `id` we allow old clients to still handle in-app notifs and // badge updating. if (!badgeOnly || codeVersion >= 69) { notification.data = { ...notification.data, id: notifID, badgeOnly: badgeOnly ? 
'1' : '0', }; } const messageInfos = JSON.stringify(newRawMessageInfos); const copyWithMessageInfos = { ...notification, data: { ...notification.data, messageInfos }, }; if ( Buffer.byteLength(JSON.stringify(copyWithMessageInfos)) <= fcmMaxNotificationPayloadByteSize ) { return copyWithMessageInfos; } if ( Buffer.byteLength(JSON.stringify(notification)) > fcmMaxNotificationPayloadByteSize ) { console.warn( `Android notification ${notifID} exceeds size limit, even with messageInfos omitted`, ); } return notification; } type NotificationInfo = | { +source: 'new_message', +dbID: string, +userID: string, +threadID: string, +messageID: string, +collapseKey: ?string, +codeVersion: number, } | { +source: 'mark_as_unread' | 'mark_as_read' | 'activity_update', +dbID: string, +userID: string, +codeVersion: number, }; type IOSDelivery = { source: $PropertyType, deviceType: 'ios', iosID: string, deviceTokens: $ReadOnlyArray, codeVersion: number, errors?: $ReadOnlyArray, }; type IOSResult = { info: NotificationInfo, delivery: IOSDelivery, invalidTokens?: $ReadOnlyArray, }; async function sendIOSNotification( notification: apn.Notification, deviceTokens: $ReadOnlyArray, notificationInfo: NotificationInfo, ): Promise { const { source, codeVersion } = notificationInfo; const response = await apnPush({ notification, deviceTokens, codeVersion }); const delivery: IOSDelivery = { source, deviceType: 'ios', iosID: notification.id, deviceTokens, codeVersion, }; if (response.errors) { delivery.errors = response.errors; } const result: IOSResult = { info: notificationInfo, delivery, }; if (response.invalidTokens) { result.invalidTokens = response.invalidTokens; } return result; } type AndroidDelivery = { source: $PropertyType, deviceType: 'android', androidIDs: $ReadOnlyArray, deviceTokens: $ReadOnlyArray, codeVersion: number, errors?: $ReadOnlyArray, }; type AndroidResult = { info: NotificationInfo, delivery: AndroidDelivery, invalidTokens?: $ReadOnlyArray, }; async function sendAndroidNotification( notification: Object, deviceTokens: $ReadOnlyArray, notificationInfo: NotificationInfo, ): Promise { const collapseKey = notificationInfo.collapseKey ? notificationInfo.collapseKey : null; // for Flow... const { source, codeVersion } = notificationInfo; const response = await fcmPush({ notification, deviceTokens, collapseKey, codeVersion, }); const androidIDs = response.fcmIDs ? 
response.fcmIDs : []; const delivery: AndroidDelivery = { source, deviceType: 'android', androidIDs, deviceTokens, codeVersion, }; if (response.errors) { delivery.errors = response.errors; } const result: AndroidResult = { info: notificationInfo, delivery, }; if (response.invalidTokens) { result.invalidTokens = response.invalidTokens; } return result; } type InvalidToken = { +userID: string, +tokens: $ReadOnlyArray, }; async function removeInvalidTokens( invalidTokens: $ReadOnlyArray, ): Promise { const sqlTuples = invalidTokens.map( invalidTokenUser => SQL`( user = ${invalidTokenUser.userID} AND device_token IN (${invalidTokenUser.tokens}) )`, ); const sqlCondition = mergeOrConditions(sqlTuples); const selectQuery = SQL` SELECT id, user, device_token FROM cookies WHERE `; selectQuery.append(sqlCondition); const [result] = await dbQuery(selectQuery); const userCookiePairsToInvalidDeviceTokens = new Map(); for (const row of result) { const userCookiePair = `${row.user}|${row.id}`; const existing = userCookiePairsToInvalidDeviceTokens.get(userCookiePair); if (existing) { existing.add(row.device_token); } else { userCookiePairsToInvalidDeviceTokens.set( userCookiePair, new Set([row.device_token]), ); } } const time = Date.now(); const promises = []; for (const entry of userCookiePairsToInvalidDeviceTokens) { const [userCookiePair, deviceTokens] = entry; const [userID, cookieID] = userCookiePair.split('|'); const updateDatas = [...deviceTokens].map(deviceToken => ({ type: updateTypes.BAD_DEVICE_TOKEN, userID, time, deviceToken, targetCookie: cookieID, })); promises.push(createUpdates(updateDatas)); } const updateQuery = SQL` UPDATE cookies SET device_token = NULL WHERE `; updateQuery.append(sqlCondition); promises.push(dbQuery(updateQuery)); await Promise.all(promises); } async function updateBadgeCount( viewer: Viewer, source: 'mark_as_unread' | 'mark_as_read' | 'activity_update', ) { const { userID } = viewer; const deviceTokenQuery = SQL` SELECT platform, device_token, versions FROM cookies WHERE user = ${userID} AND device_token IS NOT NULL `; if (viewer.data.cookieID) { deviceTokenQuery.append(SQL`AND id != ${viewer.cookieID} `); } const [unreadCounts, [deviceTokenResult], [dbID]] = await Promise.all([ getUnreadCounts([userID]), dbQuery(deviceTokenQuery), createIDs('notifications', 1), ]); const unreadCount = unreadCounts[userID]; const devices = deviceTokenResult.map(row => ({ deviceType: row.platform, deviceToken: row.device_token, codeVersion: JSON.parse(row.versions)?.codeVersion, })); const byDeviceType = getDevicesByDeviceType(devices); const deliveryPromises = []; const iosVersionsToTokens = byDeviceType.get('ios'); if (iosVersionsToTokens) { for (const [codeVer, deviceTokens] of iosVersionsToTokens) { const codeVersion = parseInt(codeVer, 10); // only for Flow const notification = new apn.Notification(); notification.topic = getAPNsNotificationTopic(codeVersion); notification.badge = unreadCount; notification.pushType = 'alert'; deliveryPromises.push( sendIOSNotification(notification, [...deviceTokens], { source, dbID, userID, codeVersion, }), ); } } const androidVersionsToTokens = byDeviceType.get('android'); if (androidVersionsToTokens) { for (const [codeVer, deviceTokens] of androidVersionsToTokens) { const codeVersion = parseInt(codeVer, 10); // only for Flow const notificationData = codeVersion < 69 ? 
{ badge: unreadCount.toString() } : { badge: unreadCount.toString(), badgeOnly: '1' }; const notification = { data: notificationData }; deliveryPromises.push( sendAndroidNotification(notification, [...deviceTokens], { source, dbID, userID, codeVersion, }), ); } } const deliveryResults = await Promise.all(deliveryPromises); await saveNotifResults(deliveryResults, new Map(), false); } export { sendPushNotifs, updateBadgeCount }; diff --git a/keyserver/src/push/utils.js b/keyserver/src/push/utils.js index 42ed239d6..7804c2387 100644 --- a/keyserver/src/push/utils.js +++ b/keyserver/src/push/utils.js @@ -1,207 +1,207 @@ // @flow import apn from '@parse/node-apn'; import type { ResponseFailure } from '@parse/node-apn'; import type { FirebaseApp, FirebaseError } from 'firebase-admin'; import invariant from 'invariant'; -import { threadSubscriptions } from 'lib/types/subscription-types'; -import { threadPermissions } from 'lib/types/thread-types'; +import { threadSubscriptions } from 'lib/types/subscription-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; +import { dbQuery, SQL } from '../database/database.js'; import { getAPNPushProfileForCodeVersion, getFCMPushProfileForCodeVersion, getAPNProvider, getFCMProvider, -} from './providers'; +} from './providers.js'; const fcmTokenInvalidationErrors = new Set([ 'messaging/registration-token-not-registered', 'messaging/invalid-registration-token', ]); const fcmMaxNotificationPayloadByteSize = 4000; const apnTokenInvalidationErrorCode = 410; const apnBadRequestErrorCode = 400; const apnBadTokenErrorString = 'BadDeviceToken'; const apnMaxNotificationPayloadByteSize = 4096; type APNPushResult = | { +success: true } | { +errors: $ReadOnlyArray, +invalidTokens?: $ReadOnlyArray, }; async function apnPush({ notification, deviceTokens, codeVersion, }: { +notification: apn.Notification, +deviceTokens: $ReadOnlyArray, +codeVersion: ?number, }): Promise { const pushProfile = getAPNPushProfileForCodeVersion(codeVersion); const apnProvider = await getAPNProvider(pushProfile); if (!apnProvider && process.env.NODE_ENV === 'development') { console.log(`no keyserver/secrets/${pushProfile}.json so ignoring notifs`); return { success: true }; } invariant(apnProvider, `keyserver/secrets/${pushProfile}.json should exist`); const result = await apnProvider.send(notification, deviceTokens); const errors = []; const invalidTokens = []; for (const error of result.failed) { errors.push(error); /* eslint-disable eqeqeq */ if ( error.status == apnTokenInvalidationErrorCode || (error.status == apnBadRequestErrorCode && error.response.reason === apnBadTokenErrorString) ) { invalidTokens.push(error.device); } /* eslint-enable eqeqeq */ } if (invalidTokens.length > 0) { return { errors, invalidTokens }; } else if (errors.length > 0) { return { errors }; } else { return { success: true }; } } type FCMPushResult = { +success?: true, +fcmIDs?: $ReadOnlyArray, +errors?: $ReadOnlyArray, +invalidTokens?: $ReadOnlyArray, }; async function fcmPush({ notification, deviceTokens, collapseKey, codeVersion, }: { +notification: Object, +deviceTokens: $ReadOnlyArray, +codeVersion: ?number, +collapseKey?: ?string, }): Promise { const pushProfile = getFCMPushProfileForCodeVersion(codeVersion); const fcmProvider = await getFCMProvider(pushProfile); if (!fcmProvider && process.env.NODE_ENV === 'development') { console.log(`no keyserver/secrets/${pushProfile}.json so ignoring notifs`); return { success: true }; } 
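// Standalone sketch, not part of this diff: the invalid-token classification
// that apnPush (above) applies to @parse/node-apn failures. The field names
// (status, response.reason, device) follow the ResponseFailure values already
// used in this file; the loose == comparisons mirror the eslint-disabled
// checks above, since status can be reported as a string.
function classifyAPNsFailures(failed) {
  const errors = [];
  const invalidTokens = [];
  for (const failure of failed) {
    errors.push(failure);
    /* eslint-disable eqeqeq */
    const tokenGone = failure.status == 410; // APNs: token no longer active
    const tokenRejected =
      failure.status == 400 && failure.response.reason === 'BadDeviceToken';
    /* eslint-enable eqeqeq */
    if (tokenGone || tokenRejected) {
      invalidTokens.push(failure.device);
    }
  }
  return { errors, invalidTokens };
}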
invariant(fcmProvider, `keyserver/secrets/${pushProfile}.json should exist`); const options: Object = { priority: 'high', }; if (collapseKey) { options.collapseKey = collapseKey; } // firebase-admin is extremely barebones and has a lot of missing or poorly // thought-out functionality. One of the issues is that if you send a // multicast message and one of the device tokens is invalid, the resulting // error won't explain which of the device tokens is invalid. So we're forced // to avoid the multicast functionality and call it once per deviceToken. const promises = []; for (const deviceToken of deviceTokens) { promises.push( fcmSinglePush(fcmProvider, notification, deviceToken, options), ); } const pushResults = await Promise.all(promises); const errors = []; const ids = []; const invalidTokens = []; for (let i = 0; i < pushResults.length; i++) { const pushResult = pushResults[i]; for (const error of pushResult.errors) { errors.push(error); if (fcmTokenInvalidationErrors.has(error.errorInfo.code)) { invalidTokens.push(deviceTokens[i]); } } for (const id of pushResult.fcmIDs) { ids.push(id); } } const result = {}; if (ids.length > 0) { result.fcmIDs = ids; } if (errors.length > 0) { result.errors = errors; } else { result.success = true; } if (invalidTokens.length > 0) { result.invalidTokens = invalidTokens; } return { ...result }; } async function fcmSinglePush( provider: FirebaseApp, notification: Object, deviceToken: string, options: Object, ) { try { const deliveryResult = await provider .messaging() .sendToDevice(deviceToken, notification, options); const errors = []; const ids = []; for (const fcmResult of deliveryResult.results) { if (fcmResult.error) { errors.push(fcmResult.error); } else if (fcmResult.messageId) { ids.push(fcmResult.messageId); } } return { fcmIDs: ids, errors }; } catch (e) { return { fcmIDs: [], errors: [e] }; } } async function getUnreadCounts( userIDs: string[], ): Promise<{ [userID: string]: number }> { const visPermissionExtractString = `$.${threadPermissions.VISIBLE}.value`; const notificationExtractString = `$.${threadSubscriptions.home}`; const query = SQL` SELECT user, COUNT(thread) AS unread_count FROM memberships WHERE user IN (${userIDs}) AND last_message > last_read_message AND role > 0 AND JSON_EXTRACT(permissions, ${visPermissionExtractString}) AND JSON_EXTRACT(subscription, ${notificationExtractString}) GROUP BY user `; const [result] = await dbQuery(query); const usersToUnreadCounts = {}; for (const row of result) { usersToUnreadCounts[row.user.toString()] = row.unread_count; } for (const userID of userIDs) { if (usersToUnreadCounts[userID] === undefined) { usersToUnreadCounts[userID] = 0; } } return usersToUnreadCounts; } export { apnPush, fcmPush, getUnreadCounts, apnMaxNotificationPayloadByteSize, fcmMaxNotificationPayloadByteSize, }; diff --git a/keyserver/src/responders/activity-responders.js b/keyserver/src/responders/activity-responders.js index f017570ef..4ae0722b5 100644 --- a/keyserver/src/responders/activity-responders.js +++ b/keyserver/src/responders/activity-responders.js @@ -1,61 +1,61 @@ // @flow import t from 'tcomb'; import type { TList, TInterface } from 'tcomb'; import type { UpdateActivityResult, UpdateActivityRequest, SetThreadUnreadStatusRequest, SetThreadUnreadStatusResult, -} from 'lib/types/activity-types'; -import { tShape } from 'lib/utils/validation-utils'; +} from 'lib/types/activity-types.js'; +import { tShape } from 'lib/utils/validation-utils.js'; -import type { Viewer } from '../session/viewer'; +import type { Viewer }
from '../session/viewer.js'; import { activityUpdater, setThreadUnreadStatus, -} from '../updaters/activity-updaters'; -import { validateInput } from '../utils/validation-utils'; +} from '../updaters/activity-updaters.js'; +import { validateInput } from '../utils/validation-utils.js'; const activityUpdatesInputValidator: TList = t.list( tShape({ focus: t.Bool, threadID: t.String, latestMessage: t.maybe(t.String), }), ); const inputValidator = tShape({ updates: activityUpdatesInputValidator, }); async function updateActivityResponder( viewer: Viewer, input: any, ): Promise { const request: UpdateActivityRequest = input; await validateInput(viewer, inputValidator, request); return await activityUpdater(viewer, request); } const setThreadUnreadStatusValidator = tShape({ threadID: t.String, unread: t.Bool, latestMessage: t.maybe(t.String), }); async function threadSetUnreadStatusResponder( viewer: Viewer, input: any, ): Promise { const request: SetThreadUnreadStatusRequest = input; await validateInput(viewer, setThreadUnreadStatusValidator, request); return await setThreadUnreadStatus(viewer, request); } export { activityUpdatesInputValidator, updateActivityResponder, threadSetUnreadStatusResponder, }; diff --git a/keyserver/src/responders/comm-landing-responders.js b/keyserver/src/responders/comm-landing-responders.js index 81ab2fc9e..23469271a 100644 --- a/keyserver/src/responders/comm-landing-responders.js +++ b/keyserver/src/responders/comm-landing-responders.js @@ -1,34 +1,34 @@ // @flow import type { $Response, $Request } from 'express'; -import { type EmailSubscriptionRequest } from 'lib/types/account-types'; -import { ServerError } from 'lib/utils/errors'; -import { tShape, tEmail } from 'lib/utils/validation-utils'; +import { type EmailSubscriptionRequest } from 'lib/types/account-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { tShape, tEmail } from 'lib/utils/validation-utils.js'; -import { sendEmailSubscriptionRequestToAshoat } from '../emails/subscribe-email-updates'; -import { checkInputValidator } from '../utils/validation-utils'; +import { sendEmailSubscriptionRequestToAshoat } from '../emails/subscribe-email-updates.js'; +import { checkInputValidator } from '../utils/validation-utils.js'; const emailSubscriptionInputValidator = tShape({ email: tEmail, }); async function emailSubscriptionResponder( req: $Request, res: $Response, ): Promise { try { if (!req.body || typeof req.body !== 'object') { throw new ServerError('invalid_parameters'); } const input: any = req.body; checkInputValidator(emailSubscriptionInputValidator, input); const subscriptionRequest: EmailSubscriptionRequest = input; await sendEmailSubscriptionRequestToAshoat(subscriptionRequest); res.json({ success: true }); } catch { res.json({ success: false }); } } export { emailSubscriptionResponder }; diff --git a/keyserver/src/responders/device-responders.js b/keyserver/src/responders/device-responders.js index 7bf878f6d..38974a732 100644 --- a/keyserver/src/responders/device-responders.js +++ b/keyserver/src/responders/device-responders.js @@ -1,28 +1,28 @@ // @flow import t from 'tcomb'; import type { TInterface } from 'tcomb'; -import type { DeviceTokenUpdateRequest } from 'lib/types/device-types'; -import { tShape, tPlatformDetails } from 'lib/utils/validation-utils'; +import type { DeviceTokenUpdateRequest } from 'lib/types/device-types.js'; +import { tShape, tPlatformDetails } from 'lib/utils/validation-utils.js'; -import type { Viewer } from '../session/viewer'; -import { 
deviceTokenUpdater } from '../updaters/device-token-updaters'; -import { validateInput } from '../utils/validation-utils'; +import type { Viewer } from '../session/viewer.js'; +import { deviceTokenUpdater } from '../updaters/device-token-updaters.js'; +import { validateInput } from '../utils/validation-utils.js'; const deviceTokenUpdateRequestInputValidator: TInterface = tShape({ deviceToken: t.maybe(t.String), deviceType: t.maybe(t.enums.of(['ios', 'android'])), platformDetails: t.maybe(tPlatformDetails), }); async function deviceTokenUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: DeviceTokenUpdateRequest = input; await validateInput(viewer, deviceTokenUpdateRequestInputValidator, request); await deviceTokenUpdater(viewer, request); } export { deviceTokenUpdateRequestInputValidator, deviceTokenUpdateResponder }; diff --git a/keyserver/src/responders/entry-responders.js b/keyserver/src/responders/entry-responders.js index ba751ad46..e2c828a05 100644 --- a/keyserver/src/responders/entry-responders.js +++ b/keyserver/src/responders/entry-responders.js @@ -1,255 +1,255 @@ // @flow import t from 'tcomb'; import type { TInterface } from 'tcomb'; -import { filteredThreadIDs } from 'lib/selectors/calendar-filter-selectors'; +import { filteredThreadIDs } from 'lib/selectors/calendar-filter-selectors.js'; import type { CalendarQuery, SaveEntryRequest, CreateEntryRequest, DeleteEntryRequest, DeleteEntryResponse, RestoreEntryRequest, RestoreEntryResponse, FetchEntryInfosResponse, DeltaEntryInfosResult, SaveEntryResponse, -} from 'lib/types/entry-types'; -import { calendarThreadFilterTypes } from 'lib/types/filter-types'; +} from 'lib/types/entry-types.js'; +import { calendarThreadFilterTypes } from 'lib/types/filter-types.js'; import type { FetchEntryRevisionInfosResult, FetchEntryRevisionInfosRequest, -} from 'lib/types/history-types'; -import { ServerError } from 'lib/utils/errors'; -import { tString, tShape, tDate } from 'lib/utils/validation-utils'; +} from 'lib/types/history-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { tString, tShape, tDate } from 'lib/utils/validation-utils.js'; -import createEntry from '../creators/entry-creator'; -import { deleteEntry, restoreEntry } from '../deleters/entry-deleters'; +import createEntry from '../creators/entry-creator.js'; +import { deleteEntry, restoreEntry } from '../deleters/entry-deleters.js'; import { fetchEntryInfos, fetchEntryRevisionInfo, fetchEntriesForSession, -} from '../fetchers/entry-fetchers'; -import { verifyThreadIDs } from '../fetchers/thread-fetchers'; -import type { Viewer } from '../session/viewer'; +} from '../fetchers/entry-fetchers.js'; +import { verifyThreadIDs } from '../fetchers/thread-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; import { updateEntry, compareNewCalendarQuery, -} from '../updaters/entry-updaters'; -import { commitSessionUpdate } from '../updaters/session-updaters'; -import { validateInput } from '../utils/validation-utils'; +} from '../updaters/entry-updaters.js'; +import { commitSessionUpdate } from '../updaters/session-updaters.js'; +import { validateInput } from '../utils/validation-utils.js'; const entryQueryInputValidator: TInterface = tShape({ navID: t.maybe(t.String), startDate: tDate, endDate: tDate, includeDeleted: t.maybe(t.Boolean), filters: t.maybe( t.list( t.union([ tShape({ type: tString(calendarThreadFilterTypes.NOT_DELETED), }), tShape({ type: tString(calendarThreadFilterTypes.THREAD_LIST), threadIDs: t.list(t.String), }), 
]), ), ), }); const newEntryQueryInputValidator: TInterface = tShape({ startDate: tDate, endDate: tDate, filters: t.list( t.union([ tShape({ type: tString(calendarThreadFilterTypes.NOT_DELETED), }), tShape({ type: tString(calendarThreadFilterTypes.THREAD_LIST), threadIDs: t.list(t.String), }), ]), ), }); function normalizeCalendarQuery(input: any): CalendarQuery { if (input.filters) { return { startDate: input.startDate, endDate: input.endDate, filters: input.filters, }; } const filters = []; if (!input.includeDeleted) { filters.push({ type: calendarThreadFilterTypes.NOT_DELETED }); } if (input.navID !== 'home') { filters.push({ type: calendarThreadFilterTypes.THREAD_LIST, threadIDs: [input.navID], }); } return { startDate: input.startDate, endDate: input.endDate, filters, }; } async function verifyCalendarQueryThreadIDs( request: CalendarQuery, ): Promise { const threadIDsToFilterTo = filteredThreadIDs(request.filters); if (threadIDsToFilterTo && threadIDsToFilterTo.size > 0) { const verifiedThreadIDs = await verifyThreadIDs([...threadIDsToFilterTo]); if (verifiedThreadIDs.length !== threadIDsToFilterTo.size) { throw new ServerError('invalid_parameters'); } } } async function entryFetchResponder( viewer: Viewer, input: any, ): Promise { await validateInput(viewer, entryQueryInputValidator, input); const request = normalizeCalendarQuery(input); await verifyCalendarQueryThreadIDs(request); const response = await fetchEntryInfos(viewer, [request]); return { ...response, userInfos: {} }; } const entryRevisionHistoryFetchInputValidator = tShape({ id: t.String, }); async function entryRevisionFetchResponder( viewer: Viewer, input: any, ): Promise { const request: FetchEntryRevisionInfosRequest = input; await validateInput(viewer, entryRevisionHistoryFetchInputValidator, request); const entryHistory = await fetchEntryRevisionInfo(viewer, request.id); return { result: entryHistory }; } const createEntryRequestInputValidator = tShape({ text: t.String, sessionID: t.maybe(t.String), timestamp: t.Number, date: tDate, threadID: t.String, localID: t.maybe(t.String), calendarQuery: t.maybe(newEntryQueryInputValidator), }); async function entryCreationResponder( viewer: Viewer, input: any, ): Promise { const request: CreateEntryRequest = input; await validateInput(viewer, createEntryRequestInputValidator, request); return await createEntry(viewer, request); } const saveEntryRequestInputValidator = tShape({ entryID: t.String, text: t.String, prevText: t.String, sessionID: t.maybe(t.String), timestamp: t.Number, calendarQuery: t.maybe(newEntryQueryInputValidator), }); async function entryUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: SaveEntryRequest = input; await validateInput(viewer, saveEntryRequestInputValidator, request); return await updateEntry(viewer, request); } const deleteEntryRequestInputValidator = tShape({ entryID: t.String, prevText: t.String, sessionID: t.maybe(t.String), timestamp: t.Number, calendarQuery: t.maybe(newEntryQueryInputValidator), }); async function entryDeletionResponder( viewer: Viewer, input: any, ): Promise { const request: DeleteEntryRequest = input; await validateInput(viewer, deleteEntryRequestInputValidator, request); return await deleteEntry(viewer, request); } const restoreEntryRequestInputValidator = tShape({ entryID: t.String, sessionID: t.maybe(t.String), timestamp: t.Number, calendarQuery: t.maybe(newEntryQueryInputValidator), }); async function entryRestorationResponder( viewer: Viewer, input: any, ): Promise { const request: 
RestoreEntryRequest = input; await validateInput(viewer, restoreEntryRequestInputValidator, request); return await restoreEntry(viewer, request); } async function calendarQueryUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: CalendarQuery = input; await validateInput(viewer, newEntryQueryInputValidator, input); await verifyCalendarQueryThreadIDs(request); if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const { difference, oldCalendarQuery, sessionUpdate, } = compareNewCalendarQuery(viewer, request); const [response] = await Promise.all([ fetchEntriesForSession(viewer, difference, oldCalendarQuery), commitSessionUpdate(viewer, sessionUpdate), ]); return { rawEntryInfos: response.rawEntryInfos, deletedEntryIDs: response.deletedEntryIDs, // Old clients expect userInfos object userInfos: [], }; } export { entryQueryInputValidator, newEntryQueryInputValidator, normalizeCalendarQuery, verifyCalendarQueryThreadIDs, entryFetchResponder, entryRevisionFetchResponder, entryCreationResponder, entryUpdateResponder, entryDeletionResponder, entryRestorationResponder, calendarQueryUpdateResponder, }; diff --git a/keyserver/src/responders/handlers.js b/keyserver/src/responders/handlers.js index ed32fd95b..17b2a976d 100644 --- a/keyserver/src/responders/handlers.js +++ b/keyserver/src/responders/handlers.js @@ -1,242 +1,245 @@ // @flow import type { $Response, $Request } from 'express'; -import { ServerError } from 'lib/utils/errors'; +import { ServerError } from 'lib/utils/errors.js'; -import { deleteCookie } from '../deleters/cookie-deleters'; +import { deleteCookie } from '../deleters/cookie-deleters.js'; import type { PolicyType } from '../lib/facts/policies.js'; import { fetchViewerForJSONRequest, addCookieToJSONResponse, fetchViewerForHomeRequest, addCookieToHomeResponse, createNewAnonymousCookie, -} from '../session/cookies'; -import type { Viewer } from '../session/viewer'; -import { type AppURLFacts, getAppURLFactsFromRequestURL } from '../utils/urls'; +} from '../session/cookies.js'; +import type { Viewer } from '../session/viewer.js'; +import { + type AppURLFacts, + getAppURLFactsFromRequestURL, +} from '../utils/urls.js'; import { policiesValidator } from '../utils/validation-utils.js'; -import { getMessageForException } from './utils'; +import { getMessageForException } from './utils.js'; export type JSONResponder = { responder: (viewer: Viewer, input: any) => Promise<*>, requiredPolicies: $ReadOnlyArray, }; export type DownloadResponder = ( viewer: Viewer, req: $Request, res: $Response, ) => Promise; export type HTMLResponder = DownloadResponder; export type HTTPGetResponder = DownloadResponder; function jsonHandler( responder: JSONResponder, expectCookieInvalidation: boolean, ): (req: $Request, res: $Response) => Promise { return async (req: $Request, res: $Response) => { let viewer; try { if (!req.body || typeof req.body !== 'object') { throw new ServerError('invalid_parameters'); } const { input } = req.body; viewer = await fetchViewerForJSONRequest(req); await policiesValidator(viewer, responder.requiredPolicies); const responderResult = await responder.responder(viewer, input); if (res.headersSent) { return; } const result = { ...responderResult }; addCookieToJSONResponse( viewer, res, result, expectCookieInvalidation, getAppURLFactsFromRequestURL(req.originalUrl), ); res.json({ success: true, ...result }); } catch (e) { await handleException( e, res, getAppURLFactsFromRequestURL(req.originalUrl), viewer, expectCookieInvalidation, ); } }; } 
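// Usage sketch, not part of this diff: how a JSONResponder is wired into an
// Express route through jsonHandler (above). The '/update_activity' path and
// the local express() app are illustrative only; the real keyserver registers
// its endpoints elsewhere. requiredPolicies is left empty here, but a real
// endpoint would list the PolicyTypes it enforces.
import express from 'express';
import { jsonHandler } from './handlers.js';
import { updateActivityResponder } from './activity-responders.js';

const app = express();
app.use(express.json());
app.post(
  '/update_activity',
  jsonHandler(
    { responder: updateActivityResponder, requiredPolicies: [] },
    false, // expectCookieInvalidation
  ),
);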
function httpGetHandler( responder: HTTPGetResponder, ): (req: $Request, res: $Response) => Promise { return async (req: $Request, res: $Response) => { let viewer; try { viewer = await fetchViewerForJSONRequest(req); await responder(viewer, req, res); } catch (e) { await handleException( e, res, getAppURLFactsFromRequestURL(req.originalUrl), viewer, ); } }; } function downloadHandler( responder: DownloadResponder, ): (req: $Request, res: $Response) => Promise { return async (req: $Request, res: $Response) => { try { const viewer = await fetchViewerForJSONRequest(req); await responder(viewer, req, res); } catch (e) { // Passing viewer in only makes sense if we want to handle failures as // JSON. We don't, and presume all download handlers avoid ServerError. await handleException( e, res, getAppURLFactsFromRequestURL(req.originalUrl), ); } }; } async function handleException( error: Error, res: $Response, appURLFacts: AppURLFacts, viewer?: ?Viewer, expectCookieInvalidation?: boolean, ) { console.warn(error); if (res.headersSent) { return; } if (!(error instanceof ServerError)) { res.status(500).send(getMessageForException(error)); return; } const result: Object = error.payload ? { error: error.message, payload: error.payload } : { error: error.message }; if (viewer) { if (error.message === 'client_version_unsupported' && viewer.loggedIn) { // If the client version is unsupported, log the user out const { platformDetails } = error; const [data] = await Promise.all([ createNewAnonymousCookie({ platformDetails, deviceToken: viewer.deviceToken, }), deleteCookie(viewer.cookieID), ]); viewer.setNewCookie(data); viewer.cookieInvalidated = true; } // This can mutate the result object addCookieToJSONResponse( viewer, res, result, !!expectCookieInvalidation, appURLFacts, ); } res.json(result); } function htmlHandler( responder: HTMLResponder, ): (req: $Request, res: $Response) => Promise { return async (req: $Request, res: $Response) => { try { const viewer = await fetchViewerForHomeRequest(req); addCookieToHomeResponse( viewer, res, getAppURLFactsFromRequestURL(req.originalUrl), ); res.type('html'); await responder(viewer, req, res); } catch (e) { console.warn(e); if (!res.headersSent) { res.status(500).send(getMessageForException(e)); } } }; } type MulterFile = { fieldname: string, originalname: string, encoding: string, mimetype: string, buffer: Buffer, size: number, }; export type MulterRequest = $Request & { files?: $ReadOnlyArray, ... 
}; type UploadResponder = (viewer: Viewer, req: MulterRequest) => Promise; function uploadHandler( responder: UploadResponder, ): (req: $Request, res: $Response) => Promise { return async (req: $Request, res: $Response) => { let viewer; try { if (!req.body || typeof req.body !== 'object') { throw new ServerError('invalid_parameters'); } viewer = await fetchViewerForJSONRequest(req); const responderResult = await responder( viewer, ((req: any): MulterRequest), ); if (res.headersSent) { return; } const result = { ...responderResult }; addCookieToJSONResponse( viewer, res, result, false, getAppURLFactsFromRequestURL(req.originalUrl), ); res.json({ success: true, ...result }); } catch (e) { await handleException( e, res, getAppURLFactsFromRequestURL(req.originalUrl), viewer, ); } }; } async function handleAsyncPromise(promise: Promise) { try { await promise; } catch (error) { console.warn(error); } } export { jsonHandler, httpGetHandler, downloadHandler, htmlHandler, uploadHandler, handleAsyncPromise, }; diff --git a/keyserver/src/responders/keys-responders.js b/keyserver/src/responders/keys-responders.js index e5dc449a7..57691537c 100644 --- a/keyserver/src/responders/keys-responders.js +++ b/keyserver/src/responders/keys-responders.js @@ -1,29 +1,29 @@ // @flow import t from 'tcomb'; -import type { GetSessionPublicKeysArgs } from 'lib/types/request-types'; -import type { SessionPublicKeys } from 'lib/types/session-types'; -import { tShape } from 'lib/utils/validation-utils'; +import type { GetSessionPublicKeysArgs } from 'lib/types/request-types.js'; +import type { SessionPublicKeys } from 'lib/types/session-types.js'; +import { tShape } from 'lib/utils/validation-utils.js'; -import { fetchSessionPublicKeys } from '../fetchers/key-fetchers'; -import type { Viewer } from '../session/viewer'; -import { validateInput } from '../utils/validation-utils'; +import { fetchSessionPublicKeys } from '../fetchers/key-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import { validateInput } from '../utils/validation-utils.js'; const getSessionPublicKeysInputValidator = tShape({ session: t.String, }); async function getSessionPublicKeysResponder( viewer: Viewer, input: any, ): Promise { if (!viewer.loggedIn) { return null; } const request: GetSessionPublicKeysArgs = input; await validateInput(viewer, getSessionPublicKeysInputValidator, request); return await fetchSessionPublicKeys(request.session); } export { getSessionPublicKeysResponder }; diff --git a/keyserver/src/responders/landing-handler.js b/keyserver/src/responders/landing-handler.js index 464c40d45..d94c899aa 100644 --- a/keyserver/src/responders/landing-handler.js +++ b/keyserver/src/responders/landing-handler.js @@ -1,218 +1,218 @@ // @flow import html from 'common-tags/lib/html'; import type { $Response, $Request } from 'express'; import fs from 'fs'; import * as React from 'react'; import ReactDOMServer from 'react-dom/server'; import { promisify } from 'util'; import { isValidPrimaryIdentityPublicKey, isValidSIWENonce, } from 'lib/utils/siwe-utils.js'; -import { type LandingSSRProps } from '../landing/landing-ssr.react'; -import { waitForStream } from '../utils/json-stream'; -import { getAndAssertLandingURLFacts } from '../utils/urls'; -import { getMessageForException } from './utils'; +import { type LandingSSRProps } from '../landing/landing-ssr.react.js'; +import { waitForStream } from '../utils/json-stream.js'; +import { getAndAssertLandingURLFacts } from '../utils/urls.js'; +import { getMessageForException } from 
'./utils.js'; async function landingHandler(req: $Request, res: $Response) { try { await landingResponder(req, res); } catch (e) { console.warn(e); if (!res.headersSent) { res.status(500).send(getMessageForException(e)); } } } const access = promisify(fs.access); const readFile = promisify(fs.readFile); const googleFontsURL = 'https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:wght@500&family=IBM+Plex+Sans:wght@400;500&display=swap'; const iaDuoFontsURL = 'fonts/duo.css'; const localFontsURL = 'fonts/local-fonts.css'; async function getDevFontURLs(): Promise<$ReadOnlyArray> { try { await access(localFontsURL); return [localFontsURL, iaDuoFontsURL]; } catch { return [googleFontsURL, iaDuoFontsURL]; } } type AssetInfo = { +jsURL: string, +fontURLs: $ReadOnlyArray, +cssInclude: string, }; let assetInfo: ?AssetInfo = null; async function getAssetInfo() { if (assetInfo) { return assetInfo; } if (process.env.NODE_ENV === 'development') { const fontURLs = await getDevFontURLs(); assetInfo = { jsURL: 'http://localhost:8082/dev.build.js', fontURLs, cssInclude: '', }; return assetInfo; } try { const assetsString = await readFile('../landing/dist/assets.json', 'utf8'); const assets = JSON.parse(assetsString); assetInfo = { jsURL: `compiled/${assets.browser.js}`, fontURLs: [googleFontsURL, iaDuoFontsURL], cssInclude: html` `, }; return assetInfo; } catch { throw new Error( 'Could not load assets.json for landing build. ' + 'Did you forget to run `yarn dev` in the landing folder?', ); } } type LandingApp = React.ComponentType; let webpackCompiledRootComponent: ?LandingApp = null; async function getWebpackCompiledRootComponentForSSR() { if (webpackCompiledRootComponent) { return webpackCompiledRootComponent; } try { // $FlowFixMe landing/dist doesn't always exist const webpackBuild = await import('landing/dist/landing.build.cjs'); webpackCompiledRootComponent = webpackBuild.default.default; return webpackCompiledRootComponent; } catch { throw new Error( 'Could not load landing.build.cjs. ' + 'Did you forget to run `yarn dev` in the landing folder?', ); } } const { renderToNodeStream } = ReactDOMServer; async function landingResponder(req: $Request, res: $Response) { const siweNonce = req.header('siwe-nonce'); if ( siweNonce !== null && siweNonce !== undefined && !isValidSIWENonce(siweNonce) ) { res.status(400).send({ message: 'Invalid nonce in siwe-nonce header.', }); return; } const siwePrimaryIdentityPublicKey = req.header( 'siwe-primary-identity-public-key', ); if ( siwePrimaryIdentityPublicKey !== null && siwePrimaryIdentityPublicKey !== undefined && !isValidPrimaryIdentityPublicKey(siwePrimaryIdentityPublicKey) ) { res.status(400).send({ message: 'Invalid primary identity public key in siwe-primary-identity-public-key header.', }); return; } const [{ jsURL, fontURLs, cssInclude }, LandingSSR] = await Promise.all([ getAssetInfo(), getWebpackCompiledRootComponentForSSR(), ]); const fontsInclude = fontURLs .map(url => ``) .join(''); const urlFacts = getAndAssertLandingURLFacts(); const { basePath } = urlFacts; // prettier-ignore res.write(html` Comm ${fontsInclude} ${cssInclude}
`); // We remove trailing slash for `react-router` const routerBasename = basePath.replace(/\/$/, ''); const clientPath = routerBasename + req.url; const reactStream = renderToNodeStream( , ); reactStream.pipe(res, { end: false }); await waitForStream(reactStream); const siweNonceString = siweNonce ? `"${siweNonce}"` : 'null'; const siwePrimaryIdentityPublicKeyString = siwePrimaryIdentityPublicKey ? `"${siwePrimaryIdentityPublicKey}"` : 'null'; // prettier-ignore res.end(html`
`); } export default landingHandler; diff --git a/keyserver/src/responders/message-report-responder.js b/keyserver/src/responders/message-report-responder.js index 5463edda5..250088cff 100644 --- a/keyserver/src/responders/message-report-responder.js +++ b/keyserver/src/responders/message-report-responder.js @@ -1,34 +1,34 @@ // @flow import t from 'tcomb'; import { type MessageReportCreationRequest, type MessageReportCreationResult, -} from 'lib/types/message-report-types'; -import { tShape } from 'lib/utils/validation-utils'; +} from 'lib/types/message-report-types.js'; +import { tShape } from 'lib/utils/validation-utils.js'; -import createMessageReport from '../creators/message-report-creator'; -import type { Viewer } from '../session/viewer'; -import { validateInput } from '../utils/validation-utils'; +import createMessageReport from '../creators/message-report-creator.js'; +import type { Viewer } from '../session/viewer.js'; +import { validateInput } from '../utils/validation-utils.js'; const messageReportCreationRequestInputValidator = tShape({ messageID: t.String, }); async function messageReportCreationResponder( viewer: Viewer, input: any, ): Promise { await validateInput( viewer, messageReportCreationRequestInputValidator, input, ); const request: MessageReportCreationRequest = input; const rawMessageInfos = await createMessageReport(viewer, request); return { messageInfo: rawMessageInfos[0] }; } export { messageReportCreationResponder }; diff --git a/keyserver/src/responders/message-responders.js b/keyserver/src/responders/message-responders.js index 667cfe209..dd09b132b 100644 --- a/keyserver/src/responders/message-responders.js +++ b/keyserver/src/responders/message-responders.js @@ -1,268 +1,275 @@ // @flow import invariant from 'invariant'; import t from 'tcomb'; -import { onlyOneEmojiRegex } from 'lib/shared/emojis'; -import { createMediaMessageData, trimMessage } from 'lib/shared/message-utils'; -import { relationshipBlockedInEitherDirection } from 'lib/shared/relationship-utils'; +import { onlyOneEmojiRegex } from 'lib/shared/emojis.js'; +import { + createMediaMessageData, + trimMessage, +} from 'lib/shared/message-utils.js'; +import { relationshipBlockedInEitherDirection } from 'lib/shared/relationship-utils.js'; import type { Media } from 'lib/types/media-types.js'; import { messageTypes, type SendTextMessageRequest, type SendMultimediaMessageRequest, type SendReactionMessageRequest, type FetchMessageInfosResponse, type FetchMessageInfosRequest, defaultNumberPerThread, type SendMessageResponse, -} from 'lib/types/message-types'; -import type { ReactionMessageData } from 'lib/types/messages/reaction'; -import type { TextMessageData } from 'lib/types/messages/text'; -import { threadPermissions } from 'lib/types/thread-types'; -import { ServerError } from 'lib/utils/errors'; -import { tRegex, tShape, tMediaMessageMedia } from 'lib/utils/validation-utils'; - -import createMessages from '../creators/message-creator'; -import { SQL } from '../database/database'; +} from 'lib/types/message-types.js'; +import type { ReactionMessageData } from 'lib/types/messages/reaction.js'; +import type { TextMessageData } from 'lib/types/messages/text.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { + tRegex, + tShape, + tMediaMessageMedia, +} from 'lib/utils/validation-utils.js'; + +import createMessages from '../creators/message-creator.js'; +import { SQL } from '../database/database.js'; import { 
fetchMessageInfos, fetchMessageInfoForLocalID, fetchMessageInfoByID, -} from '../fetchers/message-fetchers'; -import { fetchServerThreadInfos } from '../fetchers/thread-fetchers'; -import { checkThreadPermission } from '../fetchers/thread-permission-fetchers'; +} from '../fetchers/message-fetchers.js'; +import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; +import { checkThreadPermission } from '../fetchers/thread-permission-fetchers.js'; import { fetchMedia, fetchMediaFromMediaMessageContent, -} from '../fetchers/upload-fetchers'; -import { fetchKnownUserInfos } from '../fetchers/user-fetchers'; -import type { Viewer } from '../session/viewer'; +} from '../fetchers/upload-fetchers.js'; +import { fetchKnownUserInfos } from '../fetchers/user-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; import { assignMedia, assignMessageContainerToMedia, -} from '../updaters/upload-updaters'; -import { validateInput } from '../utils/validation-utils'; +} from '../updaters/upload-updaters.js'; +import { validateInput } from '../utils/validation-utils.js'; const sendTextMessageRequestInputValidator = tShape({ threadID: t.String, localID: t.maybe(t.String), text: t.String, }); async function textMessageCreationResponder( viewer: Viewer, input: any, ): Promise { const request: SendTextMessageRequest = input; await validateInput(viewer, sendTextMessageRequestInputValidator, request); const { threadID, localID, text: rawText } = request; const text = trimMessage(rawText); if (!text) { throw new ServerError('invalid_parameters'); } const hasPermission = await checkThreadPermission( viewer, threadID, threadPermissions.VOICED, ); if (!hasPermission) { throw new ServerError('invalid_parameters'); } const messageData: TextMessageData = { type: messageTypes.TEXT, threadID, creatorID: viewer.id, time: Date.now(), text, }; if (localID) { messageData.localID = localID; } const rawMessageInfos = await createMessages(viewer, [messageData]); return { newMessageInfo: rawMessageInfos[0] }; } const fetchMessageInfosRequestInputValidator = tShape({ cursors: t.dict(t.String, t.maybe(t.String)), numberPerThread: t.maybe(t.Number), }); async function messageFetchResponder( viewer: Viewer, input: any, ): Promise { const request: FetchMessageInfosRequest = input; await validateInput(viewer, fetchMessageInfosRequestInputValidator, request); const response = await fetchMessageInfos( viewer, { threadCursors: request.cursors }, request.numberPerThread ? 
request.numberPerThread : defaultNumberPerThread, ); return { ...response, userInfos: {} }; } const sendMultimediaMessageRequestInputValidator = t.union([ tShape({ threadID: t.String, localID: t.String, mediaIDs: t.list(t.String), }), tShape({ threadID: t.String, localID: t.String, mediaMessageContents: t.list(tMediaMessageMedia), }), ]); async function multimediaMessageCreationResponder( viewer: Viewer, input: any, ): Promise { const request: SendMultimediaMessageRequest = input; await validateInput( viewer, sendMultimediaMessageRequestInputValidator, request, ); if ( (request.mediaIDs && request.mediaIDs.length === 0) || (request.mediaMessageContents && request.mediaMessageContents.length === 0) ) { throw new ServerError('invalid_parameters'); } const { threadID, localID } = request; const hasPermission = await checkThreadPermission( viewer, threadID, threadPermissions.VOICED, ); if (!hasPermission) { throw new ServerError('invalid_parameters'); } const existingMessageInfoPromise = fetchMessageInfoForLocalID( viewer, localID, ); const mediaPromise: Promise<$ReadOnlyArray> = request.mediaIDs ? fetchMedia(viewer, request.mediaIDs) : fetchMediaFromMediaMessageContent(viewer, request.mediaMessageContents); const [existingMessageInfo, media] = await Promise.all([ existingMessageInfoPromise, mediaPromise, ]); if (media.length === 0 && !existingMessageInfo) { throw new ServerError('invalid_parameters'); } const messageData = createMediaMessageData({ localID, threadID, creatorID: viewer.id, media, }); const [newMessageInfo] = await createMessages(viewer, [messageData]); const { id } = newMessageInfo; invariant( id !== null && id !== undefined, 'serverID should be set in createMessages result', ); if (request.mediaIDs) { await assignMedia(viewer, request.mediaIDs, id); } else { await assignMessageContainerToMedia( viewer, request.mediaMessageContents, id, ); } return { newMessageInfo }; } const sendReactionMessageRequestInputValidator = tShape({ threadID: t.String, localID: t.maybe(t.String), targetMessageID: t.String, reaction: tRegex(onlyOneEmojiRegex), action: t.enums.of(['add_reaction', 'remove_reaction']), }); async function reactionMessageCreationResponder( viewer: Viewer, input: any, ): Promise { const request: SendReactionMessageRequest = input; await validateInput(viewer, sendReactionMessageRequestInputValidator, input); const { threadID, localID, targetMessageID, reaction, action } = request; if (!targetMessageID || !reaction) { throw new ServerError('invalid_parameters'); } const targetMessageInfo = await fetchMessageInfoByID(viewer, targetMessageID); if (!targetMessageInfo || !targetMessageInfo.id) { throw new ServerError('invalid_parameters'); } const [ serverThreadInfos, hasPermission, targetMessageUserInfos, ] = await Promise.all([ fetchServerThreadInfos(SQL`t.id = ${threadID}`), checkThreadPermission(viewer, threadID, threadPermissions.VOICED), fetchKnownUserInfos(viewer, [targetMessageInfo.creatorID]), ]); const targetMessageThreadInfo = serverThreadInfos.threadInfos[threadID]; if (targetMessageThreadInfo.sourceMessageID === targetMessageID) { throw new ServerError('invalid_parameters'); } const targetMessageCreator = targetMessageUserInfos[targetMessageInfo.creatorID]; const targetMessageCreatorRelationship = targetMessageCreator?.relationshipStatus; const creatorRelationshipHasBlock = targetMessageCreatorRelationship && relationshipBlockedInEitherDirection(targetMessageCreatorRelationship); if (!hasPermission || creatorRelationshipHasBlock) { throw new 
ServerError('invalid_parameters'); } let messageData: ReactionMessageData = { type: messageTypes.REACTION, threadID, creatorID: viewer.id, time: Date.now(), targetMessageID, reaction, action, }; if (localID) { messageData = { ...messageData, localID }; } const rawMessageInfos = await createMessages(viewer, [messageData]); return { newMessageInfo: rawMessageInfos[0] }; } export { textMessageCreationResponder, messageFetchResponder, multimediaMessageCreationResponder, reactionMessageCreationResponder, }; diff --git a/keyserver/src/responders/relationship-responders.js b/keyserver/src/responders/relationship-responders.js index 2bfcf6b50..0d2130a9d 100644 --- a/keyserver/src/responders/relationship-responders.js +++ b/keyserver/src/responders/relationship-responders.js @@ -1,30 +1,30 @@ // @flow import t from 'tcomb'; import { type RelationshipRequest, type RelationshipErrors, relationshipActionsList, -} from 'lib/types/relationship-types'; -import { tShape } from 'lib/utils/validation-utils'; +} from 'lib/types/relationship-types.js'; +import { tShape } from 'lib/utils/validation-utils.js'; -import type { Viewer } from '../session/viewer'; -import { updateRelationships } from '../updaters/relationship-updaters'; -import { validateInput } from '../utils/validation-utils'; +import type { Viewer } from '../session/viewer.js'; +import { updateRelationships } from '../updaters/relationship-updaters.js'; +import { validateInput } from '../utils/validation-utils.js'; const updateRelationshipInputValidator = tShape({ action: t.enums.of(relationshipActionsList, 'relationship action'), userIDs: t.list(t.String), }); async function updateRelationshipsResponder( viewer: Viewer, input: any, ): Promise { const request: RelationshipRequest = input; await validateInput(viewer, updateRelationshipInputValidator, request); return await updateRelationships(viewer, request); } export { updateRelationshipsResponder }; diff --git a/keyserver/src/responders/report-responders.js b/keyserver/src/responders/report-responders.js index 61e540e0c..4348ba065 100644 --- a/keyserver/src/responders/report-responders.js +++ b/keyserver/src/responders/report-responders.js @@ -1,239 +1,239 @@ // @flow import type { $Response, $Request } from 'express'; import t from 'tcomb'; import type { TStructProps } from 'tcomb'; import { type ReportCreationResponse, type ReportCreationRequest, type FetchErrorReportInfosResponse, type FetchErrorReportInfosRequest, reportTypes, -} from 'lib/types/report-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/report-types.js'; +import { ServerError } from 'lib/utils/errors.js'; import { tShape, tPlatform, tPlatformDetails, -} from 'lib/utils/validation-utils'; +} from 'lib/utils/validation-utils.js'; -import createReport from '../creators/report-creator'; +import createReport from '../creators/report-creator.js'; import { fetchErrorReportInfos, fetchReduxToolsImport, -} from '../fetchers/report-fetchers'; -import type { Viewer } from '../session/viewer'; -import { validateInput } from '../utils/validation-utils'; -import { newEntryQueryInputValidator } from './entry-responders'; +} from '../fetchers/report-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import { validateInput } from '../utils/validation-utils.js'; +import { newEntryQueryInputValidator } from './entry-responders.js'; const tActionSummary = tShape({ type: t.String, time: t.Number, summary: t.String, }); const threadInconsistencyReportValidatorShape: TStructProps = { platformDetails: 
tPlatformDetails, beforeAction: t.Object, action: t.Object, pollResult: t.maybe(t.Object), pushResult: t.Object, lastActionTypes: t.maybe(t.list(t.String)), lastActions: t.maybe(t.list(tActionSummary)), time: t.maybe(t.Number), }; const entryInconsistencyReportValidatorShape: TStructProps = { platformDetails: tPlatformDetails, beforeAction: t.Object, action: t.Object, calendarQuery: newEntryQueryInputValidator, pollResult: t.maybe(t.Object), pushResult: t.Object, lastActionTypes: t.maybe(t.list(t.String)), lastActions: t.maybe(t.list(tActionSummary)), time: t.Number, }; const userInconsistencyReportValidatorShape = { platformDetails: tPlatformDetails, action: t.Object, beforeStateCheck: t.Object, afterStateCheck: t.Object, lastActions: t.list(tActionSummary), time: t.Number, }; const threadInconsistencyReportCreationRequest = tShape({ ...threadInconsistencyReportValidatorShape, type: t.irreducible( 'reportTypes.THREAD_INCONSISTENCY', x => x === reportTypes.THREAD_INCONSISTENCY, ), }); const entryInconsistencyReportCreationRquest = tShape({ ...entryInconsistencyReportValidatorShape, type: t.irreducible( 'reportTypes.ENTRY_INCONSISTENCY', x => x === reportTypes.ENTRY_INCONSISTENCY, ), }); const mediaMissionReportCreationRequest = tShape({ type: t.irreducible( 'reportTypes.MEDIA_MISSION', x => x === reportTypes.MEDIA_MISSION, ), platformDetails: tPlatformDetails, time: t.Number, mediaMission: t.Object, uploadServerID: t.maybe(t.String), uploadLocalID: t.maybe(t.String), mediaLocalID: t.maybe(t.String), messageServerID: t.maybe(t.String), messageLocalID: t.maybe(t.String), }); const userInconsistencyReportCreationRequest = tShape({ ...userInconsistencyReportValidatorShape, type: t.irreducible( 'reportTypes.USER_INCONSISTENCY', x => x === reportTypes.USER_INCONSISTENCY, ), }); const reportCreationRequestInputValidator = t.union([ tShape({ type: t.maybe( t.irreducible('reportTypes.ERROR', x => x === reportTypes.ERROR), ), platformDetails: t.maybe(tPlatformDetails), deviceType: t.maybe(tPlatform), codeVersion: t.maybe(t.Number), stateVersion: t.maybe(t.Number), errors: t.list( tShape({ errorMessage: t.String, stack: t.maybe(t.String), componentStack: t.maybe(t.String), }), ), preloadedState: t.Object, currentState: t.Object, actions: t.list(t.union([t.Object, t.String])), }), threadInconsistencyReportCreationRequest, entryInconsistencyReportCreationRquest, mediaMissionReportCreationRequest, userInconsistencyReportCreationRequest, ]); async function reportCreationResponder( viewer: Viewer, input: any, ): Promise { await validateInput(viewer, reportCreationRequestInputValidator, input); if (input.type === null || input.type === undefined) { input.type = reportTypes.ERROR; } if (!input.platformDetails && input.deviceType) { const { deviceType, codeVersion, stateVersion, ...rest } = input; input = { ...rest, platformDetails: { platform: deviceType, codeVersion, stateVersion }, }; } const request: ReportCreationRequest = input; const response = await createReport(viewer, request); if (!response) { throw new ServerError('ignored_report'); } return response; } const reportMultiCreationRequestInputValidator = tShape({ reports: t.list( t.union([ tShape({ type: t.irreducible('reportTypes.ERROR', x => x === reportTypes.ERROR), platformDetails: tPlatformDetails, errors: t.list( tShape({ errorMessage: t.String, stack: t.maybe(t.String), componentStack: t.maybe(t.String), }), ), preloadedState: t.Object, currentState: t.Object, actions: t.list(t.union([t.Object, t.String])), }), 
threadInconsistencyReportCreationRequest, entryInconsistencyReportCreationRquest, mediaMissionReportCreationRequest, userInconsistencyReportCreationRequest, ]), ), }); type ReportMultiCreationRequest = { reports: $ReadOnlyArray, }; async function reportMultiCreationResponder( viewer: Viewer, input: any, ): Promise { const request: ReportMultiCreationRequest = input; await validateInput( viewer, reportMultiCreationRequestInputValidator, request, ); await Promise.all( request.reports.map(reportCreationRequest => createReport(viewer, reportCreationRequest), ), ); } const fetchErrorReportInfosRequestInputValidator = tShape({ cursor: t.maybe(t.String), }); async function errorReportFetchInfosResponder( viewer: Viewer, input: any, ): Promise { const request: FetchErrorReportInfosRequest = input; await validateInput( viewer, fetchErrorReportInfosRequestInputValidator, request, ); return await fetchErrorReportInfos(viewer, request); } async function errorReportDownloadResponder( viewer: Viewer, req: $Request, res: $Response, ): Promise { const id = req.params.reportID; if (!id) { throw new ServerError('invalid_parameters'); } const result = await fetchReduxToolsImport(viewer, id); res.set('Content-Disposition', `attachment; filename=report-${id}.json`); res.json({ preloadedState: JSON.stringify(result.preloadedState), payload: JSON.stringify(result.payload), }); } export { threadInconsistencyReportValidatorShape, entryInconsistencyReportValidatorShape, reportCreationResponder, reportMultiCreationResponder, errorReportFetchInfosResponder, errorReportDownloadResponder, }; diff --git a/keyserver/src/responders/search-responders.js b/keyserver/src/responders/search-responders.js index 7df6182e8..d8c5a89e2 100644 --- a/keyserver/src/responders/search-responders.js +++ b/keyserver/src/responders/search-responders.js @@ -1,29 +1,29 @@ // @flow import t from 'tcomb'; import type { UserSearchRequest, UserSearchResult, -} from 'lib/types/search-types'; -import { tShape } from 'lib/utils/validation-utils'; +} from 'lib/types/search-types.js'; +import { tShape } from 'lib/utils/validation-utils.js'; -import { searchForUsers } from '../search/users'; -import type { Viewer } from '../session/viewer'; -import { validateInput } from '../utils/validation-utils'; +import { searchForUsers } from '../search/users.js'; +import type { Viewer } from '../session/viewer.js'; +import { validateInput } from '../utils/validation-utils.js'; const userSearchRequestInputValidator = tShape({ prefix: t.maybe(t.String), }); async function userSearchResponder( viewer: Viewer, input: any, ): Promise { const request: UserSearchRequest = input; await validateInput(viewer, userSearchRequestInputValidator, request); const searchResults = await searchForUsers(request); return { userInfos: searchResults }; } export { userSearchResponder }; diff --git a/keyserver/src/responders/siwe-nonce-responders.js b/keyserver/src/responders/siwe-nonce-responders.js index 2f5af4506..69636c117 100644 --- a/keyserver/src/responders/siwe-nonce-responders.js +++ b/keyserver/src/responders/siwe-nonce-responders.js @@ -1,15 +1,15 @@ // @flow import { generateNonce } from 'siwe'; -import type { SIWENonceResponse } from 'lib/types/siwe-types'; +import type { SIWENonceResponse } from 'lib/types/siwe-types.js'; -import { createSIWENonceEntry } from '../creators/siwe-nonce-creator'; +import { createSIWENonceEntry } from '../creators/siwe-nonce-creator.js'; async function siweNonceResponder(): Promise { const generatedNonce = generateNonce(); await 
createSIWENonceEntry(generatedNonce); return { nonce: generatedNonce }; } export { siweNonceResponder }; diff --git a/keyserver/src/responders/thread-responders.js b/keyserver/src/responders/thread-responders.js index 3b440f560..17f3338dc 100644 --- a/keyserver/src/responders/thread-responders.js +++ b/keyserver/src/responders/thread-responders.js @@ -1,188 +1,188 @@ // @flow import t from 'tcomb'; import type { TUnion, TInterface } from 'tcomb'; import { type ThreadDeletionRequest, type RoleChangeRequest, type ChangeThreadSettingsResult, type RemoveMembersRequest, type LeaveThreadRequest, type LeaveThreadResult, type UpdateThreadRequest, type ServerNewThreadRequest, type NewThreadResponse, type ServerThreadJoinRequest, type ThreadJoinResult, threadTypes, -} from 'lib/types/thread-types'; -import { values } from 'lib/utils/objects'; +} from 'lib/types/thread-types.js'; +import { values } from 'lib/utils/objects.js'; import { tShape, tNumEnum, tColor, tPassword, -} from 'lib/utils/validation-utils'; +} from 'lib/utils/validation-utils.js'; -import { createThread } from '../creators/thread-creator'; -import { deleteThread } from '../deleters/thread-deleters'; -import type { Viewer } from '../session/viewer'; +import { createThread } from '../creators/thread-creator.js'; +import { deleteThread } from '../deleters/thread-deleters.js'; +import type { Viewer } from '../session/viewer.js'; import { updateRole, removeMembers, leaveThread, updateThread, joinThread, -} from '../updaters/thread-updaters'; -import { validateInput } from '../utils/validation-utils'; +} from '../updaters/thread-updaters.js'; +import { validateInput } from '../utils/validation-utils.js'; import { entryQueryInputValidator, verifyCalendarQueryThreadIDs, -} from './entry-responders'; +} from './entry-responders.js'; const threadDeletionRequestInputValidator = tShape({ threadID: t.String, accountPassword: t.maybe(tPassword), }); async function threadDeletionResponder( viewer: Viewer, input: any, ): Promise { const request: ThreadDeletionRequest = input; await validateInput(viewer, threadDeletionRequestInputValidator, request); return await deleteThread(viewer, request); } const roleChangeRequestInputValidator = tShape({ threadID: t.String, memberIDs: t.list(t.String), role: t.refinement(t.String, str => { const int = parseInt(str, 10); return String(int) === str && int > 0; }), }); async function roleUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: RoleChangeRequest = input; await validateInput(viewer, roleChangeRequestInputValidator, request); return await updateRole(viewer, request); } const removeMembersRequestInputValidator = tShape({ threadID: t.String, memberIDs: t.list(t.String), }); async function memberRemovalResponder( viewer: Viewer, input: any, ): Promise { const request: RemoveMembersRequest = input; await validateInput(viewer, removeMembersRequestInputValidator, request); return await removeMembers(viewer, request); } const leaveThreadRequestInputValidator = tShape({ threadID: t.String, }); async function threadLeaveResponder( viewer: Viewer, input: any, ): Promise { const request: LeaveThreadRequest = input; await validateInput(viewer, leaveThreadRequestInputValidator, request); return await leaveThread(viewer, request); } const updateThreadRequestInputValidator = tShape({ threadID: t.String, changes: tShape({ type: t.maybe(tNumEnum(values(threadTypes))), name: t.maybe(t.String), description: t.maybe(t.String), color: t.maybe(tColor), parentThreadID: t.maybe(t.String), newMemberIDs: 
t.maybe(t.list(t.String)), }), accountPassword: t.maybe(tPassword), }); async function threadUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: UpdateThreadRequest = input; await validateInput(viewer, updateThreadRequestInputValidator, request); return await updateThread(viewer, request); } const threadRequestValidationShape = { name: t.maybe(t.String), description: t.maybe(t.String), color: t.maybe(tColor), parentThreadID: t.maybe(t.String), initialMemberIDs: t.maybe(t.list(t.String)), calendarQuery: t.maybe(entryQueryInputValidator), }; const newThreadRequestInputValidator: TUnion = t.union([ tShape({ type: tNumEnum([threadTypes.SIDEBAR]), sourceMessageID: t.String, ...threadRequestValidationShape, }), tShape({ type: tNumEnum([ threadTypes.COMMUNITY_OPEN_SUBTHREAD, threadTypes.COMMUNITY_SECRET_SUBTHREAD, threadTypes.PERSONAL, threadTypes.LOCAL, ]), ...threadRequestValidationShape, }), ]); async function threadCreationResponder( viewer: Viewer, input: any, ): Promise { const request: ServerNewThreadRequest = input; await validateInput(viewer, newThreadRequestInputValidator, request); return await createThread(viewer, request, { silentlyFailMembers: request.type === threadTypes.SIDEBAR, }); } const joinThreadRequestInputValidator = tShape({ threadID: t.String, calendarQuery: t.maybe(entryQueryInputValidator), }); async function threadJoinResponder( viewer: Viewer, input: any, ): Promise { const request: ServerThreadJoinRequest = input; await validateInput(viewer, joinThreadRequestInputValidator, request); if (request.calendarQuery) { await verifyCalendarQueryThreadIDs(request.calendarQuery); } return await joinThread(viewer, request); } export { threadDeletionResponder, roleUpdateResponder, memberRemovalResponder, threadLeaveResponder, threadUpdateResponder, threadCreationResponder, threadJoinResponder, newThreadRequestInputValidator, }; diff --git a/keyserver/src/responders/thread-responders.test.js b/keyserver/src/responders/thread-responders.test.js index ec4085f6b..b79c66540 100644 --- a/keyserver/src/responders/thread-responders.test.js +++ b/keyserver/src/responders/thread-responders.test.js @@ -1,53 +1,53 @@ // @flow -import { threadTypes } from 'lib/types/thread-types'; +import { threadTypes } from 'lib/types/thread-types.js'; -import { newThreadRequestInputValidator } from './thread-responders'; +import { newThreadRequestInputValidator } from './thread-responders.js'; describe('Thread responders', () => { describe('New thread request validator', () => { const requestWithoutMessageID = { name: 'name', description: 'description', color: 'aaaaaa', parentThreadID: 'parentID', initialMemberIDs: [], }; const requestWithMessageID = { ...requestWithoutMessageID, sourceMessageID: 'messageID', }; it('Should require sourceMessageID of a sidebar', () => { expect( newThreadRequestInputValidator.is({ type: threadTypes.SIDEBAR, ...requestWithoutMessageID, }), ).toBe(false); expect( newThreadRequestInputValidator.is({ type: threadTypes.SIDEBAR, ...requestWithMessageID, }), ).toBe(true); }); it('Should not require sourceMessageID of not a sidebar', () => { expect( newThreadRequestInputValidator.is({ type: threadTypes.LOCAL, ...requestWithoutMessageID, }), ).toBe(true); expect( newThreadRequestInputValidator.is({ type: threadTypes.LOCAL, ...requestWithMessageID, }), ).toBe(false); }); }); }); diff --git a/keyserver/src/responders/user-responders.js b/keyserver/src/responders/user-responders.js index 37cf06c3f..3ad6d091e 100644 --- a/keyserver/src/responders/user-responders.js 
+++ b/keyserver/src/responders/user-responders.js @@ -1,532 +1,532 @@ // @flow import invariant from 'invariant'; import { ErrorTypes, SiweMessage } from 'siwe'; import t from 'tcomb'; import bcrypt from 'twin-bcrypt'; import { baseLegalPolicies, policies } from 'lib/facts/policies.js'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; import type { ResetPasswordRequest, LogOutResponse, DeleteAccountRequest, RegisterResponse, RegisterRequest, LogInResponse, LogInRequest, UpdatePasswordRequest, UpdateUserSettingsRequest, PolicyAcknowledgmentRequest, -} from 'lib/types/account-types'; +} from 'lib/types/account-types.js'; import { userSettingsTypes, notificationTypeValues, logInActionSources, -} from 'lib/types/account-types'; +} from 'lib/types/account-types.js'; import type { CalendarQuery } from 'lib/types/entry-types.js'; -import { defaultNumberPerThread } from 'lib/types/message-types'; +import { defaultNumberPerThread } from 'lib/types/message-types.js'; import type { SIWEAuthRequest, SIWEMessage, SIWESocialProof, } from 'lib/types/siwe-types.js'; import type { SubscriptionUpdateRequest, SubscriptionUpdateResponse, -} from 'lib/types/subscription-types'; -import type { PasswordUpdate } from 'lib/types/user-types'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; -import { promiseAll } from 'lib/utils/promises'; +} from 'lib/types/subscription-types.js'; +import type { PasswordUpdate } from 'lib/types/user-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; +import { promiseAll } from 'lib/utils/promises.js'; import { getPublicKeyFromSIWEStatement, isValidSIWEMessage, isValidSIWEStatementWithPublicKey, primaryIdentityPublicKeyRegex, } from 'lib/utils/siwe-utils.js'; import { tShape, tPlatformDetails, tPassword, tEmail, tOldValidUsername, tRegex, -} from 'lib/utils/validation-utils'; +} from 'lib/utils/validation-utils.js'; import { createAccount, processSIWEAccountCreation, -} from '../creators/account-creator'; -import { dbQuery, SQL } from '../database/database'; -import { deleteAccount } from '../deleters/account-deleters'; -import { deleteCookie } from '../deleters/cookie-deleters'; +} from '../creators/account-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { deleteAccount } from '../deleters/account-deleters.js'; +import { deleteCookie } from '../deleters/cookie-deleters.js'; import { checkAndInvalidateSIWENonceEntry } from '../deleters/siwe-nonce-deleters.js'; -import { fetchEntryInfos } from '../fetchers/entry-fetchers'; -import { fetchMessageInfos } from '../fetchers/message-fetchers'; +import { fetchEntryInfos } from '../fetchers/entry-fetchers.js'; +import { fetchMessageInfos } from '../fetchers/message-fetchers.js'; import { fetchNotAcknowledgedPolicies } from '../fetchers/policy-acknowledgment-fetchers.js'; -import { fetchThreadInfos } from '../fetchers/thread-fetchers'; +import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; import { fetchKnownUserInfos, fetchLoggedInUserInfo, fetchUserIDForEthereumAddress, -} from '../fetchers/user-fetchers'; +} from '../fetchers/user-fetchers.js'; import { createNewAnonymousCookie, createNewUserCookie, setNewSession, -} from '../session/cookies'; -import type { Viewer } from '../session/viewer'; +} from '../session/cookies.js'; +import type { Viewer } from '../session/viewer.js'; import { accountUpdater, 
checkAndSendVerificationEmail, checkAndSendPasswordResetEmail, updatePassword, updateUserSettings, -} from '../updaters/account-updaters'; -import { userSubscriptionUpdater } from '../updaters/user-subscription-updaters'; +} from '../updaters/account-updaters.js'; +import { userSubscriptionUpdater } from '../updaters/user-subscription-updaters.js'; import { viewerAcknowledgmentUpdater } from '../updaters/viewer-acknowledgment-updater.js'; -import { validateInput } from '../utils/validation-utils'; +import { validateInput } from '../utils/validation-utils.js'; import { entryQueryInputValidator, newEntryQueryInputValidator, normalizeCalendarQuery, verifyCalendarQueryThreadIDs, -} from './entry-responders'; +} from './entry-responders.js'; const subscriptionUpdateRequestInputValidator = tShape({ threadID: t.String, updatedFields: tShape({ pushNotifs: t.maybe(t.Boolean), home: t.maybe(t.Boolean), }), }); async function userSubscriptionUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: SubscriptionUpdateRequest = input; await validateInput(viewer, subscriptionUpdateRequestInputValidator, request); const threadSubscription = await userSubscriptionUpdater(viewer, request); return { threadSubscription }; } const accountUpdateInputValidator = tShape({ updatedFields: tShape({ email: t.maybe(tEmail), password: t.maybe(tPassword), }), currentPassword: tPassword, }); async function passwordUpdateResponder( viewer: Viewer, input: any, ): Promise { const request: PasswordUpdate = input; await validateInput(viewer, accountUpdateInputValidator, request); await accountUpdater(viewer, request); } async function sendVerificationEmailResponder(viewer: Viewer): Promise { await validateInput(viewer, null, null); await checkAndSendVerificationEmail(viewer); } const resetPasswordRequestInputValidator = tShape({ usernameOrEmail: t.union([tEmail, tOldValidUsername]), }); async function sendPasswordResetEmailResponder( viewer: Viewer, input: any, ): Promise { const request: ResetPasswordRequest = input; await validateInput(viewer, resetPasswordRequestInputValidator, request); await checkAndSendPasswordResetEmail(request); } async function logOutResponder(viewer: Viewer): Promise { await validateInput(viewer, null, null); if (viewer.loggedIn) { const [anonymousViewerData] = await Promise.all([ createNewAnonymousCookie({ platformDetails: viewer.platformDetails, deviceToken: viewer.deviceToken, }), deleteCookie(viewer.cookieID), ]); viewer.setNewCookie(anonymousViewerData); } return { currentUserInfo: { id: viewer.id, anonymous: true, }, }; } const deleteAccountRequestInputValidator = tShape({ password: t.maybe(tPassword), }); async function accountDeletionResponder( viewer: Viewer, input: any, ): Promise { const request: DeleteAccountRequest = input; await validateInput(viewer, deleteAccountRequestInputValidator, request); const result = await deleteAccount(viewer, request); invariant(result, 'deleteAccount should return result if handed request'); return result; } const deviceTokenUpdateRequestInputValidator = tShape({ deviceType: t.maybe(t.enums.of(['ios', 'android'])), deviceToken: t.String, }); const registerRequestInputValidator = tShape({ username: t.String, email: t.maybe(tEmail), password: tPassword, calendarQuery: t.maybe(newEntryQueryInputValidator), deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, }); async function accountCreationResponder( viewer: Viewer, input: any, ): Promise { const request: RegisterRequest = input; await 
validateInput(viewer, registerRequestInputValidator, request); return await createAccount(viewer, request); } async function processSuccessfulLogin( viewer: Viewer, input: any, userID: string, calendarQuery: ?CalendarQuery, primaryIdentityPublicKey?: ?string, socialProof?: ?SIWESocialProof, ): Promise { const request: LogInRequest = input; const newServerTime = Date.now(); const deviceToken = request.deviceTokenUpdateRequest ? request.deviceTokenUpdateRequest.deviceToken : viewer.deviceToken; const [userViewerData, notAcknowledgedPolicies] = await Promise.all([ createNewUserCookie(userID, { platformDetails: request.platformDetails, deviceToken, primaryIdentityPublicKey, socialProof, }), fetchNotAcknowledgedPolicies(userID, baseLegalPolicies), deleteCookie(viewer.cookieID), ]); viewer.setNewCookie(userViewerData); if ( notAcknowledgedPolicies.length && hasMinCodeVersion(viewer.platformDetails, 181) ) { const currentUserInfo = await fetchLoggedInUserInfo(viewer); return { notAcknowledgedPolicies, currentUserInfo: currentUserInfo, rawMessageInfos: [], truncationStatuses: {}, userInfos: [], rawEntryInfos: [], serverTime: 0, cookieChange: { threadInfos: {}, userInfos: [], }, }; } if (calendarQuery) { await setNewSession(viewer, calendarQuery, newServerTime); } const threadCursors = {}; for (const watchedThreadID of request.watchedIDs) { threadCursors[watchedThreadID] = null; } const messageSelectionCriteria = { threadCursors, joinedThreads: true }; const [ threadsResult, messagesResult, entriesResult, userInfos, currentUserInfo, ] = await Promise.all([ fetchThreadInfos(viewer), fetchMessageInfos(viewer, messageSelectionCriteria, defaultNumberPerThread), calendarQuery ? fetchEntryInfos(viewer, [calendarQuery]) : undefined, fetchKnownUserInfos(viewer), fetchLoggedInUserInfo(viewer), ]); const rawEntryInfos = entriesResult ? entriesResult.rawEntryInfos : null; const response: LogInResponse = { currentUserInfo, rawMessageInfos: messagesResult.rawMessageInfos, truncationStatuses: messagesResult.truncationStatuses, serverTime: newServerTime, userInfos: values(userInfos), cookieChange: { threadInfos: threadsResult.threadInfos, userInfos: [], }, }; if (rawEntryInfos) { return { ...response, rawEntryInfos, }; } return response; } const logInRequestInputValidator = tShape({ username: t.maybe(t.String), usernameOrEmail: t.maybe(t.union([tEmail, tOldValidUsername])), password: tPassword, watchedIDs: t.list(t.String), calendarQuery: t.maybe(entryQueryInputValidator), deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, source: t.maybe(t.enums.of(values(logInActionSources))), primaryIdentityPublicKey: t.maybe(tRegex(primaryIdentityPublicKeyRegex)), }); async function logInResponder( viewer: Viewer, input: any, ): Promise { await validateInput(viewer, logInRequestInputValidator, input); const request: LogInRequest = input; const calendarQuery = request.calendarQuery ? normalizeCalendarQuery(request.calendarQuery) : null; const promises = {}; if (calendarQuery) { promises.verifyCalendarQueryThreadIDs = verifyCalendarQueryThreadIDs( calendarQuery, ); } const username = request.username ?? 
request.usernameOrEmail; if (!username) { if (hasMinCodeVersion(viewer.platformDetails, 150)) { throw new ServerError('invalid_credentials'); } else { throw new ServerError('invalid_parameters'); } } const userQuery = SQL` SELECT id, hash, username FROM users WHERE LCASE(username) = LCASE(${username}) `; promises.userQuery = dbQuery(userQuery); const { userQuery: [userResult], } = await promiseAll(promises); if (userResult.length === 0) { if (hasMinCodeVersion(viewer.platformDetails, 150)) { throw new ServerError('invalid_credentials'); } else { throw new ServerError('invalid_parameters'); } } const userRow = userResult[0]; if (!userRow.hash || !bcrypt.compareSync(request.password, userRow.hash)) { throw new ServerError('invalid_credentials'); } const id = userRow.id.toString(); return await processSuccessfulLogin(viewer, input, id, calendarQuery); } const siweAuthRequestInputValidator = tShape({ signature: t.String, message: t.String, calendarQuery: entryQueryInputValidator, deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, watchedIDs: t.list(t.String), }); async function siweAuthResponder( viewer: Viewer, input: any, ): Promise { await validateInput(viewer, siweAuthRequestInputValidator, input); const request: SIWEAuthRequest = input; const { message, signature, deviceTokenUpdateRequest, platformDetails, } = request; const calendarQuery = normalizeCalendarQuery(request.calendarQuery); // 1. Ensure that `message` is a well formed Comm SIWE Auth message. const siweMessage: SIWEMessage = new SiweMessage(message); if (!isValidSIWEMessage(siweMessage)) { throw new ServerError('invalid_parameters'); } // 2. Ensure that the `nonce` exists in the `siwe_nonces` table // AND hasn't expired. If those conditions are met, delete the entry to // ensure that the same `nonce` can't be re-used in a future request. const wasNonceCheckedAndInvalidated = await checkAndInvalidateSIWENonceEntry( siweMessage.nonce, ); if (!wasNonceCheckedAndInvalidated) { throw new ServerError('invalid_parameters'); } // 3. Validate SIWEMessage signature and handle possible errors. try { await siweMessage.validate(signature); } catch (error) { if (error === ErrorTypes.EXPIRED_MESSAGE) { // Thrown when the `expirationTime` is present and in the past. throw new ServerError('expired_message'); } else if (error === ErrorTypes.INVALID_SIGNATURE) { // Thrown when the `validate()` function can't verify the message. throw new ServerError('invalid_signature'); } else if (error === ErrorTypes.MALFORMED_SESSION) { // Thrown when some required field is missing. throw new ServerError('malformed_session'); } else { throw new ServerError('unknown_error'); } } // 4. Pull `primaryIdentityPublicKey` out from SIWEMessage `statement` // if it was included. We expect it to be included for native clients, // and we expect it to be EXCLUDED for web clients. const { statement } = siweMessage; const primaryIdentityPublicKey = statement && isValidSIWEStatementWithPublicKey(statement) ? getPublicKeyFromSIWEStatement(statement) : null; // 5. Construct `SIWESocialProof` object with the stringified // SIWEMessage and the corresponding signature. const socialProof: SIWESocialProof = { siweMessage: siweMessage.toMessage(), siweMessageSignature: signature, }; // 6. Create account with call to `processSIWEAccountCreation(...)` // if address does not correspond to an existing user. 
let userID = await fetchUserIDForEthereumAddress(siweMessage.address); if (!userID) { const siweAccountCreationRequest = { address: siweMessage.address, calendarQuery, deviceTokenUpdateRequest, platformDetails, primaryIdentityPublicKey, socialProof, }; userID = await processSIWEAccountCreation( viewer, siweAccountCreationRequest, ); } // 7. Complete login with call to `processSuccessfulLogin(...)`. return await processSuccessfulLogin( viewer, input, userID, calendarQuery, primaryIdentityPublicKey, socialProof, ); } const updatePasswordRequestInputValidator = tShape({ code: t.String, password: tPassword, watchedIDs: t.list(t.String), calendarQuery: t.maybe(entryQueryInputValidator), deviceTokenUpdateRequest: t.maybe(deviceTokenUpdateRequestInputValidator), platformDetails: tPlatformDetails, }); async function oldPasswordUpdateResponder( viewer: Viewer, input: any, ): Promise { await validateInput(viewer, updatePasswordRequestInputValidator, input); const request: UpdatePasswordRequest = input; if (request.calendarQuery) { request.calendarQuery = normalizeCalendarQuery(request.calendarQuery); } return await updatePassword(viewer, request); } const updateUserSettingsInputValidator = tShape({ name: t.irreducible( userSettingsTypes.DEFAULT_NOTIFICATIONS, x => x === userSettingsTypes.DEFAULT_NOTIFICATIONS, ), data: t.enums.of(notificationTypeValues), }); async function updateUserSettingsResponder( viewer: Viewer, input: any, ): Promise { const request: UpdateUserSettingsRequest = input; await validateInput(viewer, updateUserSettingsInputValidator, request); return await updateUserSettings(viewer, request); } const policyAcknowledgmentRequestInputValidator = tShape({ policy: t.maybe(t.enums.of(policies)), }); async function policyAcknowledgmentResponder( viewer: Viewer, input: any, ): Promise { const request: PolicyAcknowledgmentRequest = input; await validateInput( viewer, policyAcknowledgmentRequestInputValidator, request, ); await viewerAcknowledgmentUpdater(viewer, request.policy); } export { userSubscriptionUpdateResponder, passwordUpdateResponder, sendVerificationEmailResponder, sendPasswordResetEmailResponder, logOutResponder, accountDeletionResponder, accountCreationResponder, logInResponder, siweAuthResponder, oldPasswordUpdateResponder, updateUserSettingsResponder, policyAcknowledgmentResponder, }; diff --git a/keyserver/src/responders/verification-responders.js b/keyserver/src/responders/verification-responders.js index 9b2a83a5f..7f5813ec5 100644 --- a/keyserver/src/responders/verification-responders.js +++ b/keyserver/src/responders/verification-responders.js @@ -1,27 +1,27 @@ // @flow import t from 'tcomb'; -import type { HandleVerificationCodeResult } from 'lib/types/verify-types'; -import { ServerError } from 'lib/utils/errors'; -import { tShape } from 'lib/utils/validation-utils'; +import type { HandleVerificationCodeResult } from 'lib/types/verify-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { tShape } from 'lib/utils/validation-utils.js'; -import type { Viewer } from '../session/viewer'; -import { validateInput } from '../utils/validation-utils'; +import type { Viewer } from '../session/viewer.js'; +import { validateInput } from '../utils/validation-utils.js'; const codeVerificationRequestInputValidator = tShape({ code: t.String, }); /* eslint-disable no-unused-vars */ async function codeVerificationResponder( viewer: Viewer, input: any, ): Promise { /* eslint-enable no-unused-vars */ await validateInput(viewer, codeVerificationRequestInputValidator, 
input); // We have no way to handle this request anymore throw new ServerError('deprecated'); } export { codeVerificationResponder }; diff --git a/keyserver/src/responders/version-responders.js b/keyserver/src/responders/version-responders.js index 1aee846f9..8190660c7 100644 --- a/keyserver/src/responders/version-responders.js +++ b/keyserver/src/responders/version-responders.js @@ -1,78 +1,78 @@ // @flow import type { $Response, $Request } from 'express'; import t from 'tcomb'; -import { isStaff } from 'lib/shared/user-utils'; -import type { CreateNewVersionsRequest } from 'lib/types/version-types'; -import { ServerError } from 'lib/utils/errors'; -import { tShape, tDeviceType } from 'lib/utils/validation-utils'; +import { isStaff } from 'lib/shared/user-utils.js'; +import type { CreateNewVersionsRequest } from 'lib/types/version-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { tShape, tDeviceType } from 'lib/utils/validation-utils.js'; -import createIDs from '../creators/id-creator'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; -import { validateInput } from '../utils/validation-utils'; +import createIDs from '../creators/id-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; +import { validateInput } from '../utils/validation-utils.js'; const createNewVersionInputValidator = tShape({ codeVersion: t.Number, deviceType: tDeviceType, }); async function createNewVersionResponder( viewer: Viewer, req: $Request, res: $Response, ): Promise { if (!viewer.loggedIn || !isStaff(viewer.userID)) { throw new ServerError('invalid_credentials'); } const request: CreateNewVersionsRequest = ({ codeVersion: parseInt(req.params.codeVersion), deviceType: req.params.deviceType, }: any); await validateInput(viewer, createNewVersionInputValidator, request); const [id] = await createIDs('versions', 1); const row = [id, request.codeVersion, request.deviceType, Date.now()]; const insertQuery = SQL` INSERT INTO versions (id, code_version, platform, creation_time) VALUES ${[row]} `; try { await dbQuery(insertQuery); res.json({ success: true }); } catch { await dbQuery(SQL`DELETE FROM ids WHERE id = ${id}`); res.json({ success: false }); } } async function markVersionDeployedResponder( viewer: Viewer, req: $Request, res: $Response, ): Promise { if (!viewer.loggedIn || !isStaff(viewer.userID)) { throw new ServerError('invalid_credentials'); } const request: CreateNewVersionsRequest = ({ codeVersion: parseInt(req.params.codeVersion), deviceType: req.params.deviceType, }: any); await validateInput(viewer, createNewVersionInputValidator, request); const updateQuery = SQL` UPDATE versions SET deploy_time = ${Date.now()} WHERE code_version = ${request.codeVersion} AND platform = ${request.deviceType} `; const [results] = await dbQuery(updateQuery); const success = !!(results.affectedRows && results.affectedRows > 0); res.json({ success }); } export { createNewVersionResponder, markVersionDeployedResponder }; diff --git a/keyserver/src/responders/website-responders.js b/keyserver/src/responders/website-responders.js index f11bb9436..b458b6f98 100644 --- a/keyserver/src/responders/website-responders.js +++ b/keyserver/src/responders/website-responders.js @@ -1,403 +1,403 @@ // @flow import html from 'common-tags/lib/html'; import type { $Response, $Request } from 'express'; import fs from 'fs'; -import _keyBy from 'lodash/fp/keyBy'; +import _keyBy from 
'lodash/fp/keyBy.js'; import * as React from 'react'; import ReactDOMServer from 'react-dom/server'; import { promisify } from 'util'; import { baseLegalPolicies } from 'lib/facts/policies.js'; -import { daysToEntriesFromEntryInfos } from 'lib/reducers/entry-reducer'; -import { freshMessageStore } from 'lib/reducers/message-reducer'; -import { mostRecentlyReadThread } from 'lib/selectors/thread-selectors'; -import { mostRecentMessageTimestamp } from 'lib/shared/message-utils'; +import { daysToEntriesFromEntryInfos } from 'lib/reducers/entry-reducer.js'; +import { freshMessageStore } from 'lib/reducers/message-reducer.js'; +import { mostRecentlyReadThread } from 'lib/selectors/thread-selectors.js'; +import { mostRecentMessageTimestamp } from 'lib/shared/message-utils.js'; import { threadHasPermission, threadIsPending, parsePendingThreadID, createPendingThread, -} from 'lib/shared/thread-utils'; -import { defaultWebEnabledApps } from 'lib/types/enabled-apps'; -import { defaultCalendarFilters } from 'lib/types/filter-types'; -import { defaultNumberPerThread } from 'lib/types/message-types'; -import { defaultEnabledReports } from 'lib/types/report-types'; -import { defaultConnectionInfo } from 'lib/types/socket-types'; -import { threadPermissions, threadTypes } from 'lib/types/thread-types'; -import { currentDateInTimeZone } from 'lib/utils/date-utils'; -import { ServerError } from 'lib/utils/errors'; -import { promiseAll } from 'lib/utils/promises'; -import getTitle from 'web/title/getTitle'; -import { navInfoFromURL } from 'web/url-utils'; +} from 'lib/shared/thread-utils.js'; +import { defaultWebEnabledApps } from 'lib/types/enabled-apps.js'; +import { defaultCalendarFilters } from 'lib/types/filter-types.js'; +import { defaultNumberPerThread } from 'lib/types/message-types.js'; +import { defaultEnabledReports } from 'lib/types/report-types.js'; +import { defaultConnectionInfo } from 'lib/types/socket-types.js'; +import { threadPermissions, threadTypes } from 'lib/types/thread-types.js'; +import { currentDateInTimeZone } from 'lib/utils/date-utils.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { promiseAll } from 'lib/utils/promises.js'; +import getTitle from 'web/title/getTitle.js'; +import { navInfoFromURL } from 'web/url-utils.js'; -import { fetchEntryInfos } from '../fetchers/entry-fetchers'; -import { fetchMessageInfos } from '../fetchers/message-fetchers'; +import { fetchEntryInfos } from '../fetchers/entry-fetchers.js'; +import { fetchMessageInfos } from '../fetchers/message-fetchers.js'; import { hasAnyNotAcknowledgedPolicies } from '../fetchers/policy-acknowledgment-fetchers.js'; -import { fetchThreadInfos } from '../fetchers/thread-fetchers'; +import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; import { fetchCurrentUserInfo, fetchKnownUserInfos, -} from '../fetchers/user-fetchers'; -import { setNewSession } from '../session/cookies'; -import { Viewer } from '../session/viewer'; -import { streamJSON, waitForStream } from '../utils/json-stream'; -import { getAppURLFactsFromRequestURL } from '../utils/urls'; +} from '../fetchers/user-fetchers.js'; +import { setNewSession } from '../session/cookies.js'; +import { Viewer } from '../session/viewer.js'; +import { streamJSON, waitForStream } from '../utils/json-stream.js'; +import { getAppURLFactsFromRequestURL } from '../utils/urls.js'; const { renderToNodeStream } = ReactDOMServer; const access = promisify(fs.access); const readFile = promisify(fs.readFile); const googleFontsURL = 
'https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@400;500;600&family=Inter:wght@400;500;600&display=swap'; const localFontsURL = 'fonts/local-fonts.css'; async function getFontsURL() { try { await access(localFontsURL); return localFontsURL; } catch { return googleFontsURL; } } type AssetInfo = { jsURL: string, fontsURL: string, cssInclude: string }; let assetInfo: ?AssetInfo = null; async function getAssetInfo() { if (assetInfo) { return assetInfo; } if (process.env.NODE_ENV === 'development') { const fontsURL = await getFontsURL(); assetInfo = { jsURL: 'http://localhost:8080/dev.build.js', fontsURL, cssInclude: '', }; return assetInfo; } try { const assetsString = await readFile('../web/dist/assets.json', 'utf8'); const assets = JSON.parse(assetsString); assetInfo = { jsURL: `compiled/${assets.browser.js}`, fontsURL: googleFontsURL, cssInclude: html` `, }; return assetInfo; } catch { throw new Error( 'Could not load assets.json for web build. ' + 'Did you forget to run `yarn dev` in the web folder?', ); } } let webpackCompiledRootComponent: ?React.ComponentType<{}> = null; async function getWebpackCompiledRootComponentForSSR() { if (webpackCompiledRootComponent) { return webpackCompiledRootComponent; } try { // $FlowFixMe web/dist doesn't always exist const webpackBuild = await import('web/dist/app.build.cjs'); webpackCompiledRootComponent = webpackBuild.default.default; return webpackCompiledRootComponent; } catch { throw new Error( 'Could not load app.build.cjs. ' + 'Did you forget to run `yarn dev` in the web folder?', ); } } async function websiteResponder( viewer: Viewer, req: $Request, res: $Response, ): Promise { const appURLFacts = getAppURLFactsFromRequestURL(req.originalUrl); const { basePath, baseDomain } = appURLFacts; const baseURL = basePath.replace(/\/$/, ''); const baseHref = baseDomain + baseURL; const loadingPromise = getWebpackCompiledRootComponentForSSR(); const hasNotAcknowledgedPoliciesPromise = hasAnyNotAcknowledgedPolicies( viewer.id, baseLegalPolicies, ); let initialNavInfo; try { initialNavInfo = navInfoFromURL(req.url, { now: currentDateInTimeZone(viewer.timeZone), }); } catch (e) { throw new ServerError(e.message); } const calendarQuery = { startDate: initialNavInfo.startDate, endDate: initialNavInfo.endDate, filters: defaultCalendarFilters, }; const messageSelectionCriteria = { joinedThreads: true }; const initialTime = Date.now(); const assetInfoPromise = getAssetInfo(); const threadInfoPromise = fetchThreadInfos(viewer); const messageInfoPromise = fetchMessageInfos( viewer, messageSelectionCriteria, defaultNumberPerThread, ); const entryInfoPromise = fetchEntryInfos(viewer, [calendarQuery]); const currentUserInfoPromise = fetchCurrentUserInfo(viewer); const userInfoPromise = fetchKnownUserInfos(viewer); const sessionIDPromise = (async () => { if (viewer.loggedIn) { await setNewSession(viewer, calendarQuery, initialTime); } return viewer.sessionID; })(); const threadStorePromise = (async () => { const [{ threadInfos }, hasNotAcknowledgedPolicies] = await Promise.all([ threadInfoPromise, hasNotAcknowledgedPoliciesPromise, ]); return { threadInfos: hasNotAcknowledgedPolicies ? 
{} : threadInfos }; })(); const messageStorePromise = (async () => { const [ { threadInfos }, { rawMessageInfos, truncationStatuses }, hasNotAcknowledgedPolicies, ] = await Promise.all([ threadInfoPromise, messageInfoPromise, hasNotAcknowledgedPoliciesPromise, ]); if (hasNotAcknowledgedPolicies) { return { messages: {}, threads: {}, local: {}, currentAsOf: 0, }; } const { messageStore: freshStore } = freshMessageStore( rawMessageInfos, truncationStatuses, mostRecentMessageTimestamp(rawMessageInfos, initialTime), threadInfos, ); return freshStore; })(); const entryStorePromise = (async () => { const [{ rawEntryInfos }, hasNotAcknowledgedPolicies] = await Promise.all([ entryInfoPromise, hasNotAcknowledgedPoliciesPromise, ]); if (hasNotAcknowledgedPolicies) { return { entryInfos: {}, daysToEntries: {}, lastUserInteractionCalendar: 0, }; } return { entryInfos: _keyBy('id')(rawEntryInfos), daysToEntries: daysToEntriesFromEntryInfos(rawEntryInfos), lastUserInteractionCalendar: initialTime, }; })(); const userStorePromise = (async () => { const [userInfos, hasNotAcknowledgedPolicies] = await Promise.all([ userInfoPromise, hasNotAcknowledgedPoliciesPromise, ]); return { userInfos: hasNotAcknowledgedPolicies ? {} : userInfos, inconsistencyReports: [], }; })(); const navInfoPromise = (async () => { const [ { threadInfos }, messageStore, currentUserInfo, userStore, ] = await Promise.all([ threadInfoPromise, messageStorePromise, currentUserInfoPromise, userStorePromise, ]); const finalNavInfo = initialNavInfo; const requestedActiveChatThreadID = finalNavInfo.activeChatThreadID; if ( requestedActiveChatThreadID && !threadIsPending(requestedActiveChatThreadID) && !threadHasPermission( threadInfos[requestedActiveChatThreadID], threadPermissions.VISIBLE, ) ) { finalNavInfo.activeChatThreadID = null; } if (!finalNavInfo.activeChatThreadID) { const mostRecentThread = mostRecentlyReadThread( messageStore, threadInfos, ); if (mostRecentThread) { finalNavInfo.activeChatThreadID = mostRecentThread; } } if ( finalNavInfo.activeChatThreadID && threadIsPending(finalNavInfo.activeChatThreadID) && finalNavInfo.pendingThread?.id !== finalNavInfo.activeChatThreadID ) { const pendingThreadData = parsePendingThreadID( finalNavInfo.activeChatThreadID, ); if ( pendingThreadData && pendingThreadData.threadType !== threadTypes.SIDEBAR && currentUserInfo.id ) { const { userInfos } = userStore; const members = pendingThreadData.memberIDs .map(id => userInfos[id]) .filter(Boolean); const newPendingThread = createPendingThread({ viewerID: currentUserInfo.id, threadType: pendingThreadData.threadType, members, }); finalNavInfo.activeChatThreadID = newPendingThread.id; finalNavInfo.pendingThread = newPendingThread; } } return finalNavInfo; })(); const currentAsOfPromise = (async () => { const hasNotAcknowledgedPolicies = await hasNotAcknowledgedPoliciesPromise; return hasNotAcknowledgedPolicies ? 0 : initialTime; })(); const { jsURL, fontsURL, cssInclude } = await assetInfoPromise; // prettier-ignore res.write(html` ${getTitle(0)} ${cssInclude}
`); const Loading = await loadingPromise; const reactStream = renderToNodeStream(); reactStream.pipe(res, { end: false }); await waitForStream(reactStream); res.write(html`
`); } export { websiteResponder }; diff --git a/keyserver/src/scripts/add-edit-thread-detailed-permissions.js b/keyserver/src/scripts/add-edit-thread-detailed-permissions.js index fb6878534..03e146ce5 100644 --- a/keyserver/src/scripts/add-edit-thread-detailed-permissions.js +++ b/keyserver/src/scripts/add-edit-thread-detailed-permissions.js @@ -1,52 +1,52 @@ // @flow -import bots from 'lib/facts/bots'; -import { assertThreadType } from 'lib/types/thread-types'; +import bots from 'lib/facts/bots.js'; +import { assertThreadType } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { createScriptViewer } from '../session/scripts'; -import { updateRoles } from '../updaters/role-updaters'; +import { dbQuery, SQL } from '../database/database.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { updateRoles } from '../updaters/role-updaters.js'; import { recalculateThreadPermissions, commitMembershipChangeset, -} from '../updaters/thread-permission-updaters'; -import RelationshipChangeset from '../utils/relationship-changeset'; -import { main } from './utils'; +} from '../updaters/thread-permission-updaters.js'; +import RelationshipChangeset from '../utils/relationship-changeset.js'; +import { main } from './utils.js'; async function addEditThreadDetailedPermissions() { const batchSize = 10; const fetchThreads = SQL`SELECT id, type FROM threads`; const [result] = await dbQuery(fetchThreads); const threads = result.map(row => { return { id: row.id.toString(), type: assertThreadType(row.type) }; }); const viewer = createScriptViewer(bots.commbot.userID); while (threads.length > 0) { const batch = threads.splice(0, batchSize); const membershipRows = []; const relationshipChangeset = new RelationshipChangeset(); await Promise.all( batch.map(async thread => { console.log(`updating roles for ${thread.id}`); await updateRoles(viewer, thread.id, thread.type); console.log(`recalculating permissions for ${thread.id}`); const { membershipRows: threadMembershipRows, relationshipChangeset: threadRelationshipChangeset, } = await recalculateThreadPermissions(thread.id); membershipRows.push(...threadMembershipRows); relationshipChangeset.addAll(threadRelationshipChangeset); }), ); console.log(`committing batch ${JSON.stringify(batch)}`); await commitMembershipChangeset(viewer, { membershipRows, relationshipChangeset, }); } } main([addEditThreadDetailedPermissions]); diff --git a/keyserver/src/scripts/add-indexes-for-account-deletion.js b/keyserver/src/scripts/add-indexes-for-account-deletion.js index c14a245a4..2a1ea48d9 100644 --- a/keyserver/src/scripts/add-indexes-for-account-deletion.js +++ b/keyserver/src/scripts/add-indexes-for-account-deletion.js @@ -1,19 +1,19 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { setScriptContext } from './script-context'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { setScriptContext } from './script-context.js'; +import { main } from './utils.js'; setScriptContext({ allowMultiStatementSQLQueries: true, }); async function addIndexes() { await dbQuery(SQL` ALTER TABLE memberships ADD INDEX user (user); ALTER TABLE notifications ADD INDEX user (user); ALTER TABLE relationships_directed ADD UNIQUE user2_user1 (user2, user1); `); } main([addIndexes]); diff --git a/keyserver/src/scripts/add-key-column-for-sessions.js b/keyserver/src/scripts/add-key-column-for-sessions.js index 4d2bcf7d7..11292735c 100644 --- 
a/keyserver/src/scripts/add-key-column-for-sessions.js +++ b/keyserver/src/scripts/add-key-column-for-sessions.js @@ -1,14 +1,14 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function addPublicKeyColumn() { await dbQuery(SQL` ALTER TABLE sessions ADD public_key char(116) DEFAULT NULL, ADD UNIQUE INDEX public_key (public_key); `); } main([addPublicKeyColumn]); diff --git a/keyserver/src/scripts/add-key-column-for-users.js b/keyserver/src/scripts/add-key-column-for-users.js index 94af374d1..fac7da9d1 100644 --- a/keyserver/src/scripts/add-key-column-for-users.js +++ b/keyserver/src/scripts/add-key-column-for-users.js @@ -1,14 +1,14 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function addPublicKeyColumn() { await dbQuery(SQL` ALTER TABLE users ADD public_key char(116) DEFAULT NULL, ADD UNIQUE INDEX public_key (public_key); `); } main([addPublicKeyColumn]); diff --git a/keyserver/src/scripts/add-leave-thread-permissions.js b/keyserver/src/scripts/add-leave-thread-permissions.js index dd4716700..e0279ac3b 100644 --- a/keyserver/src/scripts/add-leave-thread-permissions.js +++ b/keyserver/src/scripts/add-leave-thread-permissions.js @@ -1,31 +1,31 @@ // @flow -import { threadPermissions, threadTypes } from 'lib/types/thread-types'; +import { threadPermissions, threadTypes } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { recalculateAllThreadPermissions } from '../updaters/thread-permission-updaters'; -import { endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { recalculateAllThreadPermissions } from '../updaters/thread-permission-updaters.js'; +import { endScript } from './utils.js'; async function main() { try { await addLeaveThreadPermissions(); await recalculateAllThreadPermissions(); } catch (e) { console.warn(e); } finally { endScript(); } } async function addLeaveThreadPermissions() { const leaveThreadString = `$.${threadPermissions.LEAVE_THREAD}`; const updateAllRoles = SQL` UPDATE roles r LEFT JOIN threads t ON t.id = r.thread SET r.permissions = JSON_SET(permissions, ${leaveThreadString}, TRUE) WHERE t.type != ${threadTypes.PERSONAL} `; await dbQuery(updateAllRoles); } main(); diff --git a/keyserver/src/scripts/add-primary-column-for-cookies.js b/keyserver/src/scripts/add-primary-column-for-cookies.js index a585af727..0b5cfdbe1 100644 --- a/keyserver/src/scripts/add-primary-column-for-cookies.js +++ b/keyserver/src/scripts/add-primary-column-for-cookies.js @@ -1,13 +1,13 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function addPrimaryColumn() { await dbQuery(SQL` ALTER TABLE cookies ADD \`primary\` TINYINT(1) DEFAULT NULL; `); } main([addPrimaryColumn]); diff --git a/keyserver/src/scripts/add-source-message-column.js b/keyserver/src/scripts/add-source-message-column.js index cc9c4d594..d9b5c5f3e 100644 --- a/keyserver/src/scripts/add-source-message-column.js +++ b/keyserver/src/scripts/add-source-message-column.js @@ -1,13 +1,13 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, 
SQL } from '../database/database.js'; +import { main } from './utils.js'; async function deleteUnreadColumn() { await dbQuery(SQL` ALTER TABLE threads ADD source_message BIGINT(20) NULL DEFAULT NULL AFTER color `); } main([deleteUnreadColumn]); diff --git a/keyserver/src/scripts/add-staff.js b/keyserver/src/scripts/add-staff.js index 494aca9c7..53a54e125 100644 --- a/keyserver/src/scripts/add-staff.js +++ b/keyserver/src/scripts/add-staff.js @@ -1,26 +1,26 @@ // @flow -import bots from 'lib/facts/bots'; +import bots from 'lib/facts/bots.js'; -import { createScriptViewer } from '../session/scripts'; -import { updateThread } from '../updaters/thread-updaters'; -import { main } from './utils'; +import { createScriptViewer } from '../session/scripts.js'; +import { updateThread } from '../updaters/thread-updaters.js'; +import { main } from './utils.js'; const newStaffIDs = ['518252']; async function addStaff() { await updateThread( createScriptViewer(bots.commbot.userID), { threadID: bots.commbot.staffThreadID, changes: { newMemberIDs: newStaffIDs, }, }, { forceAddMembers: true, }, ); } main([addStaff]); diff --git a/keyserver/src/scripts/add-target-time-index-to-updates-table.js b/keyserver/src/scripts/add-target-time-index-to-updates-table.js index 7dbcf7391..48af0fbfc 100644 --- a/keyserver/src/scripts/add-target-time-index-to-updates-table.js +++ b/keyserver/src/scripts/add-target-time-index-to-updates-table.js @@ -1,23 +1,23 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { setScriptContext } from './script-context'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { setScriptContext } from './script-context.js'; +import { main } from './utils.js'; setScriptContext({ allowMultiStatementSQLQueries: true, }); async function addIndexes() { await dbQuery(SQL` ALTER TABLE updates ADD INDEX target_time (target, time); ALTER TABLE updates DROP INDEX user_key_type; ALTER TABLE updates ADD INDEX user_key_target_type_time (user, \`key\`, target, type, time); ALTER TABLE updates ADD INDEX user_key_type_time (user, \`key\`, type, time); ALTER TABLE updates ADD INDEX user_key_time (user, \`key\`, time); `); } main([addIndexes]); diff --git a/keyserver/src/scripts/add-thread-ancestry.js b/keyserver/src/scripts/add-thread-ancestry.js index feca9ffc5..55cc473e8 100644 --- a/keyserver/src/scripts/add-thread-ancestry.js +++ b/keyserver/src/scripts/add-thread-ancestry.js @@ -1,81 +1,84 @@ // @flow -import { getContainingThreadID, getCommunity } from 'lib/shared/thread-utils'; -import type { ServerThreadInfo } from 'lib/types/thread-types'; +import { + getContainingThreadID, + getCommunity, +} from 'lib/shared/thread-utils.js'; +import type { ServerThreadInfo } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { fetchServerThreadInfos } from '../fetchers/thread-fetchers'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; +import { main } from './utils.js'; async function addColumnAndIndexes() { await dbQuery(SQL` ALTER TABLE threads ADD containing_thread_id BIGINT(20) NULL AFTER parent_thread_id, ADD community BIGINT(20) NULL AFTER containing_thread_id, ADD depth INT UNSIGNED NOT NULL DEFAULT 0 AFTER community, ADD INDEX parent_thread_id (parent_thread_id), ADD INDEX community (community), ADD INDEX containing_thread_id (containing_thread_id); `); } async function setColumn() { 
const stack = [[null, SQL`t.parent_thread_id IS NULL`]]; while (stack.length > 0) { const [parentThreadInfo, predicate] = stack.shift(); const { threadInfos } = await fetchServerThreadInfos(predicate); const updatedThreadInfos = await setColumnForLayer( parentThreadInfo, threadInfos, ); for (const threadInfo of updatedThreadInfos) { stack.push([threadInfo, SQL`t.parent_thread_id = ${threadInfo.id}`]); } } } async function setColumnForLayer( parentThreadInfo: ?ServerThreadInfo, threadInfos: { +[id: string]: ServerThreadInfo }, ): Promise { const updatedThreadInfos = []; for (const threadID in threadInfos) { const threadInfo = threadInfos[threadID]; const containingThreadID = getContainingThreadID( parentThreadInfo, threadInfo.type, ); const community = getCommunity(parentThreadInfo); if (!containingThreadID && !community) { console.log( `containingThreadID and community are null for ${threadID}, ` + 'skipping...', ); updatedThreadInfos.push(threadInfo); continue; } const depth = parentThreadInfo ? parentThreadInfo.depth + 1 : 0; console.log( `setting containingThreadID to ${containingThreadID ?? 'null'}, ` + `community to ${community ?? 'null'}, and ` + `depth to ${depth} for ${threadID}`, ); await dbQuery(SQL` UPDATE threads SET containing_thread_id = ${containingThreadID}, community = ${community}, depth = ${depth} WHERE id = ${threadID} `); updatedThreadInfos.push({ ...threadInfo, containingThreadID, community, depth, }); } return updatedThreadInfos; } main([addColumnAndIndexes, setColumn]); diff --git a/keyserver/src/scripts/create-community.js b/keyserver/src/scripts/create-community.js index d122cbbbd..fba9125a7 100644 --- a/keyserver/src/scripts/create-community.js +++ b/keyserver/src/scripts/create-community.js @@ -1,20 +1,20 @@ // @flow -import ashoat from 'lib/facts/ashoat'; -import { threadTypes } from 'lib/types/thread-types'; +import ashoat from 'lib/facts/ashoat.js'; +import { threadTypes } from 'lib/types/thread-types.js'; -import { createThread } from '../creators/thread-creator'; -import { createScriptViewer } from '../session/scripts'; -import { main } from './utils'; +import { createThread } from '../creators/thread-creator.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { main } from './utils.js'; const communityName = 'New community'; async function createCommunity() { const ashoatViewer = createScriptViewer(ashoat.id); await createThread(ashoatViewer, { type: threadTypes.COMMUNITY_ROOT, name: communityName, }); } main([createCommunity]); diff --git a/keyserver/src/scripts/create-db.js b/keyserver/src/scripts/create-db.js index 6fa69a90c..0593464b4 100644 --- a/keyserver/src/scripts/create-db.js +++ b/keyserver/src/scripts/create-db.js @@ -1,6 +1,6 @@ // @flow -import { setupDB } from '../database/setup-db'; -import { main } from './utils'; +import { setupDB } from '../database/setup-db.js'; +import { main } from './utils.js'; main([setupDB]); diff --git a/keyserver/src/scripts/create-friend-relationships.js b/keyserver/src/scripts/create-friend-relationships.js index ad162f44b..05d5266d2 100644 --- a/keyserver/src/scripts/create-friend-relationships.js +++ b/keyserver/src/scripts/create-friend-relationships.js @@ -1,31 +1,31 @@ // @flow -import { undirectedStatus } from 'lib/types/relationship-types'; +import { undirectedStatus } from 'lib/types/relationship-types.js'; -import { createUndirectedRelationships } from '../creators/relationship-creators'; -import { dbQuery, SQL } from '../database/database'; -import { endScript } from './utils'; 
+import { createUndirectedRelationships } from '../creators/relationship-creators.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { endScript } from './utils.js'; async function main() { try { await createFriendRelationshipsForThreadMembers(); endScript(); } catch (e) { endScript(); console.warn(e); } } async function createFriendRelationshipsForThreadMembers() { const [result] = await dbQuery(SQL` SELECT m.thread, m.user FROM memberships m LEFT JOIN users u ON u.id = m.user WHERE m.role > 0 AND u.id IS NOT NULL ORDER BY m.user ASC `); await createUndirectedRelationships(result, undirectedStatus.FRIEND); } main(); diff --git a/keyserver/src/scripts/create-many-threads-to-trigger-crash-loop.js b/keyserver/src/scripts/create-many-threads-to-trigger-crash-loop.js index af08854bf..e3466b39f 100644 --- a/keyserver/src/scripts/create-many-threads-to-trigger-crash-loop.js +++ b/keyserver/src/scripts/create-many-threads-to-trigger-crash-loop.js @@ -1,42 +1,42 @@ // @flow -import ashoat from 'lib/facts/ashoat'; +import ashoat from 'lib/facts/ashoat.js'; -import { createThread } from '../creators/thread-creator'; -import { createScriptViewer } from '../session/scripts'; -import { main } from './utils'; +import { createThread } from '../creators/thread-creator.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { main } from './utils.js'; const testUserID = ''; const numOfThreads = 1000; async function createThreads( n: number, spammedUserID: string, spammingUserID: string, ): Promise<$ReadOnlyArray<string>> { const threads = []; const viewer = createScriptViewer(spammingUserID); const initialMemberIDs = [spammedUserID]; const threadRequest = { type: 3, initialMemberIDs, parentThreadID: '1' }; for (let i = 0; i < n; i++) { const threadResponse = await createThread(viewer, threadRequest); if (threadResponse.newThreadID) { const threadID: string = threadResponse.newThreadID; threads.push(threadID); } } return threads; } // This script is used to trigger a socket crash loop // Linear issue: https://linear.app/comm/issue/ENG-2075/reproduce-socket-crash-loop-in-production-with-artificial-test-data // Usage: set testUserID to the user you wish to trigger the crash loop for, // set the iOS physical device networking profile to 3G and run the script, // open comm on the physical device once the script has finished // the app should be in a crash loop async function createManyThreadsToTriggerCrashLoop() { await createThreads(numOfThreads, testUserID, ashoat.id); } main([createManyThreadsToTriggerCrashLoop]); diff --git a/keyserver/src/scripts/create-metadata-table.js b/keyserver/src/scripts/create-metadata-table.js index bec586bb9..f47d0e5eb 100644 --- a/keyserver/src/scripts/create-metadata-table.js +++ b/keyserver/src/scripts/create-metadata-table.js @@ -1,22 +1,22 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function createTable() { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS metadata ( name varchar(255) NOT NULL, data varchar(255) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); } async function addNameIndex() { await dbQuery(SQL` ALTER TABLE metadata ADD PRIMARY KEY (name); `); } main([createTable, addNameIndex]); diff --git a/keyserver/src/scripts/create-one-time-keys-table.js b/keyserver/src/scripts/create-one-time-keys-table.js index ec5e09382..ca8b72496 100644 --- a/keyserver/src/scripts/create-one-time-keys-table.js +++ 
b/keyserver/src/scripts/create-one-time-keys-table.js @@ -1,28 +1,28 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main, endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main, endScript } from './utils.js'; async function addOneTimeKeysTable() { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS one_time_keys ( user BIGINT(20) NOT NULL, one_time_key CHAR(43) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); } async function addUserIndex() { try { await dbQuery(SQL` ALTER TABLE one_time_keys ADD PRIMARY KEY (user, one_time_key); `); } catch (e) { console.warn(e); } finally { endScript(); } } main([addOneTimeKeysTable, addUserIndex]); diff --git a/keyserver/src/scripts/create-one-time-settings-table.js b/keyserver/src/scripts/create-one-time-settings-table.js index 026cb7ac0..3f6f35d70 100644 --- a/keyserver/src/scripts/create-one-time-settings-table.js +++ b/keyserver/src/scripts/create-one-time-settings-table.js @@ -1,29 +1,29 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main, endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main, endScript } from './utils.js'; async function addOneTimeSettingsTable() { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS settings ( user bigint(20) NOT NULL, name varchar(255) NOT NULL, data mediumtext COLLATE utf8mb4_bin DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; `); } async function addSettingsIndex() { try { await dbQuery(SQL` ALTER TABLE settings ADD PRIMARY KEY (user, name); `); } catch (e) { console.warn(e); } finally { endScript(); } } main([addOneTimeSettingsTable, addSettingsIndex]); diff --git a/keyserver/src/scripts/create-relationships.js b/keyserver/src/scripts/create-relationships.js index 426c8e511..5b2431da8 100644 --- a/keyserver/src/scripts/create-relationships.js +++ b/keyserver/src/scripts/create-relationships.js @@ -1,66 +1,66 @@ // @flow -import { undirectedStatus } from 'lib/types/relationship-types'; +import { undirectedStatus } from 'lib/types/relationship-types.js'; -import { createUndirectedRelationships } from '../creators/relationship-creators'; -import { dbQuery, SQL } from '../database/database'; -import { saveMemberships } from '../updaters/thread-permission-updaters'; -import { endScript } from './utils'; +import { createUndirectedRelationships } from '../creators/relationship-creators.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { saveMemberships } from '../updaters/thread-permission-updaters.js'; +import { endScript } from './utils.js'; async function main() { try { await alterMemberships(); await createMembershipsForFormerMembers(); await createKnowOfRelationships(); endScript(); } catch (e) { endScript(); console.warn(e); } } async function alterMemberships() { await dbQuery( SQL`ALTER TABLE memberships CHANGE permissions permissions json DEFAULT NULL`, ); } async function createMembershipsForFormerMembers() { const [result] = await dbQuery(SQL` SELECT DISTINCT thread, user FROM messages m WHERE NOT EXISTS ( SELECT thread, user FROM memberships mm WHERE m.thread = mm.thread AND m.user = mm.user ) `); const rowsToSave = []; for (const row of result) { rowsToSave.push({ operation: 'save', userID: row.user.toString(), threadID: row.thread.toString(), userNeedsFullThreadDetails: false, intent: 'none', permissions: null, permissionsForChildren: null, role: '-1', oldRole: '-1', }); } await saveMemberships(rowsToSave); } async 
function createKnowOfRelationships() { const [result] = await dbQuery(SQL` SELECT thread, user FROM memberships UNION SELECT thread, user FROM messages ORDER BY user ASC `); await createUndirectedRelationships(result, undirectedStatus.KNOW_OF); } main(); diff --git a/keyserver/src/scripts/create-sidebar-permissions.js b/keyserver/src/scripts/create-sidebar-permissions.js index d66369f2d..910821047 100644 --- a/keyserver/src/scripts/create-sidebar-permissions.js +++ b/keyserver/src/scripts/create-sidebar-permissions.js @@ -1,42 +1,42 @@ // @flow import { threadPermissions, threadPermissionPropagationPrefixes, -} from 'lib/types/thread-types'; +} from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { recalculateAllThreadPermissions } from '../updaters/thread-permission-updaters'; -import { endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { recalculateAllThreadPermissions } from '../updaters/thread-permission-updaters.js'; +import { endScript } from './utils.js'; async function main() { try { await createSidebarPermissions(); await recalculateAllThreadPermissions(); } catch (e) { console.warn(e); } finally { endScript(); } } async function createSidebarPermissions() { const createSidebarsString = `$.${threadPermissions.CREATE_SIDEBARS}`; const updateAllRoles = SQL` UPDATE roles SET permissions = JSON_SET(permissions, ${createSidebarsString}, TRUE) `; await dbQuery(updateAllRoles); const descendantSidebarsString = `$.${threadPermissionPropagationPrefixes.DESCENDANT}` + threadPermissions.CREATE_SIDEBARS; const updateAdminRoles = SQL` UPDATE roles SET permissions = JSON_SET(permissions, ${descendantSidebarsString}, TRUE) WHERE name = 'Admins' `; await dbQuery(updateAdminRoles); } main(); diff --git a/keyserver/src/scripts/create-user-messages-table.js b/keyserver/src/scripts/create-user-messages-table.js index 70373c7c5..72f552df1 100644 --- a/keyserver/src/scripts/create-user-messages-table.js +++ b/keyserver/src/scripts/create-user-messages-table.js @@ -1,30 +1,30 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function createTable() { await dbQuery(SQL` CREATE TABLE IF NOT EXISTS user_messages ( recipient bigint(20) NOT NULL, thread bigint(20) NOT NULL, message bigint(20) NOT NULL, time bigint(20) NOT NULL, data mediumtext COLLATE utf8mb4_bin DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; `); } async function addIndices() { try { await dbQuery(SQL` ALTER TABLE user_messages ADD INDEX recipient_time (recipient, time), ADD INDEX recipient_thread_time (recipient, thread, time), ADD INDEX thread (thread), ADD PRIMARY KEY (recipient, message); `); } catch {} } main([createTable, addIndices]); diff --git a/keyserver/src/scripts/delete-emails.js b/keyserver/src/scripts/delete-emails.js index bb9e36b36..c92061b0b 100644 --- a/keyserver/src/scripts/delete-emails.js +++ b/keyserver/src/scripts/delete-emails.js @@ -1,18 +1,18 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { setScriptContext } from './script-context'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { setScriptContext } from './script-context.js'; +import { main } from './utils.js'; setScriptContext({ allowMultiStatementSQLQueries: true, }); async function deleteEmails() { await dbQuery(SQL` DROP TABLE 
verifications; ALTER TABLE users DROP email, DROP email_verified; `); } main([deleteEmails]); diff --git a/keyserver/src/scripts/delete-memberships-of-deleted-users.js b/keyserver/src/scripts/delete-memberships-of-deleted-users.js index 73542c350..c389c9331 100644 --- a/keyserver/src/scripts/delete-memberships-of-deleted-users.js +++ b/keyserver/src/scripts/delete-memberships-of-deleted-users.js @@ -1,17 +1,17 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function deleteMemberships() { const query = SQL` DELETE m FROM memberships m LEFT JOIN users u ON u.id = m.user WHERE m.role = -1 AND u.id IS NULL `; await dbQuery(query); } main([deleteMemberships]); diff --git a/keyserver/src/scripts/delete-unread-column.js b/keyserver/src/scripts/delete-unread-column.js index b843974e0..246b1b4eb 100644 --- a/keyserver/src/scripts/delete-unread-column.js +++ b/keyserver/src/scripts/delete-unread-column.js @@ -1,19 +1,19 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { endScript } from './utils.js'; async function deleteUnreadColumn() { try { await dbQuery(SQL` ALTER TABLE memberships DROP COLUMN unread `); } catch (e) { console.warn(e); } finally { endScript(); } } deleteUnreadColumn(); diff --git a/keyserver/src/scripts/fix-new-thread-types.js b/keyserver/src/scripts/fix-new-thread-types.js index d22d8d3ff..277d07ef5 100644 --- a/keyserver/src/scripts/fix-new-thread-types.js +++ b/keyserver/src/scripts/fix-new-thread-types.js @@ -1,119 +1,119 @@ // @flow -import bots from 'lib/facts/bots'; -import { threadTypes, assertThreadType } from 'lib/types/thread-types'; +import bots from 'lib/facts/bots.js'; +import { threadTypes, assertThreadType } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { createScriptViewer } from '../session/scripts'; -import { updateThread } from '../updaters/thread-updaters'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { updateThread } from '../updaters/thread-updaters.js'; +import { main } from './utils.js'; const batchSize = 10; const updateThreadOptions = { forceUpdateRoot: true }; const threadObjectComparator = (a, b) => a.id - b.id; // When we introduced threadTypes.PERSONAL and threadTypes.PRIVATE, we made some // mistakes in how we converted existing threads into the new thread types: // (1) For both PRIVATE and PERSONAL, we didn't handle converting threads that // had multiple roles properly. updateRoles was written to handle this, but // we missed it and wrote some code that just converted all roles to the new // role type instead of deleting extra roles and migrating those members // over to the new single role. // (2) We allowed multiple threads per user to be converted into PRIVATE // threads. // (3) We allowed threads with a parent to be converted into PRIVATE threads. // (4) We forgot to include EDIT_ENTRIES permissions for PRIVATE threads. 
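// [Editor's summary, not part of the original patch.] The migration below addresses the mistakes listed above. It fetches every PERSONAL and PRIVATE thread, re-saves all PERSONAL threads through updateThread with forceUpdateRoot (intended to regenerate their roles and permissions, covering 1 and 4), and for each user keeps only the first parentless PRIVATE thread in ascending-ID order as that user's canonical PRIVATE thread, converting every other candidate back to COMMUNITY_SECRET_SUBTHREAD (covering 2 and 3). For example, if a user's candidates sorted by ID are [{id: '10', parent: '5'}, {id: '12', parent: null}, {id: '15', parent: null}], thread '12' stays PRIVATE while '10' and '15' are unset. Updates run in batches of batchSize under the commbot script viewer.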
async function fixNewThreadTypes() { const fetchBrokenThreads = SQL` SELECT t.id, t.type, t.parent_thread_id, MIN(m.user) AS user FROM threads t LEFT JOIN memberships m ON m.thread = t.id WHERE t.type IN (${[threadTypes.PERSONAL, threadTypes.PRIVATE]}) GROUP BY t.id `; const [result] = await dbQuery(fetchBrokenThreads); const forceUpdatePersonalThreadIDs = new Set(); const privateThreadsByUser = new Map(); for (const row of result) { const id = row.id.toString(); const threadType = assertThreadType(row.type); if (threadType === threadTypes.PERSONAL) { forceUpdatePersonalThreadIDs.add(id); continue; } const user = row.user.toString(); const parentThreadID = row.parent_thread_id ? row.parent_thread_id.toString() : null; let userPrivateThreads = privateThreadsByUser.get(user); if (!userPrivateThreads) { userPrivateThreads = new Set(); privateThreadsByUser.set(user, userPrivateThreads); } userPrivateThreads.add({ id, parentThreadID }); } const forceUpdatePrivateThreadIDs = new Set(); const unsetPrivateThreads = new Set(); for (const userPrivateThreads of privateThreadsByUser.values()) { const sortedPrivateThreads = [...userPrivateThreads].sort( threadObjectComparator, ); while (sortedPrivateThreads.length > 0) { const privateThread = sortedPrivateThreads.shift(); if (!privateThread.parentThreadID) { forceUpdatePrivateThreadIDs.add(privateThread.id); break; } unsetPrivateThreads.add(privateThread.id); } for (const privateThread of sortedPrivateThreads) { unsetPrivateThreads.add(privateThread.id); } } const updateThreadRequests = []; for (const threadID of forceUpdatePersonalThreadIDs) { updateThreadRequests.push({ threadID, changes: { type: threadTypes.PERSONAL, }, }); } for (const threadID of forceUpdatePrivateThreadIDs) { updateThreadRequests.push({ threadID, changes: { type: threadTypes.PRIVATE, }, }); } for (const threadID of unsetPrivateThreads) { updateThreadRequests.push({ threadID, changes: { type: threadTypes.COMMUNITY_SECRET_SUBTHREAD, description: '', }, }); } const viewer = createScriptViewer(bots.commbot.userID); while (updateThreadRequests.length > 0) { const batch = updateThreadRequests.splice(0, batchSize); await Promise.all( batch.map(async updateThreadRequest => { console.log(`updating ${JSON.stringify(updateThreadRequest)}`); return await updateThread( viewer, updateThreadRequest, updateThreadOptions, ); }), ); } } main([fixNewThreadTypes]); diff --git a/keyserver/src/scripts/generate-olm-config.js b/keyserver/src/scripts/generate-olm-config.js index a1b1f3540..a3ed9286c 100644 --- a/keyserver/src/scripts/generate-olm-config.js +++ b/keyserver/src/scripts/generate-olm-config.js @@ -1,39 +1,39 @@ // @flow import olm from '@matrix-org/olm'; import fs from 'fs'; import path from 'path'; import uuid from 'uuid'; -import { main } from './utils'; +import { main } from './utils.js'; const olmConfigRelativePath = './secrets/olm_config.json'; async function generateOlmConfig() { await olm.init(); const account = new olm.Account(); account.create(); const picklingKey = uuid.v4(); const pickledAccount = account.pickle(picklingKey); const olmConfig = { picklingKey: picklingKey, pickledAccount: pickledAccount, }; const scriptWorkingDirectory = path.resolve(); if (!scriptWorkingDirectory.endsWith('comm/keyserver')) { throw new Error( 'Script must be run in keyserver directory in comm project.', ); } const olmConfigFilePath = path.join( scriptWorkingDirectory, olmConfigRelativePath, ); fs.writeFileSync(olmConfigFilePath, JSON.stringify(olmConfig)); } main([generateOlmConfig]); diff --git 
a/keyserver/src/scripts/image-size.js b/keyserver/src/scripts/image-size.js index 90c69adc3..47996770b 100644 --- a/keyserver/src/scripts/image-size.js +++ b/keyserver/src/scripts/image-size.js @@ -1,36 +1,36 @@ // @flow import sizeOf from 'buffer-image-size'; -import { dbQuery, SQL } from '../database/database'; -import { endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { endScript } from './utils.js'; async function main() { try { await addImageSizeToUploadsTable(); endScript(); } catch (e) { endScript(); console.warn(e); } } async function addImageSizeToUploadsTable() { await dbQuery(SQL`ALTER TABLE uploads ADD extra JSON NULL AFTER secret;`); const [result] = await dbQuery(SQL` SELECT id, content FROM uploads WHERE type = "photo" AND extra IS NULL `); for (const row of result) { const { height, width } = sizeOf(row.content); const dimensions = JSON.stringify({ height, width }); await dbQuery(SQL` UPDATE uploads SET extra = ${dimensions} WHERE id = ${row.id} `); } } main(); diff --git a/keyserver/src/scripts/make-channel-private.js b/keyserver/src/scripts/make-channel-private.js index 78c7c029e..35392e4ba 100644 --- a/keyserver/src/scripts/make-channel-private.js +++ b/keyserver/src/scripts/make-channel-private.js @@ -1,20 +1,20 @@ // @flow -import ashoat from 'lib/facts/ashoat'; -import { threadTypes } from 'lib/types/thread-types'; +import ashoat from 'lib/facts/ashoat.js'; +import { threadTypes } from 'lib/types/thread-types.js'; -import { createScriptViewer } from '../session/scripts'; -import { updateThread } from '../updaters/thread-updaters'; -import { main } from './utils'; +import { createScriptViewer } from '../session/scripts.js'; +import { updateThread } from '../updaters/thread-updaters.js'; +import { main } from './utils.js'; const channelID = '-1'; async function makeChannelPrivate() { const viewer = createScriptViewer(ashoat.id); await updateThread(viewer, { threadID: channelID, changes: { type: threadTypes.COMMUNITY_SECRET_SUBTHREAD }, }); } main([makeChannelPrivate]); diff --git a/keyserver/src/scripts/make-notif-columns-optional.js b/keyserver/src/scripts/make-notif-columns-optional.js index 81c79189a..8ac80b0b6 100644 --- a/keyserver/src/scripts/make-notif-columns-optional.js +++ b/keyserver/src/scripts/make-notif-columns-optional.js @@ -1,24 +1,24 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { endScript } from './utils.js'; async function main() { try { await makeNotifColumnsOptional(); } catch (e) { console.warn(e); } finally { endScript(); } } async function makeNotifColumnsOptional() { await dbQuery(SQL` ALTER TABLE notifications CHANGE thread thread BIGINT(20) NULL DEFAULT NULL, CHANGE message message BIGINT(20) NULL DEFAULT NULL `); } main(); diff --git a/keyserver/src/scripts/make-source-message-unique.js b/keyserver/src/scripts/make-source-message-unique.js index 864110ee2..3d44cbc23 100644 --- a/keyserver/src/scripts/make-source-message-unique.js +++ b/keyserver/src/scripts/make-source-message-unique.js @@ -1,13 +1,13 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function makeSourceMessageUnique() { await dbQuery(SQL` ALTER TABLE threads ADD UNIQUE (source_message) `); } main([makeSourceMessageUnique]); diff --git 
a/keyserver/src/scripts/merge-users.js b/keyserver/src/scripts/merge-users.js index 59b2f85ee..f130c7bd3 100644 --- a/keyserver/src/scripts/merge-users.js +++ b/keyserver/src/scripts/merge-users.js @@ -1,194 +1,194 @@ // @flow -import type { Shape } from 'lib/types/core'; -import type { ServerThreadInfo } from 'lib/types/thread-types'; -import { type UpdateData, updateTypes } from 'lib/types/update-types'; - -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import { deleteAccount } from '../deleters/account-deleters'; -import { fetchServerThreadInfos } from '../fetchers/thread-fetchers'; -import { createScriptViewer } from '../session/scripts'; +import type { Shape } from 'lib/types/core.js'; +import type { ServerThreadInfo } from 'lib/types/thread-types.js'; +import { type UpdateData, updateTypes } from 'lib/types/update-types.js'; + +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import { deleteAccount } from '../deleters/account-deleters.js'; +import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; +import { createScriptViewer } from '../session/scripts.js'; import { changeRole, commitMembershipChangeset, -} from '../updaters/thread-permission-updaters'; -import RelationshipChangeset from '../utils/relationship-changeset'; -import { endScript } from './utils'; +} from '../updaters/thread-permission-updaters.js'; +import RelationshipChangeset from '../utils/relationship-changeset.js'; +import { endScript } from './utils.js'; async function main() { try { await mergeUsers('7147', '15972', { username: true, password: true }); endScript(); } catch (e) { endScript(); console.warn(e); } } type ReplaceUserInfo = Shape<{ +username: boolean, +password: boolean, }>; async function mergeUsers( fromUserID: string, toUserID: string, replaceUserInfo?: ReplaceUserInfo, ) { let updateUserRowQuery = null; let updateDatas = []; if (replaceUserInfo) { const replaceUserResult = await replaceUser( fromUserID, toUserID, replaceUserInfo, ); ({ sql: updateUserRowQuery, updateDatas } = replaceUserResult); } const usersGettingUpdate = new Set(); const usersNeedingUpdate = new Set(); const needUserInfoUpdate = replaceUserInfo && replaceUserInfo.username; const setGettingUpdate = (threadInfo: ServerThreadInfo) => { if (!needUserInfoUpdate) { return; } for (const { id } of threadInfo.members) { usersGettingUpdate.add(id); usersNeedingUpdate.delete(id); } }; const setNeedingUpdate = (threadInfo: ServerThreadInfo) => { if (!needUserInfoUpdate) { return; } for (const { id } of threadInfo.members) { if (!usersGettingUpdate.has(id)) { usersNeedingUpdate.add(id); } } }; const newThreadRolePairs = []; const { threadInfos } = await fetchServerThreadInfos(); for (const threadID in threadInfos) { const threadInfo = threadInfos[threadID]; const fromUserExistingMember = threadInfo.members.find( memberInfo => memberInfo.id === fromUserID, ); if (!fromUserExistingMember) { setNeedingUpdate(threadInfo); continue; } const { role } = fromUserExistingMember; if (!role) { // Only transfer explicit memberships setNeedingUpdate(threadInfo); continue; } const toUserExistingMember = threadInfo.members.find( memberInfo => memberInfo.id === toUserID, ); if (!toUserExistingMember || !toUserExistingMember.role) { setGettingUpdate(threadInfo); 
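// [Editor's comment, not part of the original patch.] toUserID is not yet an explicit member of this thread, so record the (threadID, role) pair; the changeRole calls further down grant toUserID the role that fromUserID held here.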
newThreadRolePairs.push([threadID, role]); } else { setNeedingUpdate(threadInfo); } } const fromViewer = createScriptViewer(fromUserID); await deleteAccount(fromViewer); if (updateUserRowQuery) { await dbQuery(updateUserRowQuery); } const time = Date.now(); for (const userID of usersNeedingUpdate) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID, time, updatedUserID: toUserID, }); } await createUpdates(updateDatas); const changesets = await Promise.all( newThreadRolePairs.map(([threadID, role]) => changeRole(threadID, [toUserID], role), ), ); const membershipRows = []; const relationshipChangeset = new RelationshipChangeset(); for (const currentChangeset of changesets) { const { membershipRows: currentMembershipRows, relationshipChangeset: currentRelationshipChangeset, } = currentChangeset; membershipRows.push(...currentMembershipRows); relationshipChangeset.addAll(currentRelationshipChangeset); } if (membershipRows.length > 0 || relationshipChangeset.getRowCount() > 0) { const toViewer = createScriptViewer(toUserID); const changeset = { membershipRows, relationshipChangeset }; await commitMembershipChangeset(toViewer, changeset); } } type ReplaceUserResult = { sql: ?SQLStatementType, updateDatas: UpdateData[], }; async function replaceUser( fromUserID: string, toUserID: string, replaceUserInfo: ReplaceUserInfo, ): Promise { if (Object.keys(replaceUserInfo).length === 0) { return { sql: null, updateDatas: [], }; } const fromUserQuery = SQL` SELECT username, hash FROM users WHERE id = ${fromUserID} `; const [fromUserResult] = await dbQuery(fromUserQuery); const [firstResult] = fromUserResult; if (!firstResult) { throw new Error(`couldn't fetch fromUserID ${fromUserID}`); } const changedFields = {}; if (replaceUserInfo.username) { changedFields.username = firstResult.username; } if (replaceUserInfo.password) { changedFields.hash = firstResult.hash; } const updateUserRowQuery = SQL` UPDATE users SET ${changedFields} WHERE id = ${toUserID} `; const updateDatas = []; if (replaceUserInfo.username) { updateDatas.push({ type: updateTypes.UPDATE_CURRENT_USER, userID: toUserID, time: Date.now(), }); } return { sql: updateUserRowQuery, updateDatas, }; } main(); diff --git a/keyserver/src/scripts/move-threads.js b/keyserver/src/scripts/move-threads.js index 269e15415..1155ba6a7 100644 --- a/keyserver/src/scripts/move-threads.js +++ b/keyserver/src/scripts/move-threads.js @@ -1,36 +1,36 @@ // @flow -import ashoat from 'lib/facts/ashoat'; -import { threadTypes } from 'lib/types/thread-types'; +import ashoat from 'lib/facts/ashoat.js'; +import { threadTypes } from 'lib/types/thread-types.js'; -import { createScriptViewer } from '../session/scripts'; -import { updateThread } from '../updaters/thread-updaters'; -import { main } from './utils'; +import { createScriptViewer } from '../session/scripts.js'; +import { updateThread } from '../updaters/thread-updaters.js'; +import { main } from './utils.js'; async function moveThreads() { const viewer = createScriptViewer(ashoat.id); await updateThread( viewer, { threadID: '1251682', // comm global hq changes: { type: threadTypes.COMMUNITY_SECRET_SUBTHREAD, parentThreadID: '311733', // Comm }, }, { ignorePermissions: true }, ); await updateThread( viewer, { threadID: '1512796', // Bird App changes: { type: threadTypes.COMMUNITY_OPEN_SUBTHREAD, parentThreadID: '311733', // Comm }, }, { ignorePermissions: true }, ); } main([moveThreads]); diff --git a/keyserver/src/scripts/rename-sidebar-message-fields.js 
b/keyserver/src/scripts/rename-sidebar-message-fields.js index 19a55b6d6..cf171eb31 100644 --- a/keyserver/src/scripts/rename-sidebar-message-fields.js +++ b/keyserver/src/scripts/rename-sidebar-message-fields.js @@ -1,36 +1,36 @@ // @flow -import { messageTypes } from 'lib/types/message-types'; +import { messageTypes } from 'lib/types/message-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function renameSidebarSource() { const query = SQL` UPDATE messages SET content = JSON_REMOVE( JSON_SET(content, '$.sourceMessageID', JSON_EXTRACT(content, '$.initialMessageID') ), '$.initialMessageID' ) WHERE type = ${messageTypes.SIDEBAR_SOURCE} `; await dbQuery(query); } async function renameCreateSidebar() { const query = SQL` UPDATE messages SET content = JSON_REMOVE( JSON_SET(content, '$.sourceMessageAuthorID', JSON_EXTRACT(content, '$.initialMessageAuthorID') ), '$.initialMessageAuthorID' ) WHERE type = ${messageTypes.CREATE_SIDEBAR} `; await dbQuery(query); } main([renameSidebarSource, renameCreateSidebar]); diff --git a/keyserver/src/scripts/rename-user-column-for-one-time-keys.js b/keyserver/src/scripts/rename-user-column-for-one-time-keys.js index cb6f535ef..b48497996 100644 --- a/keyserver/src/scripts/rename-user-column-for-one-time-keys.js +++ b/keyserver/src/scripts/rename-user-column-for-one-time-keys.js @@ -1,14 +1,14 @@ // @flow -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function renameUserToSession() { await dbQuery(SQL` ALTER TABLE one_time_keys CHANGE COLUMN user session bigint(20) NOT NULL; `); } main([renameUserToSession]); diff --git a/keyserver/src/scripts/rename-user.js b/keyserver/src/scripts/rename-user.js index 81c31bcbf..ff54f7902 100644 --- a/keyserver/src/scripts/rename-user.js +++ b/keyserver/src/scripts/rename-user.js @@ -1,40 +1,40 @@ // @flow -import { updateTypes } from 'lib/types/update-types'; +import { updateTypes } from 'lib/types/update-types.js'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; -import { fetchKnownUserInfos } from '../fetchers/user-fetchers'; -import { createScriptViewer } from '../session/scripts'; -import { main } from './utils'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchKnownUserInfos } from '../fetchers/user-fetchers.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { main } from './utils.js'; const userID = '5'; const newUsername = 'commbot'; async function renameUser() { const [adjacentUsers] = await Promise.all([ fetchKnownUserInfos(createScriptViewer(userID)), dbQuery( SQL`UPDATE users SET username = ${newUsername} WHERE id = ${userID}`, ), ]); const updateDatas = []; const time = Date.now(); updateDatas.push({ type: updateTypes.UPDATE_CURRENT_USER, userID, time, }); for (const adjacentUserID in adjacentUsers) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID: adjacentUserID, time, updatedUserID: userID, }); } await createUpdates(updateDatas); } main([renameUser]); diff --git a/keyserver/src/scripts/rescind-notifs.js b/keyserver/src/scripts/rescind-notifs.js index 241fd73a0..3d5e31e8e 100644 --- a/keyserver/src/scripts/rescind-notifs.js +++ 
b/keyserver/src/scripts/rescind-notifs.js @@ -1,44 +1,44 @@ // @flow -import { threadTypes } from 'lib/types/thread-types'; +import { threadTypes } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { createScriptViewer } from '../session/scripts'; -import { activityUpdater } from '../updaters/activity-updaters'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { activityUpdater } from '../updaters/activity-updaters.js'; +import { main } from './utils.js'; async function rescindNotifs() { const fetchRescindThreadInfo = SQL` SELECT m.user, m.thread, m.last_message FROM users u INNER JOIN memberships m ON m.user = u.id INNER JOIN threads t ON t.id = m.thread WHERE t.type IN (${[threadTypes.PERSONAL, threadTypes.PRIVATE]}) `; const [result] = await dbQuery(fetchRescindThreadInfo); const usersToActivityUpdates = new Map(); for (const row of result) { const user = row.user.toString(); let activityUpdates = usersToActivityUpdates.get(user); if (!activityUpdates) { activityUpdates = []; usersToActivityUpdates.set(user, activityUpdates); } activityUpdates.push({ focus: false, threadID: row.thread.toString(), latestMessage: row.last_message.toString(), }); } for (const [user, activityUpdates] of usersToActivityUpdates) { await activityUpdater(createScriptViewer(user), { updates: activityUpdates, }); } } main([rescindNotifs]); diff --git a/keyserver/src/scripts/reset-password.js b/keyserver/src/scripts/reset-password.js index ca96732f0..175b28fdd 100644 --- a/keyserver/src/scripts/reset-password.js +++ b/keyserver/src/scripts/reset-password.js @@ -1,16 +1,16 @@ // @flow import bcrypt from 'twin-bcrypt'; -import { dbQuery, SQL } from '../database/database'; -import { main } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { main } from './utils.js'; const userID = '-1'; const password = 'password'; async function updatePassword() { const hash = bcrypt.hashSync(password); await dbQuery(SQL`UPDATE users SET hash = ${hash} WHERE id = ${userID}`); } main([updatePassword]); diff --git a/keyserver/src/scripts/set-last-read-messages.js b/keyserver/src/scripts/set-last-read-messages.js index cf674f558..7ef3397c4 100644 --- a/keyserver/src/scripts/set-last-read-messages.js +++ b/keyserver/src/scripts/set-last-read-messages.js @@ -1,130 +1,130 @@ // @flow -import { messageTypes } from 'lib/types/message-types'; -import { threadPermissions } from 'lib/types/thread-types'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { endScript } from './utils'; +import { dbQuery, SQL } from '../database/database.js'; +import { endScript } from './utils.js'; async function main() { try { await createLastMessageColumn(); await setLastReadMessage(); } catch (e) { console.warn(e); } finally { endScript(); } } async function createLastMessageColumn() { try { return await dbQuery(SQL` ALTER TABLE memberships ADD last_read_message bigint(20) NOT NULL DEFAULT 0, ADD last_message bigint(20) NOT NULL DEFAULT 0 `); } catch (e) { console.info('Column probably exists', e); } } async function setLastReadMessage() { const knowOfExtractString = `$.${threadPermissions.KNOW_OF}.value`; const [result] = await dbQuery(SQL` SELECT MAX(msg.id) AS message, msg.thread, stm.user FROM messages msg LEFT JOIN memberships stm ON 
msg.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = msg.content WHERE msg.type != ${messageTypes.CREATE_SUB_THREAD} OR JSON_EXTRACT(stm.permissions, ${knowOfExtractString}) IS TRUE GROUP BY msg.thread, stm.user `); const lastMessages = []; const userSpecificLastMessages = []; for (const row of result) { if (row.user) { userSpecificLastMessages.push({ message: row.message, thread: row.thread, user: row.user, }); } else { lastMessages.push({ message: row.message, thread: row.thread, }); } } if (lastMessages.length > 0) { const lastMessageExpression = SQL`last_message = CASE `; const lastReadMessageExpression = SQL`last_read_message = CASE `; for (const entry of lastMessages) { lastMessageExpression.append(SQL` WHEN thread = ${entry.thread} THEN ${entry.message} `); lastReadMessageExpression.append(SQL` WHEN thread = ${entry.thread} AND unread = 0 THEN ${entry.message} `); } lastMessageExpression.append(SQL` ELSE last_message END, `); lastReadMessageExpression.append(SQL` ELSE last_read_message END `); const query = SQL` UPDATE memberships SET `; query.append(lastMessageExpression); query.append(lastReadMessageExpression); await dbQuery(query); } if (userSpecificLastMessages.length > 0) { const lastMessageExpression = SQL` last_message = GREATEST(last_message, CASE `; const lastReadMessageExpression = SQL` last_read_message = GREATEST(last_read_message, CASE `; for (const entry of userSpecificLastMessages) { lastMessageExpression.append(SQL` WHEN thread = ${entry.thread} AND user = ${entry.user} THEN ${entry.message} `); lastReadMessageExpression.append(SQL` WHEN thread = ${entry.thread} AND unread = 0 AND user = ${entry.user} THEN ${entry.message} `); } lastMessageExpression.append(SQL` ELSE last_message END), `); lastReadMessageExpression.append(SQL` ELSE last_read_message END) `); const query = SQL` UPDATE memberships SET `; query.append(lastMessageExpression); query.append(lastReadMessageExpression); await dbQuery(query); } } main(); diff --git a/keyserver/src/scripts/setup-sidebars.js b/keyserver/src/scripts/setup-sidebars.js index 8ff989863..dc661bed8 100644 --- a/keyserver/src/scripts/setup-sidebars.js +++ b/keyserver/src/scripts/setup-sidebars.js @@ -1,125 +1,125 @@ // @flow -import { messageSpecs } from 'lib/shared/messages/message-specs'; -import { messageTypes } from 'lib/types/message-types'; -import { updateTypes } from 'lib/types/update-types'; +import { messageSpecs } from 'lib/shared/messages/message-specs.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, mergeOrConditions, SQL } from '../database/database'; -import { main } from './utils'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, mergeOrConditions, SQL } from '../database/database.js'; +import { main } from './utils.js'; async function addRepliesCountColumn() { const update = SQL` ALTER TABLE threads ADD replies_count INT UNSIGNED NOT NULL DEFAULT 0 `; try { await dbQuery(update); } catch (e) { console.log(e, 'replies-count column already exists'); } } async function addSenderColumn() { const update = SQL` ALTER TABLE memberships ADD sender TINYINT(1) UNSIGNED NOT NULL DEFAULT 0 `; try { await dbQuery(update); } catch (e) { console.log(e, 'sender column already exists'); } } async function computeRepliesCount() { const includedMessageTypes = Object.keys(messageTypes) .map(key => messageTypes[key]) .filter(type => 
messageSpecs[type].includedInRepliesCount); const sidebarMembersQuery = SQL` SELECT t.id AS threadID, m.user AS userID FROM threads t INNER JOIN memberships m ON t.id = m.thread WHERE t.source_message IS NOT NULL AND m.role >= 0 `; const readCountUpdate = SQL` UPDATE threads t INNER JOIN ( SELECT thread AS threadID, COUNT(*) AS count FROM messages WHERE type IN (${includedMessageTypes}) GROUP BY thread ) c ON c.threadID = t.id SET t.replies_count = c.count WHERE t.source_message IS NOT NULL `; const [[sidebarMembers]] = await Promise.all([ dbQuery(sidebarMembersQuery), dbQuery(readCountUpdate), ]); const time = Date.now(); const updates = sidebarMembers.map(({ threadID, userID }) => ({ userID, time, threadID, type: updateTypes.UPDATE_THREAD, })); await createUpdates(updates); } export async function determineSenderStatus() { const includedMessageTypes = Object.keys(messageTypes) .map(key => messageTypes[key]) .filter(type => messageSpecs[type].includedInRepliesCount); const sendersQuery = SQL` SELECT DISTINCT m.thread AS threadID, m.user AS userID FROM messages m WHERE m.type IN (${includedMessageTypes}) `; const [senders] = await dbQuery(sendersQuery); const conditions = senders.map( ({ threadID, userID }) => SQL`thread = ${threadID} AND user = ${userID}`, ); const setSenders = SQL` UPDATE memberships m SET m.sender = 1 WHERE `; setSenders.append(mergeOrConditions(conditions)); const updatedThreads = new Set(senders.map(({ threadID }) => threadID)); const affectedMembersQuery = SQL` SELECT thread AS threadID, user AS userID FROM memberships WHERE thread IN (${[...updatedThreads]}) AND role >= 0 `; const [[affectedMembers]] = await Promise.all([ dbQuery(affectedMembersQuery), dbQuery(setSenders), ]); const time = Date.now(); const updates = affectedMembers.map(({ threadID, userID }) => ({ userID, time, threadID, type: updateTypes.UPDATE_THREAD, })); await createUpdates(updates); } main([ addRepliesCountColumn, addSenderColumn, computeRepliesCount, determineSenderStatus, ]); diff --git a/keyserver/src/scripts/sidebar-know-of-migration.js b/keyserver/src/scripts/sidebar-know-of-migration.js index f33feb55c..f58cfad93 100644 --- a/keyserver/src/scripts/sidebar-know-of-migration.js +++ b/keyserver/src/scripts/sidebar-know-of-migration.js @@ -1,68 +1,68 @@ // @flow -import bots from 'lib/facts/bots'; -import { threadTypes, type ThreadType } from 'lib/types/thread-types'; +import bots from 'lib/facts/bots.js'; +import { threadTypes, type ThreadType } from 'lib/types/thread-types.js'; -import { dbQuery, SQL } from '../database/database'; -import { createScriptViewer } from '../session/scripts'; -import { updateRoles } from '../updaters/role-updaters'; +import { dbQuery, SQL } from '../database/database.js'; +import { createScriptViewer } from '../session/scripts.js'; +import { updateRoles } from '../updaters/role-updaters.js'; import { recalculateThreadPermissions, commitMembershipChangeset, -} from '../updaters/thread-permission-updaters'; -import RelationshipChangeset from '../utils/relationship-changeset'; -import { main } from './utils'; +} from '../updaters/thread-permission-updaters.js'; +import RelationshipChangeset from '../utils/relationship-changeset.js'; +import { main } from './utils.js'; async function updatePrivateThreads() { console.log('updating private threads'); await updateThreads(threadTypes.PRIVATE); } async function updateSidebars() { console.log('updating sidebars'); await updateThreads(threadTypes.SIDEBAR); } const batchSize = 10; async function updateThreads(threadType: 
ThreadType) { const fetchThreads = SQL` SELECT id FROM threads WHERE type = ${threadType} `; const [result] = await dbQuery(fetchThreads); const threadIDs = result.map(row => row.id.toString()); const viewer = createScriptViewer(bots.commbot.userID); while (threadIDs.length > 0) { const batch = threadIDs.splice(0, batchSize); const membershipRows = []; const relationshipChangeset = new RelationshipChangeset(); await Promise.all( batch.map(async threadID => { console.log(`updating roles for ${threadID}`); await updateRoles(viewer, threadID, threadType); console.log(`recalculating permissions for ${threadID}`); const { membershipRows: threadMembershipRows, relationshipChangeset: threadRelationshipChangeset, } = await recalculateThreadPermissions(threadID); membershipRows.push(...threadMembershipRows); relationshipChangeset.addAll(threadRelationshipChangeset); }), ); console.log(`committing batch ${JSON.stringify(batch)}`); await commitMembershipChangeset(viewer, { membershipRows, relationshipChangeset, }); } } // This migration is supposed to update the database to reflect // https://phabricator.ashoat.com/D1020. There are two changes there: // (1) Changes to SIDEBAR so membership no longer automatically confers KNOW_OF // (2) Changes to PRIVATE so all of its children have KNOW_OF // We want to apply the changes to PRIVATE first so that when we recalculate // the permissions for any of a PRIVATE thread's SIDEBARs, the parent has // already been updated. main([updatePrivateThreads, updateSidebars]); diff --git a/keyserver/src/scripts/soft-launch-migration.js b/keyserver/src/scripts/soft-launch-migration.js index cac666f20..ed9f5ebb1 100644 --- a/keyserver/src/scripts/soft-launch-migration.js +++ b/keyserver/src/scripts/soft-launch-migration.js @@ -1,429 +1,429 @@ // @flow import invariant from 'invariant'; -import ashoat from 'lib/facts/ashoat'; -import bots from 'lib/facts/bots'; -import genesis from 'lib/facts/genesis'; -import testers from 'lib/facts/testers'; -import { messageTypes } from 'lib/types/message-types'; -import { threadTypes, type ThreadType } from 'lib/types/thread-types'; - -import createMessages from '../creators/message-creator'; -import { createThread } from '../creators/thread-creator'; -import { dbQuery, SQL } from '../database/database'; -import { fetchServerThreadInfos } from '../fetchers/thread-fetchers'; -import { fetchAllUserIDs } from '../fetchers/user-fetchers'; -import { createScriptViewer } from '../session/scripts'; -import type { Viewer } from '../session/viewer'; +import ashoat from 'lib/facts/ashoat.js'; +import bots from 'lib/facts/bots.js'; +import genesis from 'lib/facts/genesis.js'; +import testers from 'lib/facts/testers.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadTypes, type ThreadType } from 'lib/types/thread-types.js'; + +import createMessages from '../creators/message-creator.js'; +import { createThread } from '../creators/thread-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchServerThreadInfos } from '../fetchers/thread-fetchers.js'; +import { fetchAllUserIDs } from '../fetchers/user-fetchers.js'; +import { createScriptViewer } from '../session/scripts.js'; +import type { Viewer } from '../session/viewer.js'; import { recalculateThreadPermissions, commitMembershipChangeset, saveMemberships, -} from '../updaters/thread-permission-updaters'; -import { updateThread } from '../updaters/thread-updaters'; -import { main } from './utils'; +} from 
'../updaters/thread-permission-updaters.js'; +import { updateThread } from '../updaters/thread-updaters.js'; +import { main } from './utils.js'; const batchSize = 10; const createThreadOptions = { forceAddMembers: true }; const updateThreadOptions = { forceUpdateRoot: true, silenceMessages: true, ignorePermissions: true, }; const convertUnadminnedToCommunities = ['311733', '421638']; const convertToAnnouncementCommunities = ['375310']; const convertToAnnouncementSubthreads = ['82649']; const threadsWithMissingParent = ['534395']; const personalThreadsWithMissingMembers = [ '82161', '103111', '210609', '227049', ]; const excludeFromTestersThread = new Set([ '1402', '39227', '156159', '526973', '740732', ]); async function createGenesisCommunity() { const genesisThreadInfos = await fetchServerThreadInfos( SQL`t.id = ${genesis.id}`, ); const genesisThreadInfo = genesisThreadInfos.threadInfos[genesis.id]; if (genesisThreadInfo && genesisThreadInfo.type === threadTypes.GENESIS) { return; } else if (genesisThreadInfo) { return await updateGenesisCommunityType(); } console.log('creating GENESIS community'); const idInsertQuery = SQL` INSERT INTO ids(id, table_name) VALUES ${[[genesis.id, 'threads']]} `; await dbQuery(idInsertQuery); const ashoatViewer = createScriptViewer(ashoat.id); const allUserIDs = await fetchAllUserIDs(); const nonAshoatUserIDs = allUserIDs.filter(id => id !== ashoat.id); await createThread( ashoatViewer, { id: genesis.id, type: threadTypes.GENESIS, name: genesis.name, description: genesis.description, initialMemberIDs: nonAshoatUserIDs, }, createThreadOptions, ); await createMessages( ashoatViewer, genesis.introMessages.map(message => ({ type: messageTypes.TEXT, threadID: genesis.id, creatorID: ashoat.id, time: Date.now(), text: message, })), ); console.log('creating testers thread'); const testerUserIDs = nonAshoatUserIDs.filter( userID => !excludeFromTestersThread.has(userID), ); const { newThreadID } = await createThread( ashoatViewer, { type: threadTypes.COMMUNITY_SECRET_SUBTHREAD, name: testers.name, description: testers.description, initialMemberIDs: testerUserIDs, }, createThreadOptions, ); invariant( newThreadID, 'newThreadID for tester thread creation should be set', ); await createMessages( ashoatViewer, testers.introMessages.map(message => ({ type: messageTypes.TEXT, threadID: newThreadID, creatorID: ashoat.id, time: Date.now(), text: message, })), ); } async function updateGenesisCommunityType() { console.log('updating GENESIS community to GENESIS type'); const ashoatViewer = createScriptViewer(ashoat.id); await updateThread( ashoatViewer, { threadID: genesis.id, changes: { type: threadTypes.GENESIS, }, }, updateThreadOptions, ); } async function convertExistingCommunities() { const communityQuery = SQL` SELECT t.id, t.name FROM threads t LEFT JOIN roles r ON r.thread = t.id LEFT JOIN memberships m ON m.thread = t.id WHERE t.type = ${threadTypes.COMMUNITY_SECRET_SUBTHREAD} AND t.parent_thread_id IS NULL GROUP BY t.id HAVING COUNT(DISTINCT r.id) > 1 AND COUNT(DISTINCT m.user) > 2 `; const [convertToCommunity] = await dbQuery(communityQuery); const botViewer = createScriptViewer(bots.commbot.userID); await convertThreads( botViewer, convertToCommunity, threadTypes.COMMUNITY_ROOT, ); } async function convertThreads( viewer: Viewer, threads: Array<{ +id: number, +name: string }>, type: ThreadType, ) { while (threads.length > 0) { const batch = threads.splice(0, batchSize); await Promise.all( batch.map(async thread => { console.log(`converting 
${JSON.stringify(thread)} to ${type}`); return await updateThread( viewer, { threadID: thread.id.toString(), changes: { type }, }, updateThreadOptions, ); }), ); } } async function convertUnadminnedCommunities() { const communityQuery = SQL` SELECT id, name FROM threads WHERE id IN (${convertUnadminnedToCommunities}) AND type = ${threadTypes.COMMUNITY_SECRET_SUBTHREAD} `; const [convertToCommunity] = await dbQuery(communityQuery); // We use ashoat here to make sure he becomes the admin of these communities const ashoatViewer = createScriptViewer(ashoat.id); await convertThreads( ashoatViewer, convertToCommunity, threadTypes.COMMUNITY_ROOT, ); } async function convertAnnouncementCommunities() { const announcementCommunityQuery = SQL` SELECT id, name FROM threads WHERE id IN (${convertToAnnouncementCommunities}) AND type != ${threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT} `; const [convertToAnnouncementCommunity] = await dbQuery( announcementCommunityQuery, ); const botViewer = createScriptViewer(bots.commbot.userID); await convertThreads( botViewer, convertToAnnouncementCommunity, threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT, ); } async function convertAnnouncementSubthreads() { const announcementSubthreadQuery = SQL` SELECT id, name FROM threads WHERE id IN (${convertToAnnouncementSubthreads}) AND type != ${threadTypes.COMMUNITY_OPEN_ANNOUNCEMENT_SUBTHREAD} `; const [convertToAnnouncementSubthread] = await dbQuery( announcementSubthreadQuery, ); const botViewer = createScriptViewer(bots.commbot.userID); await convertThreads( botViewer, convertToAnnouncementSubthread, threadTypes.COMMUNITY_OPEN_ANNOUNCEMENT_SUBTHREAD, ); } async function fixThreadsWithMissingParent() { const threadsWithMissingParentQuery = SQL` SELECT id, name FROM threads WHERE id IN (${threadsWithMissingParent}) AND type != ${threadTypes.COMMUNITY_SECRET_SUBTHREAD} `; const [threadsWithMissingParentResult] = await dbQuery( threadsWithMissingParentQuery, ); const botViewer = createScriptViewer(bots.commbot.userID); while (threadsWithMissingParentResult.length > 0) { const batch = threadsWithMissingParentResult.splice(0, batchSize); await Promise.all( batch.map(async thread => { console.log(`fixing ${JSON.stringify(thread)} with missing parent`); return await updateThread( botViewer, { threadID: thread.id.toString(), changes: { parentThreadID: null, type: threadTypes.COMMUNITY_SECRET_SUBTHREAD, }, }, updateThreadOptions, ); }), ); } } async function fixPersonalThreadsWithMissingMembers() { const missingMembersQuery = SQL` SELECT thread, user FROM memberships WHERE thread IN (${personalThreadsWithMissingMembers}) AND role <= 0 `; const [missingMembers] = await dbQuery(missingMembersQuery); const botViewer = createScriptViewer(bots.commbot.userID); for (const row of missingMembers) { console.log(`fixing ${JSON.stringify(row)} with missing member`); await updateThread( botViewer, { threadID: row.thread.toString(), changes: { newMemberIDs: [row.user.toString()], }, }, updateThreadOptions, ); } } async function moveThreadsToGenesis() { const noParentQuery = SQL` SELECT id, name FROM threads WHERE type != ${threadTypes.COMMUNITY_ROOT} AND type != ${threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT} AND type != ${threadTypes.GENESIS} AND parent_thread_id IS NULL `; const [noParentThreads] = await dbQuery(noParentQuery); const botViewer = createScriptViewer(bots.commbot.userID); while (noParentThreads.length > 0) { const batch = noParentThreads.splice(0, batchSize); await Promise.all( batch.map(async thread => { console.log(`processing 
${JSON.stringify(thread)}`); return await updateThread( botViewer, { threadID: thread.id.toString(), changes: { parentThreadID: genesis.id, }, }, updateThreadOptions, ); }), ); } const childQuery = SQL` SELECT id, name FROM threads WHERE type != ${threadTypes.COMMUNITY_ROOT} AND type != ${threadTypes.COMMUNITY_ANNOUNCEMENT_ROOT} AND type != ${threadTypes.GENESIS} AND parent_thread_id IS NOT NULL AND parent_thread_id != ${genesis.id} `; const [childThreads] = await dbQuery(childQuery); for (const childThread of childThreads) { // We go one by one because the changes in a parent thread can affect a // child thread. If the child thread update starts at the same time as an // update for its parent thread, a race can cause incorrect results for the // child thread (in particular for the permissions on the memberships table) console.log(`processing ${JSON.stringify(childThread)}`); await updateThread( botViewer, { threadID: childThread.id.toString(), changes: {}, }, updateThreadOptions, ); } } async function clearMembershipPermissions() { const membershipPermissionQuery = SQL` SELECT DISTINCT thread FROM memberships WHERE JSON_EXTRACT(permissions, '$.membership') IS NOT NULL `; const [membershipPermissionResult] = await dbQuery(membershipPermissionQuery); if (membershipPermissionResult.length === 0) { return; } const botViewer = createScriptViewer(bots.commbot.userID); for (const row of membershipPermissionResult) { const threadID = row.thread.toString(); console.log(`clearing membership permissions for ${threadID}`); const changeset = await recalculateThreadPermissions(threadID); await commitMembershipChangeset(botViewer, changeset); } console.log('clearing -1 rows...'); const emptyMembershipDeletionQuery = SQL` DELETE FROM memberships WHERE role = -1 AND permissions IS NULL `; await dbQuery(emptyMembershipDeletionQuery); await createMembershipsForFormerMembers(); } async function createMembershipsForFormerMembers() { const [result] = await dbQuery(SQL` SELECT DISTINCT thread, user FROM messages m WHERE NOT EXISTS ( SELECT thread, user FROM memberships mm WHERE m.thread = mm.thread AND m.user = mm.user ) `); const rowsToSave = []; for (const row of result) { rowsToSave.push({ operation: 'save', userID: row.user.toString(), threadID: row.thread.toString(), userNeedsFullThreadDetails: false, intent: 'none', permissions: null, permissionsForChildren: null, role: '-1', oldRole: '-1', }); } await saveMemberships(rowsToSave); } main([ createGenesisCommunity, convertExistingCommunities, convertUnadminnedCommunities, convertAnnouncementCommunities, convertAnnouncementSubthreads, fixThreadsWithMissingParent, fixPersonalThreadsWithMissingMembers, moveThreadsToGenesis, clearMembershipPermissions, ]); diff --git a/keyserver/src/scripts/update-geoip.js b/keyserver/src/scripts/update-geoip.js index efa994fc4..9ced4403c 100644 --- a/keyserver/src/scripts/update-geoip.js +++ b/keyserver/src/scripts/update-geoip.js @@ -1,16 +1,16 @@ // @flow -import { updateGeoipDB } from '../cron/update-geoip-db'; -import { endScript } from './utils'; +import { updateGeoipDB } from '../cron/update-geoip-db.js'; +import { endScript } from './utils.js'; async function main() { try { await updateGeoipDB(); endScript(); } catch (e) { endScript(); console.warn(e); } } main(); diff --git a/keyserver/src/scripts/utils.js b/keyserver/src/scripts/utils.js index bfb30c08e..cca24afeb 100644 --- a/keyserver/src/scripts/utils.js +++ b/keyserver/src/scripts/utils.js @@ -1,26 +1,26 @@ // @flow -import { endPool } from '../database/database'; 
-import { endFirebase, endAPNs } from '../push/providers';
-import { publisher } from '../socket/redis';
+import { endPool } from '../database/database.js';
+import { endFirebase, endAPNs } from '../push/providers.js';
+import { publisher } from '../socket/redis.js';

function endScript() {
  endPool();
  publisher.end();
  endFirebase();
  endAPNs();
}

async function main(functions: $ReadOnlyArray<() => Promise>) {
  try {
    for (const f of functions) {
      await f();
    }
  } catch (e) {
    console.warn(e);
  } finally {
    endScript();
  }
}

export { endScript, main };
diff --git a/keyserver/src/search/users.js b/keyserver/src/search/users.js
index a8cff2f68..6fcdfb425 100644
--- a/keyserver/src/search/users.js
+++ b/keyserver/src/search/users.js
@@ -1,30 +1,30 @@
// @flow

-import type { UserSearchRequest } from 'lib/types/search-types';
-import type { GlobalAccountUserInfo } from 'lib/types/user-types';
+import type { UserSearchRequest } from 'lib/types/search-types.js';
+import type { GlobalAccountUserInfo } from 'lib/types/user-types.js';

-import { dbQuery, SQL } from '../database/database';
+import { dbQuery, SQL } from '../database/database.js';

async function searchForUsers(
  query: UserSearchRequest,
): Promise {
  const sqlQuery = SQL`SELECT id, username FROM users `;
  const prefix = query.prefix;
  if (prefix) {
    sqlQuery.append(SQL`WHERE LOWER(username) LIKE LOWER(${prefix + '%'}) `);
  }
  sqlQuery.append(SQL`LIMIT 20`);

  const [result] = await dbQuery(sqlQuery);

  const userInfos = [];
  for (const row of result) {
    userInfos.push({
      id: row.id.toString(),
      username: row.username,
    });
  }
  return userInfos;
}

export { searchForUsers };
diff --git a/keyserver/src/session/bots.js b/keyserver/src/session/bots.js
index 5c7a02123..46247475a 100644
--- a/keyserver/src/session/bots.js
+++ b/keyserver/src/session/bots.js
@@ -1,37 +1,37 @@
// @flow

-import bots from 'lib/facts/bots';
-import { ServerError } from 'lib/utils/errors';
+import bots from 'lib/facts/bots.js';
+import { ServerError } from 'lib/utils/errors.js';

-import { Viewer } from './viewer';
+import { Viewer } from './viewer.js';

// Note that since the returned Viewer doesn't have a valid cookieID or
// sessionID, a lot of things can go wrong when trying to use it with certain
// functions.
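// (Editor's illustration, not part of the original diff: what "can go wrong"
// with the returned Viewer, based on the getter invariants shown later in
// this diff for keyserver/src/session/viewer.js.)
//
//   const botViewer = createBotViewer(bots.commbot.userID);
//   botViewer.userID;   // fine, returns the bot's userID
//   botViewer.cookieID; // throws: the 'Viewer.cookieID should be set'
//                       // invariant fails because cookieID was passed as null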
function createBotViewer(userID: string): Viewer {
  let userIDIsBot = false;
  for (const botName in bots) {
    if (bots[botName].userID === userID) {
      userIDIsBot = true;
      break;
    }
  }
  if (!userIDIsBot) {
    throw new ServerError('invalid_bot_id');
  }
  return new Viewer({
    isSocket: true,
    loggedIn: true,
    id: userID,
    platformDetails: null,
    deviceToken: null,
    userID,
    cookieID: null,
    cookiePassword: null,
    sessionID: null,
    sessionInfo: null,
    isScriptViewer: true,
  });
}

export { createBotViewer };
diff --git a/keyserver/src/session/cookies.js b/keyserver/src/session/cookies.js
index 79a9a2fce..5ec510545 100644
--- a/keyserver/src/session/cookies.js
+++ b/keyserver/src/session/cookies.js
@@ -1,845 +1,848 @@
// @flow

import crypto from 'crypto';
import type { $Response, $Request } from 'express';
import invariant from 'invariant';
import bcrypt from 'twin-bcrypt';
import url from 'url';

-import { hasMinCodeVersion } from 'lib/shared/version-utils';
-import type { Shape } from 'lib/types/core';
-import type { Platform, PlatformDetails } from 'lib/types/device-types';
-import type { CalendarQuery } from 'lib/types/entry-types';
+import { hasMinCodeVersion } from 'lib/shared/version-utils.js';
+import type { Shape } from 'lib/types/core.js';
+import type { Platform, PlatformDetails } from 'lib/types/device-types.js';
+import type { CalendarQuery } from 'lib/types/entry-types.js';
import {
  type ServerSessionChange,
  cookieLifetime,
  cookieSources,
  type CookieSource,
  cookieTypes,
  sessionIdentifierTypes,
  type SessionIdentifierType,
-} from 'lib/types/session-types';
+} from 'lib/types/session-types.js';
import type { SIWESocialProof } from 'lib/types/siwe-types.js';
-import type { InitialClientSocketMessage } from 'lib/types/socket-types';
-import type { UserInfo } from 'lib/types/user-types';
-import { values } from 'lib/utils/objects';
-import { promiseAll } from 'lib/utils/promises';
-
-import createIDs from '../creators/id-creator';
-import { createSession } from '../creators/session-creator';
-import { dbQuery, SQL } from '../database/database';
-import { deleteCookie } from '../deleters/cookie-deleters';
-import { handleAsyncPromise } from '../responders/handlers';
-import { clearDeviceToken } from '../updaters/device-token-updaters';
-import { updateThreadMembers } from '../updaters/thread-updaters';
-import { assertSecureRequest } from '../utils/security-utils';
-import { type AppURLFacts, getAppURLFactsFromRequestURL } from '../utils/urls';
-import { Viewer } from './viewer';
-import type { AnonymousViewerData, UserViewerData } from './viewer';
+import type { InitialClientSocketMessage } from 'lib/types/socket-types.js';
+import type { UserInfo } from 'lib/types/user-types.js';
+import { values } from 'lib/utils/objects.js';
+import { promiseAll } from 'lib/utils/promises.js';
+
+import createIDs from '../creators/id-creator.js';
+import { createSession } from '../creators/session-creator.js';
+import { dbQuery, SQL } from '../database/database.js';
+import { deleteCookie } from '../deleters/cookie-deleters.js';
+import { handleAsyncPromise } from '../responders/handlers.js';
+import { clearDeviceToken } from '../updaters/device-token-updaters.js';
+import { updateThreadMembers } from '../updaters/thread-updaters.js';
+import { assertSecureRequest } from '../utils/security-utils.js';
+import {
+  type AppURLFacts,
+  getAppURLFactsFromRequestURL,
+} from '../utils/urls.js';
+import { Viewer } from './viewer.js';
+import type { AnonymousViewerData, UserViewerData } from './viewer.js';

function
cookieIsExpired(lastUsed: number) { return lastUsed + cookieLifetime <= Date.now(); } type SessionParameterInfo = { isSocket: boolean, sessionID: ?string, sessionIdentifierType: SessionIdentifierType, ipAddress: string, userAgent: ?string, }; type FetchViewerResult = | { type: 'valid', viewer: Viewer } | InvalidFetchViewerResult; type InvalidFetchViewerResult = | { type: 'nonexistant', cookieName: ?string, cookieSource: ?CookieSource, sessionParameterInfo: SessionParameterInfo, } | { type: 'invalidated', cookieName: string, cookieID: string, cookieSource: CookieSource, sessionParameterInfo: SessionParameterInfo, platformDetails: ?PlatformDetails, deviceToken: ?string, }; async function fetchUserViewer( cookie: string, cookieSource: CookieSource, sessionParameterInfo: SessionParameterInfo, ): Promise { const [cookieID, cookiePassword] = cookie.split(':'); if (!cookieID || !cookiePassword) { return { type: 'nonexistant', cookieName: cookieTypes.USER, cookieSource, sessionParameterInfo, }; } const query = SQL` SELECT hash, user, last_used, platform, device_token, versions FROM cookies WHERE id = ${cookieID} AND user IS NOT NULL `; const [[result], allSessionInfo] = await Promise.all([ dbQuery(query), fetchSessionInfo(sessionParameterInfo, cookieID), ]); if (result.length === 0) { return { type: 'nonexistant', cookieName: cookieTypes.USER, cookieSource, sessionParameterInfo, }; } let sessionID = null, sessionInfo = null; if (allSessionInfo) { ({ sessionID, ...sessionInfo } = allSessionInfo); } const cookieRow = result[0]; let platformDetails = null; if (cookieRow.versions) { const versions = JSON.parse(cookieRow.versions); platformDetails = { platform: cookieRow.platform, codeVersion: versions.codeVersion, stateVersion: versions.stateVersion, }; } else { platformDetails = { platform: cookieRow.platform }; } const deviceToken = cookieRow.device_token; if ( !bcrypt.compareSync(cookiePassword, cookieRow.hash) || cookieIsExpired(cookieRow.last_used) ) { return { type: 'invalidated', cookieName: cookieTypes.USER, cookieID, cookieSource, sessionParameterInfo, platformDetails, deviceToken, }; } const userID = cookieRow.user.toString(); const viewer = new Viewer({ isSocket: sessionParameterInfo.isSocket, loggedIn: true, id: userID, platformDetails, deviceToken, userID, cookieSource, cookieID, cookiePassword, sessionIdentifierType: sessionParameterInfo.sessionIdentifierType, sessionID, sessionInfo, isScriptViewer: false, ipAddress: sessionParameterInfo.ipAddress, userAgent: sessionParameterInfo.userAgent, }); return { type: 'valid', viewer }; } async function fetchAnonymousViewer( cookie: string, cookieSource: CookieSource, sessionParameterInfo: SessionParameterInfo, ): Promise { const [cookieID, cookiePassword] = cookie.split(':'); if (!cookieID || !cookiePassword) { return { type: 'nonexistant', cookieName: cookieTypes.ANONYMOUS, cookieSource, sessionParameterInfo, }; } const query = SQL` SELECT last_used, hash, platform, device_token, versions FROM cookies WHERE id = ${cookieID} AND user IS NULL `; const [[result], allSessionInfo] = await Promise.all([ dbQuery(query), fetchSessionInfo(sessionParameterInfo, cookieID), ]); if (result.length === 0) { return { type: 'nonexistant', cookieName: cookieTypes.ANONYMOUS, cookieSource, sessionParameterInfo, }; } let sessionID = null, sessionInfo = null; if (allSessionInfo) { ({ sessionID, ...sessionInfo } = allSessionInfo); } const cookieRow = result[0]; let platformDetails = null; if (cookieRow.platform && cookieRow.versions) { const versions = 
JSON.parse(cookieRow.versions); platformDetails = { platform: cookieRow.platform, codeVersion: versions.codeVersion, stateVersion: versions.stateVersion, }; } else if (cookieRow.platform) { platformDetails = { platform: cookieRow.platform }; } const deviceToken = cookieRow.device_token; if ( !bcrypt.compareSync(cookiePassword, cookieRow.hash) || cookieIsExpired(cookieRow.last_used) ) { return { type: 'invalidated', cookieName: cookieTypes.ANONYMOUS, cookieID, cookieSource, sessionParameterInfo, platformDetails, deviceToken, }; } const viewer = new Viewer({ isSocket: sessionParameterInfo.isSocket, loggedIn: false, id: cookieID, platformDetails, deviceToken, cookieSource, cookieID, cookiePassword, sessionIdentifierType: sessionParameterInfo.sessionIdentifierType, sessionID, sessionInfo, isScriptViewer: false, ipAddress: sessionParameterInfo.ipAddress, userAgent: sessionParameterInfo.userAgent, }); return { type: 'valid', viewer }; } type SessionInfo = { +sessionID: ?string, +lastValidated: number, +lastUpdate: number, +calendarQuery: CalendarQuery, }; async function fetchSessionInfo( sessionParameterInfo: SessionParameterInfo, cookieID: string, ): Promise { const { sessionID } = sessionParameterInfo; const session = sessionID !== undefined ? sessionID : cookieID; if (!session) { return null; } const query = SQL` SELECT query, last_validated, last_update FROM sessions WHERE id = ${session} AND cookie = ${cookieID} `; const [result] = await dbQuery(query); if (result.length === 0) { return null; } return { sessionID, lastValidated: result[0].last_validated, lastUpdate: result[0].last_update, calendarQuery: JSON.parse(result[0].query), }; } // This function is meant to consume a cookie that has already been processed. // That means it doesn't have any logic to handle an invalid cookie, and it // doesn't update the cookie's last_used timestamp. 
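// (Editor's note, not part of the original diff: the cookie shapes this
// function reads from req.cookies, as implied by fetchUserViewer and
// fetchAnonymousViewer above and by Viewer.cookieString later in this diff.)
//
//   req.cookies = { user: `${cookieID}:${cookiePassword}` };      // logged in
//   req.cookies = { anonymous: `${cookieID}:${cookiePassword}` }; // logged out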
async function fetchViewerFromCookieData( req: $Request, sessionParameterInfo: SessionParameterInfo, ): Promise { let viewerResult; const { user, anonymous } = req.cookies; if (user) { viewerResult = await fetchUserViewer( user, cookieSources.HEADER, sessionParameterInfo, ); } else if (anonymous) { viewerResult = await fetchAnonymousViewer( anonymous, cookieSources.HEADER, sessionParameterInfo, ); } else { return { type: 'nonexistant', cookieName: null, cookieSource: null, sessionParameterInfo, }; } // We protect against CSRF attacks by making sure that on web, // non-GET requests cannot use a bare cookie for session identification if (viewerResult.type === 'valid') { const { viewer } = viewerResult; invariant( req.method === 'GET' || viewer.sessionIdentifierType !== sessionIdentifierTypes.COOKIE_ID || viewer.platform !== 'web', 'non-GET request from web using sessionIdentifierTypes.COOKIE_ID', ); } return viewerResult; } async function fetchViewerFromRequestBody( body: mixed, sessionParameterInfo: SessionParameterInfo, ): Promise { if (!body || typeof body !== 'object') { return { type: 'nonexistant', cookieName: null, cookieSource: null, sessionParameterInfo, }; } const cookiePair = body.cookie; if (cookiePair === null || cookiePair === '') { return { type: 'nonexistant', cookieName: null, cookieSource: cookieSources.BODY, sessionParameterInfo, }; } if (!cookiePair || typeof cookiePair !== 'string') { return { type: 'nonexistant', cookieName: null, cookieSource: null, sessionParameterInfo, }; } const [type, cookie] = cookiePair.split('='); if (type === cookieTypes.USER && cookie) { return await fetchUserViewer( cookie, cookieSources.BODY, sessionParameterInfo, ); } else if (type === cookieTypes.ANONYMOUS && cookie) { return await fetchAnonymousViewer( cookie, cookieSources.BODY, sessionParameterInfo, ); } return { type: 'nonexistant', cookieName: null, cookieSource: null, sessionParameterInfo, }; } function getRequestIPAddress(req: $Request) { const { proxy } = getAppURLFactsFromRequestURL(req.originalUrl); let ipAddress; if (proxy === 'none') { ipAddress = req.socket.remoteAddress; } else if (proxy === 'apache') { ipAddress = req.get('X-Forwarded-For'); } invariant(ipAddress, 'could not determine requesting IP address'); return ipAddress; } function getSessionParameterInfoFromRequestBody( req: $Request, ): SessionParameterInfo { const body = (req.body: any); let sessionID = body.sessionID !== undefined || req.method !== 'GET' ? body.sessionID : null; if (sessionID === '') { sessionID = null; } const sessionIdentifierType = req.method === 'GET' || sessionID !== undefined ? 
sessionIdentifierTypes.BODY_SESSION_ID : sessionIdentifierTypes.COOKIE_ID; return { isSocket: false, sessionID, sessionIdentifierType, ipAddress: getRequestIPAddress(req), userAgent: req.get('User-Agent'), }; } async function fetchViewerForJSONRequest(req: $Request): Promise { assertSecureRequest(req); const sessionParameterInfo = getSessionParameterInfoFromRequestBody(req); let result = await fetchViewerFromRequestBody(req.body, sessionParameterInfo); if ( result.type === 'nonexistant' && (result.cookieSource === null || result.cookieSource === undefined) ) { result = await fetchViewerFromCookieData(req, sessionParameterInfo); } return await handleFetchViewerResult(result); } const webPlatformDetails = { platform: 'web' }; async function fetchViewerForHomeRequest(req: $Request): Promise { assertSecureRequest(req); const sessionParameterInfo = getSessionParameterInfoFromRequestBody(req); const result = await fetchViewerFromCookieData(req, sessionParameterInfo); return await handleFetchViewerResult(result, webPlatformDetails); } async function fetchViewerForSocket( req: $Request, clientMessage: InitialClientSocketMessage, ): Promise { assertSecureRequest(req); const { sessionIdentification } = clientMessage.payload; const { sessionID } = sessionIdentification; const sessionParameterInfo = { isSocket: true, sessionID, sessionIdentifierType: sessionID !== undefined ? sessionIdentifierTypes.BODY_SESSION_ID : sessionIdentifierTypes.COOKIE_ID, ipAddress: getRequestIPAddress(req), userAgent: req.get('User-Agent'), }; let result = await fetchViewerFromRequestBody( clientMessage.payload.sessionIdentification, sessionParameterInfo, ); if ( result.type === 'nonexistant' && (result.cookieSource === null || result.cookieSource === undefined) ) { result = await fetchViewerFromCookieData(req, sessionParameterInfo); } if (result.type === 'valid') { return result.viewer; } const promises = {}; if (result.cookieSource === cookieSources.BODY) { // We initialize a socket's Viewer after the WebSocket handshake, since to // properly initialize the Viewer we need a bunch of data, but that data // can't be sent until after the handshake. Consequently, by the time we // know that a cookie may be invalid, we are no longer communicating via // HTTP, and have no way to set a new cookie for HEADER (web) clients. const platformDetails = result.type === 'invalidated' ? result.platformDetails : null; const deviceToken = result.type === 'invalidated' ? result.deviceToken : null; promises.anonymousViewerData = createNewAnonymousCookie({ platformDetails, deviceToken, }); } if (result.type === 'invalidated') { promises.deleteCookie = deleteCookie(result.cookieID); } const { anonymousViewerData } = await promiseAll(promises); if (!anonymousViewerData) { return null; } return createViewerForInvalidFetchViewerResult(result, anonymousViewerData); } async function handleFetchViewerResult( result: FetchViewerResult, inputPlatformDetails?: PlatformDetails, ) { if (result.type === 'valid') { return result.viewer; } let platformDetails = inputPlatformDetails; if (!platformDetails && result.type === 'invalidated') { platformDetails = result.platformDetails; } const deviceToken = result.type === 'invalidated' ? result.deviceToken : null; const [anonymousViewerData] = await Promise.all([ createNewAnonymousCookie({ platformDetails, deviceToken }), result.type === 'invalidated' ? 
deleteCookie(result.cookieID) : null, ]); return createViewerForInvalidFetchViewerResult(result, anonymousViewerData); } function createViewerForInvalidFetchViewerResult( result: InvalidFetchViewerResult, anonymousViewerData: AnonymousViewerData, ): Viewer { // If a null cookie was specified in the request body, result.cookieSource // will still be BODY here. The only way it would be null or undefined here // is if there was no cookie specified in either the body or the header, in // which case we default to returning the new cookie in the response header. const cookieSource = result.cookieSource !== null && result.cookieSource !== undefined ? result.cookieSource : cookieSources.HEADER; const viewer = new Viewer({ ...anonymousViewerData, cookieSource, sessionIdentifierType: result.sessionParameterInfo.sessionIdentifierType, isSocket: result.sessionParameterInfo.isSocket, ipAddress: result.sessionParameterInfo.ipAddress, userAgent: result.sessionParameterInfo.userAgent, }); viewer.sessionChanged = true; // If cookieName is falsey, that tells us that there was no cookie specified // in the request, which means we can't be invalidating anything. if (result.cookieName) { viewer.cookieInvalidated = true; viewer.initialCookieName = result.cookieName; } return viewer; } function addSessionChangeInfoToResult( viewer: Viewer, res: $Response, result: Object, appURLFacts: AppURLFacts, ) { let threadInfos = {}, userInfos = {}; if (result.cookieChange) { ({ threadInfos, userInfos } = result.cookieChange); } let sessionChange; if (viewer.cookieInvalidated) { sessionChange = ({ cookieInvalidated: true, threadInfos, userInfos: (values(userInfos).map(a => a): UserInfo[]), currentUserInfo: { id: viewer.cookieID, anonymous: true, }, }: ServerSessionChange); } else { sessionChange = ({ cookieInvalidated: false, threadInfos, userInfos: (values(userInfos).map(a => a): UserInfo[]), }: ServerSessionChange); } if (viewer.cookieSource === cookieSources.BODY) { sessionChange.cookie = viewer.cookiePairString; } else { addActualHTTPCookie(viewer, res, appURLFacts); } if (viewer.sessionIdentifierType === sessionIdentifierTypes.BODY_SESSION_ID) { sessionChange.sessionID = viewer.sessionID ? viewer.sessionID : null; } result.cookieChange = sessionChange; } type AnonymousCookieCreationParams = Shape<{ +platformDetails: ?PlatformDetails, +deviceToken: ?string, }>; const defaultPlatformDetails = {}; // The result of this function should not be passed directly to the Viewer // constructor. Instead, it should be passed to viewer.setNewCookie. There are // several fields on AnonymousViewerData that are not set by this function: // sessionIdentifierType, cookieSource, ipAddress, and userAgent. These // parameters all depend on the initial request. If the result of this function // is passed to the Viewer constructor directly, the resultant Viewer object // will throw whenever anybody attempts to access the relevant properties. async function createNewAnonymousCookie( params: AnonymousCookieCreationParams, ): Promise { const { platformDetails, deviceToken } = params; const { platform, ...versions } = platformDetails || defaultPlatformDetails; const versionsString = Object.keys(versions).length > 0 ? JSON.stringify(versions) : null; const time = Date.now(); const cookiePassword = crypto.randomBytes(32).toString('hex'); const cookieHash = bcrypt.hashSync(cookiePassword); const [[id]] = await Promise.all([ createIDs('cookies', 1), deviceToken ? 
clearDeviceToken(deviceToken) : undefined, ]); const cookieRow = [ id, cookieHash, null, platform, time, time, deviceToken, versionsString, ]; const query = SQL` INSERT INTO cookies(id, hash, user, platform, creation_time, last_used, device_token, versions) VALUES ${[cookieRow]} `; await dbQuery(query); return { loggedIn: false, id, platformDetails, deviceToken, cookieID: id, cookiePassword, sessionID: undefined, sessionInfo: null, cookieInsertedThisRequest: true, isScriptViewer: false, }; } type UserCookieCreationParams = { platformDetails: PlatformDetails, deviceToken?: ?string, primaryIdentityPublicKey?: ?string, socialProof?: ?SIWESocialProof, }; // The result of this function should never be passed directly to the Viewer // constructor. Instead, it should be passed to viewer.setNewCookie. There are // several fields on UserViewerData that are not set by this function: // sessionID, sessionIdentifierType, cookieSource, and ipAddress. These // parameters all depend on the initial request. If the result of this function // is passed to the Viewer constructor directly, the resultant Viewer object // will throw whenever anybody attempts to access the relevant properties. async function createNewUserCookie( userID: string, params: UserCookieCreationParams, ): Promise { const { platformDetails, deviceToken, primaryIdentityPublicKey, socialProof, } = params; const { platform, ...versions } = platformDetails || defaultPlatformDetails; const versionsString = Object.keys(versions).length > 0 ? JSON.stringify(versions) : null; const time = Date.now(); const cookiePassword = crypto.randomBytes(32).toString('hex'); const cookieHash = bcrypt.hashSync(cookiePassword); const [[cookieID]] = await Promise.all([ createIDs('cookies', 1), deviceToken ? clearDeviceToken(deviceToken) : undefined, ]); const cookieRow = [ cookieID, cookieHash, userID, platform, time, time, deviceToken, versionsString, primaryIdentityPublicKey, JSON.stringify(socialProof), ]; const query = SQL` INSERT INTO cookies(id, hash, user, platform, creation_time, last_used, device_token, versions, public_key, social_proof) VALUES ${[cookieRow]} `; await dbQuery(query); return { loggedIn: true, id: userID, platformDetails, deviceToken, userID, cookieID, sessionID: undefined, sessionInfo: null, cookiePassword, cookieInsertedThisRequest: true, isScriptViewer: false, }; } // This gets called after createNewUserCookie and from websiteResponder. If the // Viewer's sessionIdentifierType is COOKIE_ID then the cookieID is used as the // session identifier; otherwise, a new ID is created for the session. 
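// (Editor's sketch, not part of the original diff: the call sequence the
// comment above describes. viewer.setNewCookie comes from viewer.js later in
// this diff; the literal 0 passed as initialLastUpdate is an assumption made
// purely for illustration.)
//
//   const userViewerData = await createNewUserCookie(userID, {
//     platformDetails,
//   });
//   viewer.setNewCookie(userViewerData);
//   await setNewSession(viewer, calendarQuery, 0);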
async function setNewSession( viewer: Viewer, calendarQuery: CalendarQuery, initialLastUpdate: number, ): Promise { if (viewer.sessionIdentifierType !== sessionIdentifierTypes.COOKIE_ID) { const [sessionID] = await createIDs('sessions', 1); viewer.setSessionID(sessionID); } await createSession(viewer, calendarQuery, initialLastUpdate); } async function extendCookieLifespan(cookieID: string) { const time = Date.now(); const query = SQL` UPDATE cookies SET last_used = ${time} WHERE id = ${cookieID} `; await dbQuery(query); } function addCookieToJSONResponse( viewer: Viewer, res: $Response, result: Object, expectCookieInvalidation: boolean, appURLFacts: AppURLFacts, ) { if (expectCookieInvalidation) { viewer.cookieInvalidated = false; } if (!viewer.getData().cookieInsertedThisRequest) { handleAsyncPromise(extendCookieLifespan(viewer.cookieID)); } if (viewer.sessionChanged) { addSessionChangeInfoToResult(viewer, res, result, appURLFacts); } else if (viewer.cookieSource !== cookieSources.BODY) { addActualHTTPCookie(viewer, res, appURLFacts); } } function addCookieToHomeResponse( viewer: Viewer, res: $Response, appURLFacts: AppURLFacts, ) { if (!viewer.getData().cookieInsertedThisRequest) { handleAsyncPromise(extendCookieLifespan(viewer.cookieID)); } addActualHTTPCookie(viewer, res, appURLFacts); } function getCookieOptions(appURLFacts: AppURLFacts) { const { baseDomain, basePath, https } = appURLFacts; const domainAsURL = new url.URL(baseDomain); return { domain: domainAsURL.hostname, path: basePath, httpOnly: true, secure: https, maxAge: cookieLifetime, sameSite: 'Strict', }; } function addActualHTTPCookie( viewer: Viewer, res: $Response, appURLFacts: AppURLFacts, ) { res.cookie( viewer.cookieName, viewer.cookieString, getCookieOptions(appURLFacts), ); if (viewer.cookieName !== viewer.initialCookieName) { res.clearCookie(viewer.initialCookieName, getCookieOptions(appURLFacts)); } } async function setCookiePlatform( viewer: Viewer, platform: Platform, ): Promise { const newPlatformDetails = { ...viewer.platformDetails, platform }; viewer.setPlatformDetails(newPlatformDetails); const query = SQL` UPDATE cookies SET platform = ${platform} WHERE id = ${viewer.cookieID} `; await dbQuery(query); } async function setCookiePlatformDetails( viewer: Viewer, platformDetails: PlatformDetails, ): Promise { if ( hasMinCodeVersion(platformDetails, 70) && !hasMinCodeVersion(viewer.platformDetails, 70) ) { await updateThreadMembers(viewer); } viewer.setPlatformDetails(platformDetails); const { platform, ...versions } = platformDetails; const versionsString = Object.keys(versions).length > 0 ? JSON.stringify(versions) : null; const query = SQL` UPDATE cookies SET platform = ${platform}, versions = ${versionsString} WHERE id = ${viewer.cookieID} `; await dbQuery(query); } export { fetchViewerForJSONRequest, fetchViewerForHomeRequest, fetchViewerForSocket, createNewAnonymousCookie, createNewUserCookie, setNewSession, extendCookieLifespan, addCookieToJSONResponse, addCookieToHomeResponse, setCookiePlatform, setCookiePlatformDetails, }; diff --git a/keyserver/src/session/scripts.js b/keyserver/src/session/scripts.js index 4922f47bf..de3a87aef 100644 --- a/keyserver/src/session/scripts.js +++ b/keyserver/src/session/scripts.js @@ -1,24 +1,24 @@ // @flow -import { Viewer } from './viewer'; +import { Viewer } from './viewer.js'; // Note that since the returned Viewer doesn't have a valid cookieID or // sessionID, a lot of things can go wrong when trying to use it with certain // functions. 
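// (Editor's note, not part of the original diff: this is the helper used by
// the migration script earlier in this diff, e.g.
//
//   const botViewer = createScriptViewer(bots.commbot.userID);
//
// The same caveat as for createBotViewer applies: cookie- and session-backed
// getters on the returned Viewer will throw.)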
function createScriptViewer(userID: string): Viewer { return new Viewer({ isSocket: true, loggedIn: true, id: userID, platformDetails: null, deviceToken: null, userID, cookieID: null, cookiePassword: null, sessionID: null, sessionInfo: null, isScriptViewer: true, }); } export { createScriptViewer }; diff --git a/keyserver/src/session/version.js b/keyserver/src/session/version.js index 2e88b6659..9477feab2 100644 --- a/keyserver/src/session/version.js +++ b/keyserver/src/session/version.js @@ -1,21 +1,21 @@ // @flow -import { hasMinCodeVersion } from 'lib/shared/version-utils'; -import type { PlatformDetails } from 'lib/types/device-types'; -import { ServerError } from 'lib/utils/errors'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; +import type { PlatformDetails } from 'lib/types/device-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import type { Viewer } from './viewer'; +import type { Viewer } from './viewer.js'; async function verifyClientSupported( viewer: Viewer, platformDetails: ?PlatformDetails, ) { if (hasMinCodeVersion(platformDetails, 31)) { return; } const error = new ServerError('client_version_unsupported'); error.platformDetails = platformDetails; throw error; } export { verifyClientSupported }; diff --git a/keyserver/src/session/viewer.js b/keyserver/src/session/viewer.js index 1655654e2..e933038b5 100644 --- a/keyserver/src/session/viewer.js +++ b/keyserver/src/session/viewer.js @@ -1,340 +1,340 @@ // @flow import geoip from 'geoip-lite'; import invariant from 'invariant'; -import type { Platform, PlatformDetails } from 'lib/types/device-types'; -import type { CalendarQuery } from 'lib/types/entry-types'; +import type { Platform, PlatformDetails } from 'lib/types/device-types.js'; +import type { CalendarQuery } from 'lib/types/entry-types.js'; import { type CookieSource, type SessionIdentifierType, cookieTypes, type CookieType, sessionIdentifierTypes, -} from 'lib/types/session-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/session-types.js'; +import { ServerError } from 'lib/utils/errors.js'; export type UserViewerData = { +loggedIn: true, +id: string, +platformDetails: ?PlatformDetails, +deviceToken: ?string, +userID: string, +cookieID: ?string, +cookieSource?: CookieSource, +cookiePassword: ?string, +cookieInsertedThisRequest?: boolean, +sessionIdentifierType?: SessionIdentifierType, +sessionID: ?string, +sessionInfo: ?SessionInfo, +isScriptViewer: boolean, +isSocket?: boolean, +ipAddress?: string, +userAgent?: ?string, }; export type AnonymousViewerData = { +loggedIn: false, +id: string, +platformDetails: ?PlatformDetails, +deviceToken: ?string, +cookieSource?: CookieSource, +cookieID: string, +cookiePassword: ?string, +cookieInsertedThisRequest?: boolean, +sessionIdentifierType?: SessionIdentifierType, +sessionID: ?string, +sessionInfo: ?SessionInfo, +isScriptViewer: boolean, +isSocket?: boolean, +ipAddress?: string, +userAgent?: ?string, }; type SessionInfo = { +lastValidated: number, +lastUpdate: number, +calendarQuery: CalendarQuery, }; export type ViewerData = UserViewerData | AnonymousViewerData; class Viewer { data: ViewerData; sessionChanged: boolean = false; cookieInvalidated: boolean = false; initialCookieName: string; cachedTimeZone: ?string; constructor(data: ViewerData) { this.data = data; this.initialCookieName = Viewer.cookieNameFromViewerData(data); } static cookieNameFromViewerData(data: ViewerData): CookieType { return data.loggedIn ? 
cookieTypes.USER : cookieTypes.ANONYMOUS; } getData(): ViewerData { return this.data; } setNewCookie(data: ViewerData) { if (data.cookieSource === null || data.cookieSource === undefined) { if (data.loggedIn) { data = { ...data, cookieSource: this.cookieSource }; } else { // This is a separate condition because of Flow data = { ...data, cookieSource: this.cookieSource }; } } if ( data.sessionIdentifierType === null || data.sessionIdentifierType === undefined ) { if (data.loggedIn) { data = { ...data, sessionIdentifierType: this.sessionIdentifierType }; } else { // This is a separate condition because of Flow data = { ...data, sessionIdentifierType: this.sessionIdentifierType }; } } if (data.isSocket === null || data.isSocket === undefined) { if (data.loggedIn) { data = { ...data, isSocket: this.isSocket }; } else { // This is a separate condition because of Flow data = { ...data, isSocket: this.isSocket }; } } if (data.ipAddress === null || data.ipAddress === undefined) { if (data.loggedIn) { data = { ...data, ipAddress: this.ipAddress }; } else { // This is a separate condition because of Flow data = { ...data, ipAddress: this.ipAddress }; } } else { this.cachedTimeZone = undefined; } if (data.userAgent === null || data.userAgent === undefined) { if (data.loggedIn) { data = { ...data, userAgent: this.userAgent }; } else { // This is a separate condition because of Flow data = { ...data, userAgent: this.userAgent }; } } this.data = data; this.sessionChanged = true; // If the request explicitly sets a new cookie, there's no point in telling // the client that their old cookie is invalid. Note that clients treat // cookieInvalidated as a forced log-out, which isn't necessary here. this.cookieInvalidated = false; } setSessionID(sessionID: string) { if (sessionID === this.sessionID) { return; } this.sessionChanged = true; if (this.data.loggedIn) { this.data = { ...this.data, sessionID }; } else { // This is a separate condition because of Flow this.data = { ...this.data, sessionID }; } } setSessionInfo(sessionInfo: SessionInfo) { if (this.data.loggedIn) { this.data = { ...this.data, sessionInfo }; } else { // This is a separate condition because of Flow this.data = { ...this.data, sessionInfo }; } } setDeviceToken(deviceToken: string) { if (this.data.loggedIn) { this.data = { ...this.data, deviceToken }; } else { // This is a separate condition because of Flow this.data = { ...this.data, deviceToken }; } } setPlatformDetails(platformDetails: PlatformDetails) { if (this.data.loggedIn) { this.data = { ...this.data, platformDetails }; } else { // This is a separate condition because of Flow this.data = { ...this.data, platformDetails }; } } get id(): string { return this.data.id; } get loggedIn(): boolean { return this.data.loggedIn; } get cookieSource(): CookieSource { const { cookieSource } = this.data; invariant( cookieSource !== null && cookieSource !== undefined, 'Viewer.cookieSource should be set', ); return cookieSource; } get cookieID(): string { const { cookieID } = this.data; invariant( cookieID !== null && cookieID !== undefined, 'Viewer.cookieID should be set', ); return cookieID; } get cookiePassword(): string { const { cookiePassword } = this.data; invariant( cookiePassword !== null && cookiePassword !== undefined, 'Viewer.cookieID should be set', ); return cookiePassword; } get sessionIdentifierType(): SessionIdentifierType { const { sessionIdentifierType } = this.data; invariant( sessionIdentifierType !== null && sessionIdentifierType !== undefined, 'Viewer.sessionIdentifierType 
should be set', ); return sessionIdentifierType; } // This is used in the case of sessionIdentifierTypes.BODY_SESSION_ID only. // It will be falsey otherwise. Use session below if you want the actual // session identifier in all cases. get sessionID(): ?string { return this.data.sessionID; } get session(): string { if (this.sessionIdentifierType === sessionIdentifierTypes.COOKIE_ID) { return this.cookieID; } else if (this.sessionID) { return this.sessionID; } else if (!this.loggedIn) { throw new ServerError('not_logged_in'); } else { // If the session identifier is sessionIdentifierTypes.BODY_SESSION_ID and // the user is logged in, then the sessionID should be set. throw new ServerError('unknown_error'); } } get hasSessionInfo(): boolean { const { sessionInfo } = this.data; return !!sessionInfo; } get sessionLastValidated(): number { const { sessionInfo } = this.data; invariant( sessionInfo !== null && sessionInfo !== undefined, 'Viewer.sessionInfo should be set', ); return sessionInfo.lastValidated; } get sessionLastUpdated(): number { const { sessionInfo } = this.data; invariant( sessionInfo !== null && sessionInfo !== undefined, 'Viewer.sessionInfo should be set', ); return sessionInfo.lastUpdate; } get calendarQuery(): CalendarQuery { const { sessionInfo } = this.data; invariant( sessionInfo !== null && sessionInfo !== undefined, 'Viewer.sessionInfo should be set', ); return sessionInfo.calendarQuery; } get userID(): string { if (!this.data.userID) { throw new ServerError('not_logged_in'); } return this.data.userID; } get cookieName(): string { return Viewer.cookieNameFromViewerData(this.data); } get cookieString(): string { return `${this.cookieID}:${this.cookiePassword}`; } get cookiePairString(): string { return `${this.cookieName}=${this.cookieString}`; } get platformDetails(): ?PlatformDetails { return this.data.platformDetails; } get platform(): ?Platform { return this.data.platformDetails ? this.data.platformDetails.platform : null; } get deviceToken(): ?string { return this.data.deviceToken; } get isScriptViewer(): boolean { return this.data.isScriptViewer; } get isSocket(): boolean { invariant( this.data.isSocket !== null && this.data.isSocket !== undefined, 'isSocket should be set', ); return this.data.isSocket; } get ipAddress(): string { invariant( this.data.ipAddress !== null && this.data.ipAddress !== undefined, 'ipAddress should be set', ); return this.data.ipAddress; } get userAgent(): ?string { return this.data.userAgent; } get timeZone(): ?string { if (this.cachedTimeZone === undefined) { const geoData = geoip.lookup(this.ipAddress); this.cachedTimeZone = geoData ? 
geoData.timezone : null; } return this.cachedTimeZone; } } export { Viewer }; diff --git a/keyserver/src/socket/redis.js b/keyserver/src/socket/redis.js index a4bccaa01..62ccfde4c 100644 --- a/keyserver/src/socket/redis.js +++ b/keyserver/src/socket/redis.js @@ -1,115 +1,115 @@ // @flow import type { RedisClient } from 'redis'; import redis from 'redis'; -import uuidv4 from 'uuid/v4'; +import uuidv4 from 'uuid/v4.js'; import { redisMessageTypes, type RedisMessage, type UpdateTarget, type SessionIdentifier, -} from 'lib/types/redis-types'; +} from 'lib/types/redis-types.js'; -import { getScriptContext } from '../scripts/script-context'; +import { getScriptContext } from '../scripts/script-context.js'; function channelNameForUpdateTarget(updateTarget: UpdateTarget): string { if (updateTarget.sessionID) { return `user.${updateTarget.userID}.${updateTarget.sessionID}`; } else { return `user.${updateTarget.userID}`; } } const redisConfig = (() => { if (!process.env.REDIS_URL) { return undefined; } return { url: process.env.REDIS_URL, }; })(); class RedisPublisher { client: ?RedisClient; get pub(): RedisClient { if (!this.client) { this.client = redis.createClient(redisConfig); } return this.client; } sendMessage(target: UpdateTarget, message: RedisMessage) { const channelName = channelNameForUpdateTarget(target); const stringifiedMessage = JSON.stringify(message); const scriptContext = getScriptContext(); if (scriptContext && scriptContext.dryRun) { console.log(`Redis publish to ${channelName}: ${stringifiedMessage}`); return; } this.pub.publish(channelName, stringifiedMessage); } end() { this.client?.unref(); } } const publisher: RedisPublisher = new RedisPublisher(); type OnMessage = (message: RedisMessage) => void | Promise; class RedisSubscriber { sub: RedisClient; instanceID: string; onMessageCallback: OnMessage; constructor(sessionIdentifier: SessionIdentifier, onMessage: OnMessage) { this.sub = redis.createClient(redisConfig); this.instanceID = uuidv4(); this.onMessageCallback = onMessage; const { userID } = sessionIdentifier; this.sub.subscribe(channelNameForUpdateTarget({ userID })); this.sub.subscribe(channelNameForUpdateTarget(sessionIdentifier)); publisher.sendMessage(sessionIdentifier, { type: redisMessageTypes.START_SUBSCRIPTION, instanceID: this.instanceID, }); this.sub.on('message', this.onMessage); } static messageFromString(messageString: string): ?RedisMessage { try { return JSON.parse(messageString); } catch (e) { console.log(e); return null; } } onMessage: (channel: string, messageString: string) => void = ( channel, messageString, ) => { const message = RedisSubscriber.messageFromString(messageString); if (!message) { return; } if (message.type === redisMessageTypes.START_SUBSCRIPTION) { if (message.instanceID === this.instanceID) { return; } else { this.quit(); } } this.onMessageCallback(message); }; quit() { this.sub.quit(); } } export { channelNameForUpdateTarget, publisher, RedisSubscriber }; diff --git a/keyserver/src/socket/session-utils.js b/keyserver/src/socket/session-utils.js index ad2f7815e..344b0645c 100644 --- a/keyserver/src/socket/session-utils.js +++ b/keyserver/src/socket/session-utils.js @@ -1,563 +1,563 @@ // @flow import invariant from 'invariant'; import t from 'tcomb'; import type { TUnion, TInterface } from 'tcomb'; import { usersInRawEntryInfos, serverEntryInfo, serverEntryInfosObject, -} from 'lib/shared/entry-utils'; -import { usersInThreadInfo } from 'lib/shared/thread-utils'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; -import 
type { UpdateActivityResult } from 'lib/types/activity-types'; -import { isDeviceType } from 'lib/types/device-types'; +} from 'lib/shared/entry-utils.js'; +import { usersInThreadInfo } from 'lib/shared/thread-utils.js'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; +import type { UpdateActivityResult } from 'lib/types/activity-types.js'; +import { isDeviceType } from 'lib/types/device-types.js'; import type { CalendarQuery, DeltaEntryInfosResponse, -} from 'lib/types/entry-types'; +} from 'lib/types/entry-types.js'; import { reportTypes, type ThreadInconsistencyReportCreationRequest, type EntryInconsistencyReportCreationRequest, -} from 'lib/types/report-types'; +} from 'lib/types/report-types.js'; import { serverRequestTypes, type ThreadInconsistencyClientResponse, type EntryInconsistencyClientResponse, type ClientResponse, type ServerServerRequest, type ServerCheckStateServerRequest, -} from 'lib/types/request-types'; -import { sessionCheckFrequency } from 'lib/types/session-types'; -import { hash } from 'lib/utils/objects'; -import { promiseAll } from 'lib/utils/promises'; +} from 'lib/types/request-types.js'; +import { sessionCheckFrequency } from 'lib/types/session-types.js'; +import { hash } from 'lib/utils/objects.js'; +import { promiseAll } from 'lib/utils/promises.js'; import { tShape, tPlatform, tPlatformDetails, -} from 'lib/utils/validation-utils'; +} from 'lib/utils/validation-utils.js'; -import { saveOneTimeKeys } from '../creators/one-time-keys-creator'; -import createReport from '../creators/report-creator'; -import { SQL } from '../database/database'; +import { saveOneTimeKeys } from '../creators/one-time-keys-creator.js'; +import createReport from '../creators/report-creator.js'; +import { SQL } from '../database/database.js'; import { fetchEntryInfos, fetchEntryInfosByID, fetchEntriesForSession, -} from '../fetchers/entry-fetchers'; -import { checkIfSessionHasEnoughOneTimeKeys } from '../fetchers/key-fetchers'; -import { fetchThreadInfos } from '../fetchers/thread-fetchers'; +} from '../fetchers/entry-fetchers.js'; +import { checkIfSessionHasEnoughOneTimeKeys } from '../fetchers/key-fetchers.js'; +import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; import { fetchCurrentUserInfo, fetchUserInfos, fetchKnownUserInfos, -} from '../fetchers/user-fetchers'; -import { activityUpdatesInputValidator } from '../responders/activity-responders'; -import { handleAsyncPromise } from '../responders/handlers'; +} from '../fetchers/user-fetchers.js'; +import { activityUpdatesInputValidator } from '../responders/activity-responders.js'; +import { handleAsyncPromise } from '../responders/handlers.js'; import { threadInconsistencyReportValidatorShape, entryInconsistencyReportValidatorShape, -} from '../responders/report-responders'; +} from '../responders/report-responders.js'; import { setNewSession, setCookiePlatform, setCookiePlatformDetails, -} from '../session/cookies'; -import type { Viewer } from '../session/viewer'; -import { activityUpdater } from '../updaters/activity-updaters'; -import { compareNewCalendarQuery } from '../updaters/entry-updaters'; -import type { SessionUpdate } from '../updaters/session-updaters'; +} from '../session/cookies.js'; +import type { Viewer } from '../session/viewer.js'; +import { activityUpdater } from '../updaters/activity-updaters.js'; +import { compareNewCalendarQuery } from '../updaters/entry-updaters.js'; +import type { SessionUpdate } from '../updaters/session-updaters.js'; const clientResponseInputValidator: 
TUnion = t.union([ tShape({ type: t.irreducible( 'serverRequestTypes.PLATFORM', x => x === serverRequestTypes.PLATFORM, ), platform: tPlatform, }), tShape({ ...threadInconsistencyReportValidatorShape, type: t.irreducible( 'serverRequestTypes.THREAD_INCONSISTENCY', x => x === serverRequestTypes.THREAD_INCONSISTENCY, ), }), tShape({ ...entryInconsistencyReportValidatorShape, type: t.irreducible( 'serverRequestTypes.ENTRY_INCONSISTENCY', x => x === serverRequestTypes.ENTRY_INCONSISTENCY, ), }), tShape({ type: t.irreducible( 'serverRequestTypes.PLATFORM_DETAILS', x => x === serverRequestTypes.PLATFORM_DETAILS, ), platformDetails: tPlatformDetails, }), tShape({ type: t.irreducible( 'serverRequestTypes.CHECK_STATE', x => x === serverRequestTypes.CHECK_STATE, ), hashResults: t.dict(t.String, t.Boolean), }), tShape({ type: t.irreducible( 'serverRequestTypes.INITIAL_ACTIVITY_UPDATES', x => x === serverRequestTypes.INITIAL_ACTIVITY_UPDATES, ), activityUpdates: activityUpdatesInputValidator, }), tShape({ type: t.irreducible( 'serverRequestTypes.MORE_ONE_TIME_KEYS', x => x === serverRequestTypes.MORE_ONE_TIME_KEYS, ), keys: t.list(t.String), }), ]); type StateCheckStatus = | { status: 'state_validated' } | { status: 'state_invalid', invalidKeys: $ReadOnlyArray } | { status: 'state_check' }; type ProcessClientResponsesResult = { serverRequests: ServerServerRequest[], stateCheckStatus: ?StateCheckStatus, activityUpdateResult: ?UpdateActivityResult, }; async function processClientResponses( viewer: Viewer, clientResponses: $ReadOnlyArray, ): Promise { let viewerMissingPlatform = !viewer.platform; const { platformDetails } = viewer; let viewerMissingPlatformDetails = !platformDetails || (isDeviceType(viewer.platform) && (platformDetails.codeVersion === null || platformDetails.codeVersion === undefined || platformDetails.stateVersion === null || platformDetails.stateVersion === undefined)); const promises = []; let activityUpdates = []; let stateCheckStatus = null; const clientSentPlatformDetails = clientResponses.some( response => response.type === serverRequestTypes.PLATFORM_DETAILS, ); for (const clientResponse of clientResponses) { if ( clientResponse.type === serverRequestTypes.PLATFORM && !clientSentPlatformDetails ) { promises.push(setCookiePlatform(viewer, clientResponse.platform)); viewerMissingPlatform = false; if (!isDeviceType(clientResponse.platform)) { viewerMissingPlatformDetails = false; } } else if ( clientResponse.type === serverRequestTypes.THREAD_INCONSISTENCY ) { promises.push(recordThreadInconsistency(viewer, clientResponse)); } else if (clientResponse.type === serverRequestTypes.ENTRY_INCONSISTENCY) { promises.push(recordEntryInconsistency(viewer, clientResponse)); } else if (clientResponse.type === serverRequestTypes.PLATFORM_DETAILS) { promises.push( setCookiePlatformDetails(viewer, clientResponse.platformDetails), ); viewerMissingPlatform = false; viewerMissingPlatformDetails = false; } else if ( clientResponse.type === serverRequestTypes.INITIAL_ACTIVITY_UPDATES ) { activityUpdates = [...activityUpdates, ...clientResponse.activityUpdates]; } else if (clientResponse.type === serverRequestTypes.CHECK_STATE) { const invalidKeys = []; for (const key in clientResponse.hashResults) { const result = clientResponse.hashResults[key]; if (!result) { invalidKeys.push(key); } } stateCheckStatus = invalidKeys.length > 0 ? 
{ status: 'state_invalid', invalidKeys } : { status: 'state_validated' }; } else if (clientResponse.type === serverRequestTypes.MORE_ONE_TIME_KEYS) { invariant(clientResponse.keys, 'keys expected in client response'); handleAsyncPromise(saveOneTimeKeys(viewer, clientResponse.keys)); } } const activityUpdatePromise = (async () => { if (activityUpdates.length === 0) { return; } return await activityUpdater(viewer, { updates: activityUpdates }); })(); const serverRequests = []; const checkOneTimeKeysPromise = (async () => { if (!viewer.loggedIn) { return; } const enoughOneTimeKeys = await checkIfSessionHasEnoughOneTimeKeys( viewer.session, ); if (!enoughOneTimeKeys) { serverRequests.push({ type: serverRequestTypes.MORE_ONE_TIME_KEYS }); } })(); const { activityUpdateResult } = await promiseAll({ all: Promise.all(promises), activityUpdateResult: activityUpdatePromise, checkOneTimeKeysPromise, }); if ( !stateCheckStatus && viewer.loggedIn && viewer.sessionLastValidated + sessionCheckFrequency < Date.now() ) { stateCheckStatus = { status: 'state_check' }; } if (viewerMissingPlatform) { serverRequests.push({ type: serverRequestTypes.PLATFORM }); } if (viewerMissingPlatformDetails) { serverRequests.push({ type: serverRequestTypes.PLATFORM_DETAILS }); } return { serverRequests, stateCheckStatus, activityUpdateResult }; } async function recordThreadInconsistency( viewer: Viewer, response: ThreadInconsistencyClientResponse, ): Promise { const { type, ...rest } = response; const reportCreationRequest = ({ ...rest, type: reportTypes.THREAD_INCONSISTENCY, }: ThreadInconsistencyReportCreationRequest); await createReport(viewer, reportCreationRequest); } async function recordEntryInconsistency( viewer: Viewer, response: EntryInconsistencyClientResponse, ): Promise { const { type, ...rest } = response; const reportCreationRequest = ({ ...rest, type: reportTypes.ENTRY_INCONSISTENCY, }: EntryInconsistencyReportCreationRequest); await createReport(viewer, reportCreationRequest); } type SessionInitializationResult = | { sessionContinued: false } | { sessionContinued: true, deltaEntryInfoResult: DeltaEntryInfosResponse, sessionUpdate: SessionUpdate, }; async function initializeSession( viewer: Viewer, calendarQuery: CalendarQuery, oldLastUpdate: number, ): Promise { if (!viewer.loggedIn) { return { sessionContinued: false }; } if (!viewer.hasSessionInfo) { // If the viewer has no session info but is logged in, that is indicative // of an expired / invalidated session and we should generate a new one await setNewSession(viewer, calendarQuery, oldLastUpdate); return { sessionContinued: false }; } if (oldLastUpdate < viewer.sessionLastUpdated) { // If the client has an older last_update than the server is tracking for // that client, then the client either had some issue persisting its store, // or the user restored the client app from a backup. 
Either way, we should // invalidate the existing session, since the server has assumed that the // checkpoint is further along than it is on the client, and might not still // have all of the updates necessary to do an incremental update await setNewSession(viewer, calendarQuery, oldLastUpdate); return { sessionContinued: false }; } let comparisonResult = null; try { comparisonResult = compareNewCalendarQuery(viewer, calendarQuery); } catch (e) { if (e.message !== 'unknown_error') { throw e; } } if (comparisonResult) { const { difference, oldCalendarQuery } = comparisonResult; const sessionUpdate = { ...comparisonResult.sessionUpdate, lastUpdate: oldLastUpdate, }; const deltaEntryInfoResult = await fetchEntriesForSession( viewer, difference, oldCalendarQuery, ); return { sessionContinued: true, deltaEntryInfoResult, sessionUpdate }; } else { await setNewSession(viewer, calendarQuery, oldLastUpdate); return { sessionContinued: false }; } } type StateCheckResult = { sessionUpdate?: SessionUpdate, checkStateRequest?: ServerCheckStateServerRequest, }; async function checkState( viewer: Viewer, status: StateCheckStatus, calendarQuery: CalendarQuery, ): Promise { const shouldCheckUserInfos = hasMinCodeVersion(viewer.platformDetails, 59); if (status.status === 'state_validated') { return { sessionUpdate: { lastValidated: Date.now() } }; } else if (status.status === 'state_check') { const promises = { threadsResult: fetchThreadInfos(viewer), entriesResult: fetchEntryInfos(viewer, [calendarQuery]), currentUserInfo: fetchCurrentUserInfo(viewer), userInfosResult: undefined, }; if (shouldCheckUserInfos) { promises.userInfosResult = fetchKnownUserInfos(viewer); } const fetchedData = await promiseAll(promises); let hashesToCheck = { threadInfos: hash(fetchedData.threadsResult.threadInfos), entryInfos: hash( serverEntryInfosObject(fetchedData.entriesResult.rawEntryInfos), ), currentUserInfo: hash(fetchedData.currentUserInfo), }; if (shouldCheckUserInfos) { hashesToCheck = { ...hashesToCheck, userInfos: hash(fetchedData.userInfosResult), }; } const checkStateRequest = { type: serverRequestTypes.CHECK_STATE, hashesToCheck, }; return { checkStateRequest }; } const { invalidKeys } = status; let fetchAllThreads = false, fetchAllEntries = false, fetchAllUserInfos = false, fetchUserInfo = false; const threadIDsToFetch = [], entryIDsToFetch = [], userIDsToFetch = []; for (const key of invalidKeys) { if (key === 'threadInfos') { fetchAllThreads = true; } else if (key === 'entryInfos') { fetchAllEntries = true; } else if (key === 'userInfos') { fetchAllUserInfos = true; } else if (key === 'currentUserInfo') { fetchUserInfo = true; } else if (key.startsWith('threadInfo|')) { const [, threadID] = key.split('|'); threadIDsToFetch.push(threadID); } else if (key.startsWith('entryInfo|')) { const [, entryID] = key.split('|'); entryIDsToFetch.push(entryID); } else if (key.startsWith('userInfo|')) { const [, userID] = key.split('|'); userIDsToFetch.push(userID); } } const fetchPromises = {}; if (fetchAllThreads) { fetchPromises.threadsResult = fetchThreadInfos(viewer); } else if (threadIDsToFetch.length > 0) { fetchPromises.threadsResult = fetchThreadInfos( viewer, SQL`t.id IN (${threadIDsToFetch})`, ); } if (fetchAllEntries) { fetchPromises.entriesResult = fetchEntryInfos(viewer, [calendarQuery]); } else if (entryIDsToFetch.length > 0) { fetchPromises.entryInfos = fetchEntryInfosByID(viewer, entryIDsToFetch); } if (fetchAllUserInfos) { fetchPromises.userInfos = fetchKnownUserInfos(viewer); } else if 
(userIDsToFetch.length > 0) { fetchPromises.userInfos = fetchKnownUserInfos(viewer, userIDsToFetch); } if (fetchUserInfo) { fetchPromises.currentUserInfo = fetchCurrentUserInfo(viewer); } const fetchedData = await promiseAll(fetchPromises); const hashesToCheck = {}, failUnmentioned = {}, stateChanges = {}; for (const key of invalidKeys) { if (key === 'threadInfos') { // Instead of returning all threadInfos, we want to narrow down and figure // out which threadInfos don't match first const { threadInfos } = fetchedData.threadsResult; for (const threadID in threadInfos) { hashesToCheck[`threadInfo|${threadID}`] = hash(threadInfos[threadID]); } failUnmentioned.threadInfos = true; } else if (key === 'entryInfos') { // Instead of returning all entryInfos, we want to narrow down and figure // out which entryInfos don't match first const { rawEntryInfos } = fetchedData.entriesResult; for (const rawEntryInfo of rawEntryInfos) { const entryInfo = serverEntryInfo(rawEntryInfo); invariant(entryInfo, 'should be set'); const { id: entryID } = entryInfo; invariant(entryID, 'should be set'); hashesToCheck[`entryInfo|${entryID}`] = hash(entryInfo); } failUnmentioned.entryInfos = true; } else if (key === 'userInfos') { // Instead of returning all userInfos, we want to narrow down and figure // out which userInfos don't match first const { userInfos } = fetchedData; for (const userID in userInfos) { hashesToCheck[`userInfo|${userID}`] = hash(userInfos[userID]); } failUnmentioned.userInfos = true; } else if (key === 'currentUserInfo') { stateChanges.currentUserInfo = fetchedData.currentUserInfo; } else if (key.startsWith('threadInfo|')) { const [, threadID] = key.split('|'); const { threadInfos } = fetchedData.threadsResult; const threadInfo = threadInfos[threadID]; if (!threadInfo) { if (!stateChanges.deleteThreadIDs) { stateChanges.deleteThreadIDs = []; } stateChanges.deleteThreadIDs.push(threadID); continue; } if (!stateChanges.rawThreadInfos) { stateChanges.rawThreadInfos = []; } stateChanges.rawThreadInfos.push(threadInfo); } else if (key.startsWith('entryInfo|')) { const [, entryID] = key.split('|'); const rawEntryInfos = fetchedData.entriesResult ? 
fetchedData.entriesResult.rawEntryInfos : fetchedData.entryInfos; const entryInfo = rawEntryInfos.find( candidate => candidate.id === entryID, ); if (!entryInfo) { if (!stateChanges.deleteEntryIDs) { stateChanges.deleteEntryIDs = []; } stateChanges.deleteEntryIDs.push(entryID); continue; } if (!stateChanges.rawEntryInfos) { stateChanges.rawEntryInfos = []; } stateChanges.rawEntryInfos.push(entryInfo); } else if (key.startsWith('userInfo|')) { const { userInfos: fetchedUserInfos } = fetchedData; const [, userID] = key.split('|'); const userInfo = fetchedUserInfos[userID]; if (!userInfo || !userInfo.username) { if (!stateChanges.deleteUserInfoIDs) { stateChanges.deleteUserInfoIDs = []; } stateChanges.deleteUserInfoIDs.push(userID); } else { if (!stateChanges.userInfos) { stateChanges.userInfos = []; } stateChanges.userInfos.push({ ...userInfo, // Flow gets confused if we don't do this username: userInfo.username, }); } } } if (!shouldCheckUserInfos) { const userIDs = new Set(); if (stateChanges.rawThreadInfos) { for (const threadInfo of stateChanges.rawThreadInfos) { for (const userID of usersInThreadInfo(threadInfo)) { userIDs.add(userID); } } } if (stateChanges.rawEntryInfos) { for (const userID of usersInRawEntryInfos(stateChanges.rawEntryInfos)) { userIDs.add(userID); } } const userInfos = []; if (userIDs.size > 0) { const fetchedUserInfos = await fetchUserInfos([...userIDs]); for (const userID in fetchedUserInfos) { const userInfo = fetchedUserInfos[userID]; if (userInfo && userInfo.username) { const { id, username } = userInfo; userInfos.push({ id, username }); } } } if (userInfos.length > 0) { stateChanges.userInfos = userInfos; } } const checkStateRequest = { type: serverRequestTypes.CHECK_STATE, hashesToCheck, failUnmentioned, stateChanges, }; if (Object.keys(hashesToCheck).length === 0) { return { checkStateRequest, sessionUpdate: { lastValidated: Date.now() } }; } else { return { checkStateRequest }; } } export { clientResponseInputValidator, processClientResponses, initializeSession, checkState, }; diff --git a/keyserver/src/socket/socket.js b/keyserver/src/socket/socket.js index 1f19600f2..4229024e3 100644 --- a/keyserver/src/socket/socket.js +++ b/keyserver/src/socket/socket.js @@ -1,821 +1,821 @@ // @flow import type { $Request } from 'express'; import invariant from 'invariant'; -import _debounce from 'lodash/debounce'; +import _debounce from 'lodash/debounce.js'; import t from 'tcomb'; import WebSocket from 'ws'; import { baseLegalPolicies } from 'lib/facts/policies.js'; -import { mostRecentMessageTimestamp } from 'lib/shared/message-utils'; +import { mostRecentMessageTimestamp } from 'lib/shared/message-utils.js'; import { serverRequestSocketTimeout, serverResponseTimeout, -} from 'lib/shared/timeouts'; -import { mostRecentUpdateTimestamp } from 'lib/shared/update-utils'; -import type { Shape } from 'lib/types/core'; -import { endpointIsSocketSafe } from 'lib/types/endpoints'; -import { defaultNumberPerThread } from 'lib/types/message-types'; -import { redisMessageTypes, type RedisMessage } from 'lib/types/redis-types'; +} from 'lib/shared/timeouts.js'; +import { mostRecentUpdateTimestamp } from 'lib/shared/update-utils.js'; +import type { Shape } from 'lib/types/core.js'; +import { endpointIsSocketSafe } from 'lib/types/endpoints.js'; +import { defaultNumberPerThread } from 'lib/types/message-types.js'; +import { redisMessageTypes, type RedisMessage } from 'lib/types/redis-types.js'; import { cookieSources, sessionCheckFrequency, stateCheckInactivityActivationInterval, -} 
from 'lib/types/session-types'; +} from 'lib/types/session-types.js'; import { type ClientSocketMessage, type InitialClientSocketMessage, type ResponsesClientSocketMessage, type ServerStateSyncFullSocketPayload, type ServerServerSocketMessage, type ErrorServerSocketMessage, type AuthErrorServerSocketMessage, type PingClientSocketMessage, type AckUpdatesClientSocketMessage, type APIRequestClientSocketMessage, clientSocketMessageTypes, stateSyncPayloadTypes, serverSocketMessageTypes, -} from 'lib/types/socket-types'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; -import { promiseAll } from 'lib/utils/promises'; -import SequentialPromiseResolver from 'lib/utils/sequential-promise-resolver'; -import sleep from 'lib/utils/sleep'; -import { tShape, tCookie } from 'lib/utils/validation-utils'; - -import { fetchUpdateInfosWithRawUpdateInfos } from '../creators/update-creator'; -import { deleteActivityForViewerSession } from '../deleters/activity-deleters'; -import { deleteCookie } from '../deleters/cookie-deleters'; -import { deleteUpdatesBeforeTimeTargetingSession } from '../deleters/update-deleters'; -import { jsonEndpoints } from '../endpoints'; -import { fetchEntryInfos } from '../fetchers/entry-fetchers'; +} from 'lib/types/socket-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; +import { promiseAll } from 'lib/utils/promises.js'; +import SequentialPromiseResolver from 'lib/utils/sequential-promise-resolver.js'; +import sleep from 'lib/utils/sleep.js'; +import { tShape, tCookie } from 'lib/utils/validation-utils.js'; + +import { fetchUpdateInfosWithRawUpdateInfos } from '../creators/update-creator.js'; +import { deleteActivityForViewerSession } from '../deleters/activity-deleters.js'; +import { deleteCookie } from '../deleters/cookie-deleters.js'; +import { deleteUpdatesBeforeTimeTargetingSession } from '../deleters/update-deleters.js'; +import { jsonEndpoints } from '../endpoints.js'; +import { fetchEntryInfos } from '../fetchers/entry-fetchers.js'; import { fetchMessageInfosSince, getMessageFetchResultFromRedisMessages, -} from '../fetchers/message-fetchers'; -import { fetchThreadInfos } from '../fetchers/thread-fetchers'; -import { fetchUpdateInfos } from '../fetchers/update-fetchers'; +} from '../fetchers/message-fetchers.js'; +import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; +import { fetchUpdateInfos } from '../fetchers/update-fetchers.js'; import { fetchCurrentUserInfo, fetchKnownUserInfos, -} from '../fetchers/user-fetchers'; +} from '../fetchers/user-fetchers.js'; import { newEntryQueryInputValidator, verifyCalendarQueryThreadIDs, -} from '../responders/entry-responders'; -import { handleAsyncPromise } from '../responders/handlers'; +} from '../responders/entry-responders.js'; +import { handleAsyncPromise } from '../responders/handlers.js'; import { fetchViewerForSocket, extendCookieLifespan, createNewAnonymousCookie, -} from '../session/cookies'; -import { Viewer } from '../session/viewer'; -import { commitSessionUpdate } from '../updaters/session-updaters'; -import { assertSecureRequest } from '../utils/security-utils'; +} from '../session/cookies.js'; +import { Viewer } from '../session/viewer.js'; +import { commitSessionUpdate } from '../updaters/session-updaters.js'; +import { assertSecureRequest } from '../utils/security-utils.js'; import { checkInputValidator, checkClientSupported, policiesValidator, -} from '../utils/validation-utils'; -import { 
RedisSubscriber } from './redis'; +} from '../utils/validation-utils.js'; +import { RedisSubscriber } from './redis.js'; import { clientResponseInputValidator, processClientResponses, initializeSession, checkState, -} from './session-utils'; +} from './session-utils.js'; const clientSocketMessageInputValidator = t.union([ tShape({ type: t.irreducible( 'clientSocketMessageTypes.INITIAL', x => x === clientSocketMessageTypes.INITIAL, ), id: t.Number, payload: tShape({ sessionIdentification: tShape({ cookie: t.maybe(tCookie), sessionID: t.maybe(t.String), }), sessionState: tShape({ calendarQuery: newEntryQueryInputValidator, messagesCurrentAsOf: t.Number, updatesCurrentAsOf: t.Number, watchedIDs: t.list(t.String), }), clientResponses: t.list(clientResponseInputValidator), }), }), tShape({ type: t.irreducible( 'clientSocketMessageTypes.RESPONSES', x => x === clientSocketMessageTypes.RESPONSES, ), id: t.Number, payload: tShape({ clientResponses: t.list(clientResponseInputValidator), }), }), tShape({ type: t.irreducible( 'clientSocketMessageTypes.PING', x => x === clientSocketMessageTypes.PING, ), id: t.Number, }), tShape({ type: t.irreducible( 'clientSocketMessageTypes.ACK_UPDATES', x => x === clientSocketMessageTypes.ACK_UPDATES, ), id: t.Number, payload: tShape({ currentAsOf: t.Number, }), }), tShape({ type: t.irreducible( 'clientSocketMessageTypes.API_REQUEST', x => x === clientSocketMessageTypes.API_REQUEST, ), id: t.Number, payload: tShape({ endpoint: t.String, input: t.Object, }), }), ]); function onConnection(ws: WebSocket, req: $Request) { assertSecureRequest(req); new Socket(ws, req); } type StateCheckConditions = { activityRecentlyOccurred: boolean, stateCheckOngoing: boolean, }; class Socket { ws: WebSocket; httpRequest: $Request; viewer: ?Viewer; redis: ?RedisSubscriber; redisPromiseResolver: SequentialPromiseResolver; stateCheckConditions: StateCheckConditions = { activityRecentlyOccurred: true, stateCheckOngoing: false, }; stateCheckTimeoutID: ?TimeoutID; constructor(ws: WebSocket, httpRequest: $Request) { this.ws = ws; this.httpRequest = httpRequest; ws.on('message', this.onMessage); ws.on('close', this.onClose); this.resetTimeout(); this.redisPromiseResolver = new SequentialPromiseResolver(this.sendMessage); } onMessage = async ( messageString: string | Buffer | ArrayBuffer | Array, ) => { invariant(typeof messageString === 'string', 'message should be string'); let clientSocketMessage: ?ClientSocketMessage; try { this.resetTimeout(); clientSocketMessage = JSON.parse(messageString); checkInputValidator( clientSocketMessageInputValidator, clientSocketMessage, ); if (clientSocketMessage.type === clientSocketMessageTypes.INITIAL) { if (this.viewer) { // This indicates that the user sent multiple INITIAL messages. throw new ServerError('socket_already_initialized'); } this.viewer = await fetchViewerForSocket( this.httpRequest, clientSocketMessage, ); if (!this.viewer) { // This indicates that the cookie was invalid, but the client is using // cookieSources.HEADER and thus can't accept a new cookie over // WebSockets. See comment under catch block for socket_deauthorized. throw new ServerError('socket_deauthorized'); } } const { viewer } = this; if (!viewer) { // This indicates a non-INITIAL message was sent by the client before // the INITIAL message. throw new ServerError('socket_uninitialized'); } if (viewer.sessionChanged) { // This indicates that the cookie was invalid, and we've assigned a new // anonymous one. 
throw new ServerError('socket_deauthorized'); } if (!viewer.loggedIn) { // This indicates that the specified cookie was an anonymous one. throw new ServerError('not_logged_in'); } await checkClientSupported( viewer, clientSocketMessageInputValidator, clientSocketMessage, ); await policiesValidator(viewer, baseLegalPolicies); const serverResponses = await this.handleClientSocketMessage( clientSocketMessage, ); if (!this.redis) { this.redis = new RedisSubscriber( { userID: viewer.userID, sessionID: viewer.session }, this.onRedisMessage, ); } if (viewer.sessionChanged) { // This indicates that something has caused the session to change, which // shouldn't happen from inside a WebSocket since we can't handle cookie // invalidation. throw new ServerError('session_mutated_from_socket'); } if (clientSocketMessage.type !== clientSocketMessageTypes.PING) { handleAsyncPromise(extendCookieLifespan(viewer.cookieID)); } for (const response of serverResponses) { this.sendMessage(response); } if (clientSocketMessage.type === clientSocketMessageTypes.INITIAL) { this.onSuccessfulConnection(); } } catch (error) { console.warn(error); if (!(error instanceof ServerError)) { const errorMessage: ErrorServerSocketMessage = { type: serverSocketMessageTypes.ERROR, message: error.message, }; const responseTo = clientSocketMessage ? clientSocketMessage.id : null; if (responseTo !== null) { errorMessage.responseTo = responseTo; } this.markActivityOccurred(); this.sendMessage(errorMessage); return; } invariant(clientSocketMessage, 'should be set'); const responseTo = clientSocketMessage.id; if (error.message === 'socket_deauthorized') { const authErrorMessage: AuthErrorServerSocketMessage = { type: serverSocketMessageTypes.AUTH_ERROR, responseTo, message: error.message, }; if (this.viewer) { // viewer should only be falsey for cookieSources.HEADER (web) // clients. Usually if the cookie is invalid we construct a new // anonymous Viewer with a new cookie, and then pass the cookie down // in the error. But we can't pass HTTP cookies in WebSocket messages. authErrorMessage.sessionChange = { cookie: this.viewer.cookiePairString, currentUserInfo: { id: this.viewer.cookieID, anonymous: true, }, }; } this.sendMessage(authErrorMessage); this.ws.close(4100, error.message); return; } else if (error.message === 'client_version_unsupported') { const { viewer } = this; invariant(viewer, 'should be set'); const promises = {}; promises.deleteCookie = deleteCookie(viewer.cookieID); if (viewer.cookieSource !== cookieSources.BODY) { promises.anonymousViewerData = createNewAnonymousCookie({ platformDetails: error.platformDetails, deviceToken: viewer.deviceToken, }); } const { anonymousViewerData } = await promiseAll(promises); const authErrorMessage: AuthErrorServerSocketMessage = { type: serverSocketMessageTypes.AUTH_ERROR, responseTo, message: error.message, }; if (anonymousViewerData) { // It is normally not safe to pass the result of // createNewAnonymousCookie to the Viewer constructor. That is because // createNewAnonymousCookie leaves several fields of // AnonymousViewerData unset, and consequently Viewer will throw when // access is attempted. It is only safe here because we can guarantee // that only cookiePairString and cookieID are accessed on anonViewer // below. 
const anonViewer = new Viewer(anonymousViewerData); authErrorMessage.sessionChange = { cookie: anonViewer.cookiePairString, currentUserInfo: { id: anonViewer.cookieID, anonymous: true, }, }; } this.sendMessage(authErrorMessage); this.ws.close(4101, error.message); return; } if (error.payload) { this.sendMessage({ type: serverSocketMessageTypes.ERROR, responseTo, message: error.message, payload: error.payload, }); } else { this.sendMessage({ type: serverSocketMessageTypes.ERROR, responseTo, message: error.message, }); } if (error.message === 'not_logged_in') { this.ws.close(4102, error.message); } else if (error.message === 'session_mutated_from_socket') { this.ws.close(4103, error.message); } else { this.markActivityOccurred(); } } }; onClose = async () => { this.clearStateCheckTimeout(); this.resetTimeout.cancel(); this.debouncedAfterActivity.cancel(); if (this.viewer && this.viewer.hasSessionInfo) { await deleteActivityForViewerSession(this.viewer); } if (this.redis) { this.redis.quit(); this.redis = null; } }; sendMessage = (message: ServerServerSocketMessage) => { invariant( this.ws.readyState > 0, "shouldn't send message until connection established", ); if (this.ws.readyState === 1) { this.ws.send(JSON.stringify(message)); } }; async handleClientSocketMessage( message: ClientSocketMessage, ): Promise { const resultPromise = (async () => { if (message.type === clientSocketMessageTypes.INITIAL) { this.markActivityOccurred(); return await this.handleInitialClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.RESPONSES) { this.markActivityOccurred(); return await this.handleResponsesClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.PING) { return this.handlePingClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.ACK_UPDATES) { this.markActivityOccurred(); return await this.handleAckUpdatesClientSocketMessage(message); } else if (message.type === clientSocketMessageTypes.API_REQUEST) { this.markActivityOccurred(); return await this.handleAPIRequestClientSocketMessage(message); } return []; })(); const timeoutPromise = (async () => { await sleep(serverResponseTimeout); throw new ServerError('socket_response_timeout'); })(); return await Promise.race([resultPromise, timeoutPromise]); } async handleInitialClientSocketMessage( message: InitialClientSocketMessage, ): Promise { const { viewer } = this; invariant(viewer, 'should be set'); const responses = []; const { sessionState, clientResponses } = message.payload; const { calendarQuery, updatesCurrentAsOf: oldUpdatesCurrentAsOf, messagesCurrentAsOf: oldMessagesCurrentAsOf, watchedIDs, } = sessionState; await verifyCalendarQueryThreadIDs(calendarQuery); const sessionInitializationResult = await initializeSession( viewer, calendarQuery, oldUpdatesCurrentAsOf, ); const threadCursors = {}; for (const watchedThreadID of watchedIDs) { threadCursors[watchedThreadID] = null; } const messageSelectionCriteria = { threadCursors, joinedThreads: true, newerThan: oldMessagesCurrentAsOf, }; const [ fetchMessagesResult, { serverRequests, activityUpdateResult }, ] = await Promise.all([ fetchMessageInfosSince( viewer, messageSelectionCriteria, defaultNumberPerThread, ), processClientResponses(viewer, clientResponses), ]); const messagesResult = { rawMessageInfos: fetchMessagesResult.rawMessageInfos, truncationStatuses: fetchMessagesResult.truncationStatuses, currentAsOf: mostRecentMessageTimestamp( fetchMessagesResult.rawMessageInfos, oldMessagesCurrentAsOf, ), }; if 
(!sessionInitializationResult.sessionContinued) { const [ threadsResult, entriesResult, currentUserInfo, knownUserInfos, ] = await Promise.all([ fetchThreadInfos(viewer), fetchEntryInfos(viewer, [calendarQuery]), fetchCurrentUserInfo(viewer), fetchKnownUserInfos(viewer), ]); const payload: ServerStateSyncFullSocketPayload = { type: stateSyncPayloadTypes.FULL, messagesResult, threadInfos: threadsResult.threadInfos, currentUserInfo, rawEntryInfos: entriesResult.rawEntryInfos, userInfos: values(knownUserInfos), updatesCurrentAsOf: oldUpdatesCurrentAsOf, }; if (viewer.sessionChanged) { // If initializeSession encounters // sessionIdentifierTypes.BODY_SESSION_ID, but the session // is unspecified or expired, // it will set a new sessionID and specify viewer.sessionChanged const { sessionID } = viewer; invariant( sessionID !== null && sessionID !== undefined, 'should be set', ); payload.sessionID = sessionID; viewer.sessionChanged = false; } responses.push({ type: serverSocketMessageTypes.STATE_SYNC, responseTo: message.id, payload, }); } else { const { sessionUpdate, deltaEntryInfoResult, } = sessionInitializationResult; const promises = {}; promises.deleteExpiredUpdates = deleteUpdatesBeforeTimeTargetingSession( viewer, oldUpdatesCurrentAsOf, ); promises.fetchUpdateResult = fetchUpdateInfos( viewer, oldUpdatesCurrentAsOf, calendarQuery, ); promises.sessionUpdate = commitSessionUpdate(viewer, sessionUpdate); const { fetchUpdateResult } = await promiseAll(promises); const { updateInfos, userInfos } = fetchUpdateResult; const newUpdatesCurrentAsOf = mostRecentUpdateTimestamp( [...updateInfos], oldUpdatesCurrentAsOf, ); const updatesResult = { newUpdates: updateInfos, currentAsOf: newUpdatesCurrentAsOf, }; responses.push({ type: serverSocketMessageTypes.STATE_SYNC, responseTo: message.id, payload: { type: stateSyncPayloadTypes.INCREMENTAL, messagesResult, updatesResult, deltaEntryInfos: deltaEntryInfoResult.rawEntryInfos, deletedEntryIDs: deltaEntryInfoResult.deletedEntryIDs, userInfos: values(userInfos), }, }); } if (serverRequests.length > 0 || clientResponses.length > 0) { // We send this message first since the STATE_SYNC triggers the client's // connection status to shift to "connected", and we want to make sure the // client responses are cleared from Redux before that happens responses.unshift({ type: serverSocketMessageTypes.REQUESTS, responseTo: message.id, payload: { serverRequests }, }); } if (activityUpdateResult) { // Same reason for unshifting as above responses.unshift({ type: serverSocketMessageTypes.ACTIVITY_UPDATE_RESPONSE, responseTo: message.id, payload: activityUpdateResult, }); } return responses; } async handleResponsesClientSocketMessage( message: ResponsesClientSocketMessage, ): Promise { const { viewer } = this; invariant(viewer, 'should be set'); const { clientResponses } = message.payload; const { stateCheckStatus } = await processClientResponses( viewer, clientResponses, ); const serverRequests = []; if (stateCheckStatus && stateCheckStatus.status !== 'state_check') { const { sessionUpdate, checkStateRequest } = await checkState( viewer, stateCheckStatus, viewer.calendarQuery, ); if (sessionUpdate) { await commitSessionUpdate(viewer, sessionUpdate); this.setStateCheckConditions({ stateCheckOngoing: false }); } if (checkStateRequest) { serverRequests.push(checkStateRequest); } } // We send a response message regardless of whether we have any requests, // since we need to ack the client's responses return [ { type: serverSocketMessageTypes.REQUESTS, responseTo: 
message.id, payload: { serverRequests }, }, ]; } handlePingClientSocketMessage( message: PingClientSocketMessage, ): ServerServerSocketMessage[] { return [ { type: serverSocketMessageTypes.PONG, responseTo: message.id, }, ]; } async handleAckUpdatesClientSocketMessage( message: AckUpdatesClientSocketMessage, ): Promise { const { viewer } = this; invariant(viewer, 'should be set'); const { currentAsOf } = message.payload; await Promise.all([ deleteUpdatesBeforeTimeTargetingSession(viewer, currentAsOf), commitSessionUpdate(viewer, { lastUpdate: currentAsOf }), ]); return []; } async handleAPIRequestClientSocketMessage( message: APIRequestClientSocketMessage, ): Promise { if (!endpointIsSocketSafe(message.payload.endpoint)) { throw new ServerError('endpoint_unsafe_for_socket'); } const { viewer } = this; invariant(viewer, 'should be set'); const responder = jsonEndpoints[message.payload.endpoint]; await policiesValidator(viewer, responder.requiredPolicies); const response = await responder.responder(viewer, message.payload.input); return [ { type: serverSocketMessageTypes.API_RESPONSE, responseTo: message.id, payload: response, }, ]; } onRedisMessage = async (message: RedisMessage) => { try { await this.processRedisMessage(message); } catch (e) { console.warn(e); } }; async processRedisMessage(message: RedisMessage) { if (message.type === redisMessageTypes.START_SUBSCRIPTION) { this.ws.terminate(); } else if (message.type === redisMessageTypes.NEW_UPDATES) { const { viewer } = this; invariant(viewer, 'should be set'); if (message.ignoreSession && message.ignoreSession === viewer.session) { return; } const rawUpdateInfos = message.updates; this.redisPromiseResolver.add( (async () => { const { updateInfos, userInfos, } = await fetchUpdateInfosWithRawUpdateInfos(rawUpdateInfos, { viewer, }); if (updateInfos.length === 0) { console.warn( 'could not get any UpdateInfos from redisMessageTypes.NEW_UPDATES', ); return null; } this.markActivityOccurred(); return { type: serverSocketMessageTypes.UPDATES, payload: { updatesResult: { currentAsOf: mostRecentUpdateTimestamp([...updateInfos], 0), newUpdates: updateInfos, }, userInfos: values(userInfos), }, }; })(), ); } else if (message.type === redisMessageTypes.NEW_MESSAGES) { const { viewer } = this; invariant(viewer, 'should be set'); const rawMessageInfos = message.messages; const messageFetchResult = getMessageFetchResultFromRedisMessages( viewer, rawMessageInfos, ); if (messageFetchResult.rawMessageInfos.length === 0) { console.warn( 'could not get any rawMessageInfos from ' + 'redisMessageTypes.NEW_MESSAGES', ); return; } this.redisPromiseResolver.add( (async () => { this.markActivityOccurred(); return { type: serverSocketMessageTypes.MESSAGES, payload: { messagesResult: { rawMessageInfos: messageFetchResult.rawMessageInfos, truncationStatuses: messageFetchResult.truncationStatuses, currentAsOf: mostRecentMessageTimestamp( messageFetchResult.rawMessageInfos, 0, ), }, }, }; })(), ); } } onSuccessfulConnection() { if (this.ws.readyState !== 1) { return; } this.handleStateCheckConditionsUpdate(); } // The Socket will timeout by calling this.ws.terminate() // serverRequestSocketTimeout milliseconds after the last // time resetTimeout is called resetTimeout = _debounce( () => this.ws.terminate(), serverRequestSocketTimeout, ); debouncedAfterActivity = _debounce( () => this.setStateCheckConditions({ activityRecentlyOccurred: false }), stateCheckInactivityActivationInterval, ); markActivityOccurred = () => { if (this.ws.readyState !== 1) { return; } 
this.setStateCheckConditions({ activityRecentlyOccurred: true }); this.debouncedAfterActivity(); }; clearStateCheckTimeout() { const { stateCheckTimeoutID } = this; if (stateCheckTimeoutID) { clearTimeout(stateCheckTimeoutID); this.stateCheckTimeoutID = null; } } setStateCheckConditions(newConditions: Shape) { this.stateCheckConditions = { ...this.stateCheckConditions, ...newConditions, }; this.handleStateCheckConditionsUpdate(); } get stateCheckCanStart() { return Object.values(this.stateCheckConditions).every(cond => !cond); } handleStateCheckConditionsUpdate() { if (!this.stateCheckCanStart) { this.clearStateCheckTimeout(); return; } if (this.stateCheckTimeoutID) { return; } const { viewer } = this; if (!viewer) { return; } const timeUntilStateCheck = viewer.sessionLastValidated + sessionCheckFrequency - Date.now(); if (timeUntilStateCheck <= 0) { this.initiateStateCheck(); } else { this.stateCheckTimeoutID = setTimeout( this.initiateStateCheck, timeUntilStateCheck, ); } } initiateStateCheck = async () => { this.setStateCheckConditions({ stateCheckOngoing: true }); const { viewer } = this; invariant(viewer, 'should be set'); const { checkStateRequest } = await checkState( viewer, { status: 'state_check' }, viewer.calendarQuery, ); invariant(checkStateRequest, 'should be set'); this.sendMessage({ type: serverSocketMessageTypes.REQUESTS, payload: { serverRequests: [checkStateRequest] }, }); }; } export { onConnection }; diff --git a/keyserver/src/updaters/account-updaters.js b/keyserver/src/updaters/account-updaters.js index a9fce9533..aed36bb54 100644 --- a/keyserver/src/updaters/account-updaters.js +++ b/keyserver/src/updaters/account-updaters.js @@ -1,111 +1,111 @@ // @flow import bcrypt from 'twin-bcrypt'; import type { ResetPasswordRequest, UpdatePasswordRequest, UpdateUserSettingsRequest, LogInResponse, -} from 'lib/types/account-types'; -import { updateTypes } from 'lib/types/update-types'; -import type { PasswordUpdate } from 'lib/types/user-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/account-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import type { PasswordUpdate } from 'lib/types/user-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function accountUpdater( viewer: Viewer, update: PasswordUpdate, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const newPassword = update.updatedFields.password; if (!newPassword) { // If it's an old client it may have given us an email, // but we don't store those anymore return; } const verifyQuery = SQL` SELECT username, hash FROM users WHERE id = ${viewer.userID} `; const [verifyResult] = await dbQuery(verifyQuery); if (verifyResult.length === 0) { throw new ServerError('internal_error'); } const verifyRow = verifyResult[0]; if (!bcrypt.compareSync(update.currentPassword, verifyRow.hash)) { throw new ServerError('invalid_credentials'); } const changedFields = { hash: bcrypt.hashSync(newPassword) }; const saveQuery = SQL` UPDATE users SET ${changedFields} WHERE id = ${viewer.userID} `; await dbQuery(saveQuery); const updateDatas = [ { type: updateTypes.UPDATE_CURRENT_USER, userID: viewer.userID, time: 
Date.now(), }, ]; await createUpdates(updateDatas, { viewer, updatesForCurrentSession: 'broadcast', }); } // eslint-disable-next-line no-unused-vars async function checkAndSendVerificationEmail(viewer: Viewer): Promise { // We don't want to crash old clients that call this, // but we have nothing we can do because we no longer store email addresses } async function checkAndSendPasswordResetEmail( // eslint-disable-next-line no-unused-vars request: ResetPasswordRequest, ): Promise { // We don't want to crash old clients that call this, // but we have nothing we can do because we no longer store email addresses } /* eslint-disable no-unused-vars */ async function updatePassword( viewer: Viewer, request: UpdatePasswordRequest, ): Promise { /* eslint-enable no-unused-vars */ // We have no way to handle this request anymore throw new ServerError('deprecated'); } async function updateUserSettings( viewer: Viewer, request: UpdateUserSettingsRequest, ) { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const createOrUpdateSettingsQuery = SQL` INSERT INTO settings (user, name, data) VALUES ${[[viewer.id, request.name, request.data]]} ON DUPLICATE KEY UPDATE data = VALUE(data) `; await dbQuery(createOrUpdateSettingsQuery); } export { accountUpdater, checkAndSendVerificationEmail, checkAndSendPasswordResetEmail, updateUserSettings, updatePassword, }; diff --git a/keyserver/src/updaters/activity-updaters.js b/keyserver/src/updaters/activity-updaters.js index 152f9a918..5036086bd 100644 --- a/keyserver/src/updaters/activity-updaters.js +++ b/keyserver/src/updaters/activity-updaters.js @@ -1,518 +1,518 @@ // @flow import invariant from 'invariant'; -import _difference from 'lodash/fp/difference'; -import _max from 'lodash/fp/max'; +import _difference from 'lodash/fp/difference.js'; +import _max from 'lodash/fp/max.js'; -import { localIDPrefix } from 'lib/shared/message-utils'; +import { localIDPrefix } from 'lib/shared/message-utils.js'; import type { UpdateActivityResult, UpdateActivityRequest, SetThreadUnreadStatusRequest, SetThreadUnreadStatusResult, -} from 'lib/types/activity-types'; -import { messageTypes } from 'lib/types/message-types'; -import { threadPermissions } from 'lib/types/thread-types'; -import { updateTypes } from 'lib/types/update-types'; -import { ServerError } from 'lib/utils/errors'; - -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL, mergeOrConditions } from '../database/database'; -import type { SQLStatementType } from '../database/types'; -import { deleteActivityForViewerSession } from '../deleters/activity-deleters'; +} from 'lib/types/activity-types.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import { ServerError } from 'lib/utils/errors.js'; + +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; +import type { SQLStatementType } from '../database/types.js'; +import { deleteActivityForViewerSession } from '../deleters/activity-deleters.js'; import { checkThread, getValidThreads, -} from '../fetchers/thread-permission-fetchers'; -import { rescindPushNotifs } from '../push/rescind'; -import { updateBadgeCount } from '../push/send'; -import type { Viewer } from '../session/viewer'; -import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times'; +} from 
'../fetchers/thread-permission-fetchers.js'; +import { rescindPushNotifs } from '../push/rescind.js'; +import { updateBadgeCount } from '../push/send.js'; +import type { Viewer } from '../session/viewer.js'; +import { earliestFocusedTimeConsideredExpired } from '../shared/focused-times.js'; type PartialThreadStatus = { +focusActive: boolean, +threadID: string, +newLastReadMessage: ?number, }; type ThreadStatus = | { +focusActive: true, +threadID: string, +newLastReadMessage: number, +curLastReadMessage: number, +rescindCondition: SQLStatementType, } | { +focusActive: false, +threadID: string, +newLastReadMessage: ?number, +curLastReadMessage: number, +rescindCondition: ?SQLStatementType, +newerMessageFromOtherAuthor: boolean, }; async function activityUpdater( viewer: Viewer, request: UpdateActivityRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const focusUpdatesByThreadID = new Map(); for (const activityUpdate of request.updates) { const threadID = activityUpdate.threadID; const updatesForThreadID = focusUpdatesByThreadID.get(threadID) ?? []; if (!focusUpdatesByThreadID.has(threadID)) { focusUpdatesByThreadID.set(threadID, updatesForThreadID); } updatesForThreadID.push(activityUpdate); } const unverifiedThreadIDs: $ReadOnlySet = new Set( request.updates.map(update => update.threadID), ); const verifiedThreadsData = await getValidThreads( viewer, [...unverifiedThreadIDs], [ { check: 'permission', permission: threadPermissions.VISIBLE, }, ], ); if (verifiedThreadsData.length === 0) { return { unfocusedToUnread: [] }; } const memberThreadIDs = new Set(); const verifiedThreadIDs = []; for (const threadData of verifiedThreadsData) { if (threadData.role > 0) { memberThreadIDs.add(threadData.threadID); } verifiedThreadIDs.push(threadData.threadID); } const partialThreadStatuses: PartialThreadStatus[] = []; for (const threadID of verifiedThreadIDs) { const focusUpdates = focusUpdatesByThreadID.get(threadID); invariant(focusUpdates, `no focusUpdate for thread ID ${threadID}`); const focusActive = !focusUpdates.some(update => !update.focus); const newLastReadMessage = _max( focusUpdates .filter( update => update.latestMessage && !update.latestMessage.startsWith(localIDPrefix), ) .map(update => parseInt(update.latestMessage)), ); partialThreadStatuses.push({ threadID, focusActive, newLastReadMessage, }); } // We update the focused rows before we check for new messages so we can // guarantee that any messages that may have set the thread to unread before // we set it to focused are caught and overriden await updateFocusedRows(viewer, partialThreadStatuses); if (memberThreadIDs.size === 0) { return { unfocusedToUnread: [] }; } const memberPartialThreadStatuses = partialThreadStatuses.filter( partialStatus => memberThreadIDs.has(partialStatus.threadID), ); const unfocusedLatestMessages = new Map(); for (const partialThreadStatus of memberPartialThreadStatuses) { const { threadID, focusActive, newLastReadMessage } = partialThreadStatus; if (!focusActive) { unfocusedLatestMessages.set(threadID, newLastReadMessage ?? 
0); } } const [ unfocusedThreadsWithNewerMessages, lastMessageInfos, ] = await Promise.all([ checkForNewerMessages(viewer, unfocusedLatestMessages), fetchLastMessageInfo(viewer, [...memberThreadIDs]), ]); const threadStatuses: ThreadStatus[] = []; for (const partialThreadStatus of memberPartialThreadStatuses) { const { threadID, focusActive, newLastReadMessage } = partialThreadStatus; const lastMessageInfo = lastMessageInfos.get(threadID); invariant( lastMessageInfo !== undefined, `no lastMessageInfo for thread ID ${threadID}`, ); const { lastMessage, lastReadMessage: curLastReadMessage, } = lastMessageInfo; if (focusActive) { threadStatuses.push({ focusActive: true, threadID, newLastReadMessage: newLastReadMessage ? Math.max(lastMessage, newLastReadMessage) : lastMessage, curLastReadMessage, rescindCondition: SQL`n.thread = ${threadID}`, }); } else { threadStatuses.push({ focusActive: false, threadID, newLastReadMessage, curLastReadMessage, rescindCondition: newLastReadMessage ? SQL`(n.thread = ${threadID} AND n.message <= ${newLastReadMessage})` : null, newerMessageFromOtherAuthor: unfocusedThreadsWithNewerMessages.has( threadID, ), }); } } // The following block determines whether to enqueue updates for a given // (user, thread) pair and whether to propagate badge count notifs to all of // that user's devices const setUnread: Array<{ +threadID: string, +unread: boolean }> = []; for (const threadStatus of threadStatuses) { const { threadID, curLastReadMessage } = threadStatus; if (!threadStatus.focusActive) { const { newLastReadMessage, newerMessageFromOtherAuthor } = threadStatus; if (newerMessageFromOtherAuthor) { setUnread.push({ threadID, unread: true }); } else if (!newLastReadMessage) { // This is a rare edge case. It should only be possible for threads that // have zero messages on both the client and server, which shouldn't // happen. In this case we'll set the thread to read, just in case... console.warn(`thread ID ${threadID} appears to have no messages`); setUnread.push({ threadID, unread: false }); } else if (newLastReadMessage > curLastReadMessage) { setUnread.push({ threadID, unread: false }); } } else { const { newLastReadMessage } = threadStatus; if (newLastReadMessage > curLastReadMessage) { setUnread.push({ threadID, unread: false }); } } } const time = Date.now(); const updateDatas = setUnread.map(({ threadID, unread }) => ({ type: updateTypes.UPDATE_THREAD_READ_STATUS, userID: viewer.userID, time, threadID, unread, })); const latestMessages = new Map(); for (const threadStatus of threadStatuses) { const { threadID, newLastReadMessage, curLastReadMessage } = threadStatus; if (newLastReadMessage && newLastReadMessage > curLastReadMessage) { latestMessages.set(threadID, newLastReadMessage); } } await Promise.all([ updateLastReadMessage(viewer, latestMessages), createUpdates(updateDatas, { viewer, updatesForCurrentSession: 'ignore' }), ]); // We do this afterwards so the badge count is correct const rescindConditions = threadStatuses .map(({ rescindCondition }) => rescindCondition) .filter(Boolean); let rescindCondition; if (rescindConditions.length > 0) { rescindCondition = SQL`n.user = ${viewer.userID} AND `; rescindCondition.append(mergeOrConditions(rescindConditions)); } await rescindAndUpdateBadgeCounts( viewer, rescindCondition, updateDatas.length > 0 ? 
'activity_update' : null, ); return { unfocusedToUnread: [...unfocusedThreadsWithNewerMessages] }; } async function updateFocusedRows( viewer: Viewer, partialThreadStatuses: $ReadOnlyArray, ): Promise { const threadIDs = partialThreadStatuses .filter(threadStatus => threadStatus.focusActive) .map(({ threadID }) => threadID); const time = Date.now(); if (threadIDs.length > 0) { const focusedInsertRows = threadIDs.map(threadID => [ viewer.userID, viewer.session, threadID, time, ]); const query = SQL` INSERT INTO focused (user, session, thread, time) VALUES ${focusedInsertRows} ON DUPLICATE KEY UPDATE time = VALUE(time) `; await dbQuery(query); } if (viewer.hasSessionInfo) { await deleteActivityForViewerSession(viewer, time); } } // To protect against a possible race condition, we reset the thread to unread // if the latest message ID on the client at the time that focus was dropped // is no longer the latest message ID. // Returns the set of unfocused threads that should be set to unread on // the client because a new message arrived since they were unfocused. async function checkForNewerMessages( viewer: Viewer, latestMessages: Map, ): Promise> { if (latestMessages.size === 0 || !viewer.loggedIn) { return new Set(); } const unfocusedThreadIDs = [...latestMessages.keys()]; const focusedElsewhereThreadIDs = await checkThreadsFocused( viewer, unfocusedThreadIDs, ); const unreadCandidates = _difference(unfocusedThreadIDs)( focusedElsewhereThreadIDs, ); if (unreadCandidates.length === 0) { return new Set(); } const knowOfExtractString = `$.${threadPermissions.KNOW_OF}.value`; const query = SQL` SELECT m.thread, MAX(m.id) AS latest_message FROM messages m LEFT JOIN memberships stm ON m.type = ${messageTypes.CREATE_SUB_THREAD} AND stm.thread = m.content AND stm.user = ${viewer.userID} WHERE m.thread IN (${unreadCandidates}) AND m.user != ${viewer.userID} AND ( m.type != ${messageTypes.CREATE_SUB_THREAD} OR JSON_EXTRACT(stm.permissions, ${knowOfExtractString}) IS TRUE ) GROUP BY m.thread `; const [result] = await dbQuery(query); const threadsWithNewerMessages = new Set(); for (const row of result) { const threadID = row.thread.toString(); const serverLatestMessage = row.latest_message; const clientLatestMessage = latestMessages.get(threadID); if (clientLatestMessage < serverLatestMessage) { threadsWithNewerMessages.add(threadID); } } return threadsWithNewerMessages; } async function checkThreadsFocused( viewer: Viewer, threadIDs: $ReadOnlyArray, ): Promise { const time = earliestFocusedTimeConsideredExpired(); const query = SQL` SELECT thread FROM focused WHERE time > ${time} AND user = ${viewer.userID} AND thread IN (${threadIDs}) GROUP BY thread `; const [result] = await dbQuery(query); const focusedThreadIDs = []; for (const row of result) { focusedThreadIDs.push(row.thread.toString()); } return focusedThreadIDs; } async function updateLastReadMessage( viewer: Viewer, lastReadMessages: $ReadOnlyMap, ) { if (lastReadMessages.size === 0) { return; } const query = SQL` UPDATE memberships SET last_read_message = GREATEST(last_read_message, CASE `; lastReadMessages.forEach((lastMessage, threadID) => { query.append(SQL` WHEN thread = ${threadID} THEN ${lastMessage} `); }); query.append(SQL` ELSE last_read_message END) WHERE thread IN (${[...lastReadMessages.keys()]}) AND user = ${viewer.userID} `); return await dbQuery(query); } async function setThreadUnreadStatus( viewer: Viewer, request: SetThreadUnreadStatusRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } 
const isMemberAndCanViewThread = await checkThread(viewer, request.threadID, [ { check: 'is_member', }, { check: 'permission', permission: threadPermissions.VISIBLE, }, ]); if (!isMemberAndCanViewThread) { throw new ServerError('invalid_parameters'); } const resetThreadToUnread = await shouldResetThreadToUnread(viewer, request); if (!resetThreadToUnread) { const lastReadMessage = request.unread ? SQL`0` : SQL`GREATEST(m.last_read_message, ${request.latestMessage ?? 0})`; const update = SQL` UPDATE memberships m SET m.last_read_message = `; update.append(lastReadMessage); update.append(SQL` WHERE m.thread = ${request.threadID} AND m.user = ${viewer.userID} `); const queryPromise = dbQuery(update); const time = Date.now(); const updatesPromise = createUpdates( [ { type: updateTypes.UPDATE_THREAD_READ_STATUS, userID: viewer.userID, time: time, threadID: request.threadID, unread: request.unread, }, ], { viewer, updatesForCurrentSession: 'ignore' }, ); await Promise.all([updatesPromise, queryPromise]); } let rescindCondition; if (!request.unread) { rescindCondition = SQL` n.user = ${viewer.userID} AND n.thread = ${request.threadID} AND n.message <= ${request.latestMessage} `; } await rescindAndUpdateBadgeCounts( viewer, rescindCondition, request.unread ? 'mark_as_unread' : 'mark_as_read', ); return { resetToUnread: resetThreadToUnread, }; } async function rescindAndUpdateBadgeCounts( viewer: Viewer, rescindCondition: ?SQLStatementType, badgeCountUpdateSource: ?( | 'activity_update' | 'mark_as_unread' | 'mark_as_read' ), ) { const notificationPromises = []; if (rescindCondition) { notificationPromises.push(rescindPushNotifs(rescindCondition)); } if (badgeCountUpdateSource) { notificationPromises.push(updateBadgeCount(viewer, badgeCountUpdateSource)); } await Promise.all(notificationPromises); } async function shouldResetThreadToUnread( viewer: Viewer, request: SetThreadUnreadStatusRequest, ): Promise { if (request.unread) { return false; } const threadsWithNewerMessages = await checkForNewerMessages( viewer, new Map([[request.threadID, parseInt(request.latestMessage) || 0]]), ); return threadsWithNewerMessages.has(request.threadID); } type LastMessageInfo = { +lastMessage: number, +lastReadMessage: number, }; async function fetchLastMessageInfo( viewer: Viewer, threadIDs: $ReadOnlyArray, ) { const query = SQL` SELECT thread, last_message, last_read_message FROM memberships WHERE user = ${viewer.userID} AND thread IN (${threadIDs}) `; const [result] = await dbQuery(query); const lastMessages = new Map(); for (const row of result) { const threadID = row.thread.toString(); const lastMessage = row.last_message; const lastReadMessage = row.last_read_message; lastMessages.set(threadID, { lastMessage, lastReadMessage }); } return lastMessages; } export { activityUpdater, setThreadUnreadStatus }; diff --git a/keyserver/src/updaters/device-token-updaters.js b/keyserver/src/updaters/device-token-updaters.js index 179600249..4b35d74eb 100644 --- a/keyserver/src/updaters/device-token-updaters.js +++ b/keyserver/src/updaters/device-token-updaters.js @@ -1,49 +1,49 @@ // @flow import { type DeviceTokenUpdateRequest, isDeviceType, -} from 'lib/types/device-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/device-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; 
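// The '.js' suffixes added to the import specifiers above (and throughout this
// diff) are presumably there to satisfy Node's native ESM resolver, which
// requires relative specifiers to spell out the file extension. A minimal
// sketch of the difference, reusing this module's own export as an example:
//
//   import { clearDeviceToken } from './device-token-updaters.js'; // resolves under ESM
//   import { clearDeviceToken } from './device-token-updaters';    // ERR_MODULE_NOT_FOUND under ESM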
async function deviceTokenUpdater( viewer: Viewer, update: DeviceTokenUpdateRequest, ): Promise { const deviceType = update.platformDetails?.platform ?? update.deviceType; if (!isDeviceType(deviceType)) { throw new ServerError('invalid_parameters'); } viewer.setDeviceToken(update.deviceToken); await clearDeviceToken(update.deviceToken); const setColumns = {}; setColumns.device_token = update.deviceToken; setColumns.platform = deviceType; if (update.platformDetails) { const { platform, ...versions } = update.platformDetails; if (Object.keys(versions).length > 0) { setColumns.versions = JSON.stringify(versions); } } const query = SQL` UPDATE cookies SET ${setColumns} WHERE id = ${viewer.cookieID} `; await dbQuery(query); } async function clearDeviceToken(deviceToken: string): Promise { const query = SQL` UPDATE cookies SET device_token = NULL WHERE device_token = ${deviceToken} `; await dbQuery(query); } export { deviceTokenUpdater, clearDeviceToken }; diff --git a/keyserver/src/updaters/entry-updaters.js b/keyserver/src/updaters/entry-updaters.js index cd4156636..6997faf59 100644 --- a/keyserver/src/updaters/entry-updaters.js +++ b/keyserver/src/updaters/entry-updaters.js @@ -1,289 +1,289 @@ // @flow import invariant from 'invariant'; -import _isEqual from 'lodash/fp/isEqual'; +import _isEqual from 'lodash/fp/isEqual.js'; import { rawEntryInfoWithinCalendarQuery, calendarQueryDifference, -} from 'lib/shared/entry-utils'; +} from 'lib/shared/entry-utils.js'; import { type SaveEntryRequest, type SaveEntryResponse, type RawEntryInfo, type CalendarQuery, defaultCalendarQuery, -} from 'lib/types/entry-types'; -import { messageTypes } from 'lib/types/message-types'; -import { threadPermissions } from 'lib/types/thread-types'; +} from 'lib/types/entry-types.js'; +import { messageTypes } from 'lib/types/message-types.js'; +import { threadPermissions } from 'lib/types/thread-types.js'; import { updateTypes, type ServerCreateUpdatesResponse, -} from 'lib/types/update-types'; -import { dateString } from 'lib/utils/date-utils'; -import { ServerError } from 'lib/utils/errors'; -import { values } from 'lib/utils/objects'; +} from 'lib/types/update-types.js'; +import { dateString } from 'lib/utils/date-utils.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { values } from 'lib/utils/objects.js'; -import createIDs from '../creators/id-creator'; -import createMessages from '../creators/message-creator'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; +import createIDs from '../creators/id-creator.js'; +import createMessages from '../creators/message-creator.js'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; import { fetchEntryInfo, checkThreadPermissionForEntry, -} from '../fetchers/entry-fetchers'; -import { fetchActiveSessionsForThread } from '../fetchers/session-fetchers'; -import type { Viewer } from '../session/viewer'; -import type { SessionUpdate } from './session-updaters'; +} from '../fetchers/entry-fetchers.js'; +import { fetchActiveSessionsForThread } from '../fetchers/session-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import type { SessionUpdate } from './session-updaters.js'; const defaultUpdateCreationResponse = { viewerUpdates: [], userInfos: [] }; async function updateEntry( viewer: Viewer, request: SaveEntryRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const 
lastRevisionQuery = SQL` SELECT r.id, r.author, r.text, r.session, r.last_update, r.deleted, e.text AS entryText FROM revisions r LEFT JOIN entries e ON r.entry = e.id WHERE r.entry = ${request.entryID} ORDER BY r.last_update DESC LIMIT 1 `; const [hasPermission, entryInfo, [lastRevisionResult]] = await Promise.all([ checkThreadPermissionForEntry( viewer, request.entryID, threadPermissions.EDIT_ENTRIES, ), fetchEntryInfo(viewer, request.entryID), dbQuery(lastRevisionQuery), ]); if (!hasPermission) { throw new ServerError('invalid_credentials'); } if (!entryInfo) { throw new ServerError('invalid_parameters'); } if (entryInfo.deleted) { throw new ServerError('entry_deleted'); } if (lastRevisionResult.length === 0) { throw new ServerError('unknown_error'); } const lastRevisionRow = lastRevisionResult[0]; if ( lastRevisionRow.deleted || lastRevisionRow.text !== lastRevisionRow.entryText ) { throw new ServerError('database_corruption'); } const viewerID = viewer.userID; const dbPromises = []; let insertNewRevision = false; let shouldUpdateEntry = false; if ( viewerID === lastRevisionRow.author && viewer.session === lastRevisionRow.session ) { if (lastRevisionRow.last_update >= request.timestamp) { // Updates got sent out of order and as a result an update newer than us // has already been committed, so there's nothing to do return { entryID: request.entryID, newMessageInfos: [], updatesResult: defaultUpdateCreationResponse, }; } shouldUpdateEntry = true; if (lastRevisionRow.last_update + 120000 > request.timestamp) { dbPromises.push( dbQuery(SQL` UPDATE revisions SET last_update = ${request.timestamp}, text = ${request.text} WHERE id = ${lastRevisionRow.id} `), ); } else { insertNewRevision = true; } } else if ( viewer.session !== lastRevisionRow.session && request.prevText !== lastRevisionRow.text ) { throw new ServerError('concurrent_modification', { db: lastRevisionRow.text, ui: request.prevText, }); } else if (lastRevisionRow.last_update >= request.timestamp) { throw new ServerError('old_timestamp', { oldTime: lastRevisionRow.last_update, newTime: request.timestamp, }); } else { shouldUpdateEntry = true; insertNewRevision = true; } if (shouldUpdateEntry) { dbPromises.push( dbQuery(SQL` UPDATE entries SET last_update = ${request.timestamp}, text = ${request.text} WHERE id = ${request.entryID} `), ); } if (insertNewRevision) { const [revisionID] = await createIDs('revisions', 1); const revisionRow = [ revisionID, request.entryID, viewerID, request.text, request.timestamp, viewer.session, request.timestamp, 0, ]; dbPromises.push( dbQuery(SQL` INSERT INTO revisions(id, entry, author, text, creation_time, session, last_update, deleted) VALUES ${[revisionRow]} `), ); } const updatedEntryInfo = { ...entryInfo, text: request.text, }; const [newMessageInfos, updatesResult] = await Promise.all([ createMessages(viewer, [ { type: messageTypes.EDIT_ENTRY, threadID: entryInfo.threadID, creatorID: viewerID, time: Date.now(), entryID: request.entryID, date: dateString(entryInfo.year, entryInfo.month, entryInfo.day), text: request.text, }, ]), createUpdateDatasForChangedEntryInfo( viewer, entryInfo, updatedEntryInfo, request.calendarQuery, ), Promise.all(dbPromises), ]); return { entryID: request.entryID, newMessageInfos, updatesResult }; } async function createUpdateDatasForChangedEntryInfo( viewer: Viewer, oldEntryInfo: ?RawEntryInfo, newEntryInfo: RawEntryInfo, inputCalendarQuery: ?CalendarQuery, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const entryID = 
newEntryInfo.id; invariant(entryID, 'should be set'); // If we ever make it possible to move entries from one thread to another, // we should update this code to look at oldEntryInfo.threadID as well const fetchedFilters = await fetchActiveSessionsForThread( newEntryInfo.threadID, ); let calendarQuery; if (inputCalendarQuery) { calendarQuery = inputCalendarQuery; } else if (viewer.hasSessionInfo) { // This should only ever happen for "legacy" clients who call in without // providing this information. These clients wouldn't know how to deal with // the corresponding UpdateInfos anyways, so no reason to be worried. calendarQuery = viewer.calendarQuery; } else { calendarQuery = defaultCalendarQuery(viewer.platform, viewer.timeZone); } let replaced = null; const { userID } = viewer; const filters = fetchedFilters.map(filter => filter.session === viewer.session && filter.userID === userID ? (replaced = { ...filter, calendarQuery }) : filter, ); if (!replaced) { const { session } = viewer; filters.push({ userID, session, calendarQuery }); } const time = Date.now(); const updateDatas = filters .filter( filter => rawEntryInfoWithinCalendarQuery(newEntryInfo, filter.calendarQuery) || (oldEntryInfo && rawEntryInfoWithinCalendarQuery(oldEntryInfo, filter.calendarQuery)), ) .map(filter => ({ type: updateTypes.UPDATE_ENTRY, userID: filter.userID, time, entryID, targetSession: filter.session, })); const { userInfos, ...updatesResult } = await createUpdates(updateDatas, { viewer, calendarQuery, updatesForCurrentSession: 'return', }); return { ...updatesResult, userInfos: values(userInfos), }; } type CalendarQueryComparisonResult = { +difference: $ReadOnlyArray, +oldCalendarQuery: CalendarQuery, +sessionUpdate: SessionUpdate, }; function compareNewCalendarQuery( viewer: Viewer, newCalendarQuery: CalendarQuery, ): CalendarQueryComparisonResult { if (!viewer.hasSessionInfo) { throw new ServerError('unknown_error'); } const oldCalendarQuery = viewer.calendarQuery; const difference = calendarQueryDifference( oldCalendarQuery, newCalendarQuery, ); const sessionUpdate = _isEqual(oldCalendarQuery)(newCalendarQuery) ? 
{} : { query: newCalendarQuery }; return { difference, oldCalendarQuery, sessionUpdate: Object.freeze({ ...sessionUpdate }), }; } export { updateEntry, createUpdateDatasForChangedEntryInfo, compareNewCalendarQuery, }; diff --git a/keyserver/src/updaters/relationship-updaters.js b/keyserver/src/updaters/relationship-updaters.js index aa6a7bc69..d765b5207 100644 --- a/keyserver/src/updaters/relationship-updaters.js +++ b/keyserver/src/updaters/relationship-updaters.js @@ -1,370 +1,370 @@ // @flow import invariant from 'invariant'; -import { sortIDs } from 'lib/shared/relationship-utils'; -import { messageTypes } from 'lib/types/message-types'; +import { sortIDs } from 'lib/shared/relationship-utils.js'; +import { messageTypes } from 'lib/types/message-types.js'; import { type RelationshipRequest, type RelationshipErrors, type UndirectedRelationshipRow, relationshipActions, undirectedStatus, directedStatus, -} from 'lib/types/relationship-types'; -import { threadTypes } from 'lib/types/thread-types'; -import { updateTypes, type UpdateData } from 'lib/types/update-types'; -import { cartesianProduct } from 'lib/utils/array'; -import { ServerError } from 'lib/utils/errors'; -import { promiseAll } from 'lib/utils/promises'; - -import createMessages from '../creators/message-creator'; -import { createThread } from '../creators/thread-creator'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL, mergeOrConditions } from '../database/database'; -import { fetchFriendRequestRelationshipOperations } from '../fetchers/relationship-fetchers'; -import { fetchUserInfos } from '../fetchers/user-fetchers'; -import type { Viewer } from '../session/viewer'; +} from 'lib/types/relationship-types.js'; +import { threadTypes } from 'lib/types/thread-types.js'; +import { updateTypes, type UpdateData } from 'lib/types/update-types.js'; +import { cartesianProduct } from 'lib/utils/array.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { promiseAll } from 'lib/utils/promises.js'; + +import createMessages from '../creators/message-creator.js'; +import { createThread } from '../creators/thread-creator.js'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL, mergeOrConditions } from '../database/database.js'; +import { fetchFriendRequestRelationshipOperations } from '../fetchers/relationship-fetchers.js'; +import { fetchUserInfos } from '../fetchers/user-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; async function updateRelationships( viewer: Viewer, request: RelationshipRequest, ): Promise { const { action } = request; if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const uniqueUserIDs = [...new Set(request.userIDs)]; const users = await fetchUserInfos(uniqueUserIDs); let errors = {}; const userIDs: string[] = []; for (const userID of uniqueUserIDs) { if (userID === viewer.userID || !users[userID].username) { const acc = errors.invalid_user || []; errors.invalid_user = [...acc, userID]; } else { userIDs.push(userID); } } if (!userIDs.length) { return Object.freeze({ ...errors }); } const updateIDs = []; if (action === relationshipActions.FRIEND) { // We have to create personal threads before setting the relationship // status. By doing that we make sure that failed thread creation is // reported to the caller and can be repeated - there should be only // one PERSONAL thread per a pair of users and we can safely call it // repeatedly. 
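// A sketch of the retry property this buys us (hypothetical user ID, assuming
// the FRIEND call fails partway through): calling
//
//   await updateRelationships(viewer, {
//     action: relationshipActions.FRIEND,
//     userIDs: ['123'],
//   });
//
// a second time is safe, because createPersonalThreads below first looks up
// any existing PERSONAL thread with each user and only creates the missing
// ones.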
const threadIDPerUser = await createPersonalThreads(viewer, request); const { userRelationshipOperations, errors: friendRequestErrors, } = await fetchFriendRequestRelationshipOperations(viewer, userIDs); errors = { ...errors, ...friendRequestErrors }; const undirectedInsertRows = []; const directedInsertRows = []; const directedDeleteIDs = []; const messageDatas = []; const now = Date.now(); for (const userID in userRelationshipOperations) { const operations = userRelationshipOperations[userID]; const ids = sortIDs(viewer.userID, userID); if (operations.length) { updateIDs.push(userID); } for (const operation of operations) { if (operation === 'delete_directed') { directedDeleteIDs.push(userID); } else if (operation === 'friend') { const [user1, user2] = ids; const status = undirectedStatus.FRIEND; undirectedInsertRows.push({ user1, user2, status }); messageDatas.push({ type: messageTypes.UPDATE_RELATIONSHIP, threadID: threadIDPerUser[userID], creatorID: viewer.userID, targetID: userID, time: now, operation: 'request_accepted', }); } else if (operation === 'pending_friend') { const status = directedStatus.PENDING_FRIEND; directedInsertRows.push([viewer.userID, userID, status]); messageDatas.push({ type: messageTypes.UPDATE_RELATIONSHIP, threadID: threadIDPerUser[userID], creatorID: viewer.userID, targetID: userID, time: now, operation: 'request_sent', }); } else if (operation === 'know_of') { const [user1, user2] = ids; const status = undirectedStatus.KNOW_OF; undirectedInsertRows.push({ user1, user2, status }); } else { invariant(false, `unexpected relationship operation ${operation}`); } } } const promises = [updateUndirectedRelationships(undirectedInsertRows)]; if (directedInsertRows.length) { const directedInsertQuery = SQL` INSERT INTO relationships_directed (user1, user2, status) VALUES ${directedInsertRows} ON DUPLICATE KEY UPDATE status = VALUE(status) `; promises.push(dbQuery(directedInsertQuery)); } if (directedDeleteIDs.length) { const directedDeleteQuery = SQL` DELETE FROM relationships_directed WHERE (user1 = ${viewer.userID} AND user2 IN (${directedDeleteIDs})) OR (status = ${directedStatus.PENDING_FRIEND} AND user1 IN (${directedDeleteIDs}) AND user2 = ${viewer.userID}) `; promises.push(dbQuery(directedDeleteQuery)); } if (messageDatas.length > 0) { promises.push(createMessages(viewer, messageDatas, 'broadcast')); } await Promise.all(promises); } else if (action === relationshipActions.UNFRIEND) { updateIDs.push(...userIDs); const updateRows = userIDs.map(userID => { const [user1, user2] = sortIDs(viewer.userID, userID); return { user1, user2, status: undirectedStatus.KNOW_OF }; }); const deleteQuery = SQL` DELETE FROM relationships_directed WHERE status = ${directedStatus.PENDING_FRIEND} AND (user1 = ${viewer.userID} AND user2 IN (${userIDs}) OR user1 IN (${userIDs}) AND user2 = ${viewer.userID}) `; await Promise.all([ updateUndirectedRelationships(updateRows, false), dbQuery(deleteQuery), ]); } else if (action === relationshipActions.BLOCK) { updateIDs.push(...userIDs); const directedRows = []; const undirectedRows = []; for (const userID of userIDs) { directedRows.push([viewer.userID, userID, directedStatus.BLOCKED]); const [user1, user2] = sortIDs(viewer.userID, userID); undirectedRows.push({ user1, user2, status: undirectedStatus.KNOW_OF }); } const directedInsertQuery = SQL` INSERT INTO relationships_directed (user1, user2, status) VALUES ${directedRows} ON DUPLICATE KEY UPDATE status = VALUE(status) `; const directedDeleteQuery = SQL` DELETE FROM 
relationships_directed WHERE status = ${directedStatus.PENDING_FRIEND} AND user1 IN (${userIDs}) AND user2 = ${viewer.userID} `; await Promise.all([ dbQuery(directedInsertQuery), dbQuery(directedDeleteQuery), updateUndirectedRelationships(undirectedRows, false), ]); } else if (action === relationshipActions.UNBLOCK) { updateIDs.push(...userIDs); const query = SQL` DELETE FROM relationships_directed WHERE status = ${directedStatus.BLOCKED} AND user1 = ${viewer.userID} AND user2 IN (${userIDs}) `; await dbQuery(query); } else { invariant(false, `action ${action} is invalid or not supported currently`); } await createUpdates( updateDatasForUserPairs(cartesianProduct([viewer.userID], updateIDs)), ); return Object.freeze({ ...errors }); } function updateDatasForUserPairs( userPairs: $ReadOnlyArray<[string, string]>, ): UpdateData[] { const time = Date.now(); const updateDatas = []; for (const [user1, user2] of userPairs) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID: user1, time, updatedUserID: user2, }); updateDatas.push({ type: updateTypes.UPDATE_USER, userID: user2, time, updatedUserID: user1, }); } return updateDatas; } async function updateUndirectedRelationships( changeset: UndirectedRelationshipRow[], greatest: boolean = true, ) { if (!changeset.length) { return; } const rows = changeset.map(row => [row.user1, row.user2, row.status]); const query = SQL` INSERT INTO relationships_undirected (user1, user2, status) VALUES ${rows} `; if (greatest) { query.append( SQL`ON DUPLICATE KEY UPDATE status = GREATEST(status, VALUE(status))`, ); } else { query.append(SQL`ON DUPLICATE KEY UPDATE status = VALUE(status)`); } await dbQuery(query); } async function updateChangedUndirectedRelationships( changeset: UndirectedRelationshipRow[], ): Promise { if (changeset.length === 0) { return []; } const user2ByUser1: Map> = new Map(); for (const { user1, user2 } of changeset) { if (!user2ByUser1.has(user1)) { user2ByUser1.set(user1, new Set()); } user2ByUser1.get(user1)?.add(user2); } const selectQuery = SQL` SELECT user1, user2, status FROM relationships_undirected WHERE `; const conditions = []; for (const [user1, users] of user2ByUser1) { conditions.push(SQL`(user1 = ${user1} AND user2 IN (${[...users]}))`); } selectQuery.append(mergeOrConditions(conditions)); const [result] = await dbQuery(selectQuery); const existingStatuses = new Map(); for (const row of result) { existingStatuses.set(`${row.user1}|${row.user2}`, row.status); } const insertRows = []; for (const row of changeset) { const existingStatus = existingStatuses.get(`${row.user1}|${row.user2}`); if (!existingStatus || existingStatus < row.status) { insertRows.push([row.user1, row.user2, row.status]); } } if (insertRows.length === 0) { return []; } const insertQuery = SQL` INSERT INTO relationships_undirected (user1, user2, status) VALUES ${insertRows} ON DUPLICATE KEY UPDATE status = GREATEST(status, VALUE(status)) `; await dbQuery(insertQuery); return updateDatasForUserPairs( insertRows.map(([user1, user2]) => [user1, user2]), ); } async function createPersonalThreads( viewer: Viewer, request: RelationshipRequest, ) { invariant( request.action === relationshipActions.FRIEND, 'We should only create a PERSONAL threads when sending a FRIEND request, ' + `but we tried to do that for ${request.action}`, ); const threadIDPerUser = {}; const personalThreadsQuery = SQL` SELECT t.id AS threadID, m2.user AS user2 FROM threads t INNER JOIN memberships m1 ON m1.thread = t.id AND m1.user = ${viewer.userID} INNER JOIN memberships m2 ON 
m2.thread = t.id AND m2.user IN (${request.userIDs}) WHERE t.type = ${threadTypes.PERSONAL} AND m1.role > 0 AND m2.role > 0 `; const [personalThreadsResult] = await dbQuery(personalThreadsQuery); for (const row of personalThreadsResult) { const user2 = row.user2.toString(); threadIDPerUser[user2] = row.threadID.toString(); } const threadCreationPromises = {}; for (const userID of request.userIDs) { if (threadIDPerUser[userID]) { continue; } threadCreationPromises[userID] = createThread( viewer, { type: threadTypes.PERSONAL, initialMemberIDs: [userID], }, { forceAddMembers: true, updatesForCurrentSession: 'broadcast' }, ); } const personalThreadPerUser = await promiseAll(threadCreationPromises); for (const userID in personalThreadPerUser) { const newThread = personalThreadPerUser[userID]; threadIDPerUser[userID] = newThread.newThreadID ?? newThread.newThreadInfo.id; } return threadIDPerUser; } export { updateRelationships, updateDatasForUserPairs, updateUndirectedRelationships, updateChangedUndirectedRelationships, }; diff --git a/keyserver/src/updaters/role-updaters.js b/keyserver/src/updaters/role-updaters.js index 8740bfe19..bbae7caab 100644 --- a/keyserver/src/updaters/role-updaters.js +++ b/keyserver/src/updaters/role-updaters.js @@ -1,109 +1,109 @@ // @flow import invariant from 'invariant'; -import _isEqual from 'lodash/fp/isEqual'; +import _isEqual from 'lodash/fp/isEqual.js'; -import type { ThreadType } from 'lib/types/thread-types'; +import type { ThreadType } from 'lib/types/thread-types.js'; -import createIDs from '../creators/id-creator'; -import { getRolePermissionBlobs } from '../creators/role-creator'; -import { dbQuery, SQL } from '../database/database'; -import { fetchRoles } from '../fetchers/role-fetchers'; -import type { Viewer } from '../session/viewer'; +import createIDs from '../creators/id-creator.js'; +import { getRolePermissionBlobs } from '../creators/role-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchRoles } from '../fetchers/role-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; async function updateRoles( viewer: Viewer, threadID: string, threadType: ThreadType, ): Promise { const currentRoles = await fetchRoles(threadID); const currentRolePermissions = {}; const currentRoleIDs = {}; for (const roleInfo of currentRoles) { currentRolePermissions[roleInfo.name] = roleInfo.permissions; currentRoleIDs[roleInfo.name] = roleInfo.id; } const rolePermissions = getRolePermissionBlobs(threadType); if (_isEqual(rolePermissions)(currentRolePermissions)) { return; } const promises = []; if (rolePermissions.Admins && !currentRolePermissions.Admins) { const [id] = await createIDs('roles', 1); const newRow = [ id, threadID, 'Admins', JSON.stringify(rolePermissions.Admins), Date.now(), ]; const insertQuery = SQL` INSERT INTO roles (id, thread, name, permissions, creation_time) VALUES ${[newRow]} `; promises.push(dbQuery(insertQuery)); const setAdminQuery = SQL` UPDATE memberships SET role = ${id} WHERE thread = ${threadID} AND user = ${viewer.userID} AND role > 0 `; promises.push(dbQuery(setAdminQuery)); } else if (!rolePermissions.Admins && currentRolePermissions.Admins) { invariant( currentRoleIDs.Admins && currentRoleIDs.Members, 'ids should exist for both Admins and Members roles', ); const id = currentRoleIDs.Admins; const deleteQuery = SQL` DELETE r, i FROM roles r LEFT JOIN ids i ON i.id = r.id WHERE r.id = ${id} `; promises.push(dbQuery(deleteQuery)); const updateMembershipsQuery = SQL` UPDATE memberships SET role 
= ${currentRoleIDs.Members} WHERE thread = ${threadID} AND role > 0 `; promises.push(dbQuery(updateMembershipsQuery)); } const updatePermissions = {}; for (const name in currentRoleIDs) { const currentPermissions = currentRolePermissions[name]; const permissions = rolePermissions[name]; if ( !permissions || !currentPermissions || _isEqual(permissions)(currentPermissions) ) { continue; } const id = currentRoleIDs[name]; updatePermissions[id] = permissions; } if (Object.values(updatePermissions).length > 0) { const updateQuery = SQL` UPDATE roles SET permissions = CASE id `; for (const id in updatePermissions) { const permissionsBlob = JSON.stringify(updatePermissions[id]); updateQuery.append(SQL` WHEN ${id} THEN ${permissionsBlob} `); } updateQuery.append(SQL` ELSE permissions END WHERE thread = ${threadID} `); promises.push(dbQuery(updateQuery)); } await Promise.all(promises); } export { updateRoles }; diff --git a/keyserver/src/updaters/session-updaters.js b/keyserver/src/updaters/session-updaters.js index 7e4a6b853..eca119ab1 100644 --- a/keyserver/src/updaters/session-updaters.js +++ b/keyserver/src/updaters/session-updaters.js @@ -1,53 +1,53 @@ // @flow -import type { Shape } from 'lib/types/core'; -import type { CalendarQuery } from 'lib/types/entry-types'; +import type { Shape } from 'lib/types/core.js'; +import type { CalendarQuery } from 'lib/types/entry-types.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; export type SessionUpdate = Shape<{ +query: CalendarQuery, +lastUpdate: number, +lastValidated: number, }>; async function commitSessionUpdate( viewer: Viewer, sessionUpdate: SessionUpdate, ): Promise { const sqlUpdate = {}; if (sessionUpdate.query) { sqlUpdate.query = JSON.stringify(sessionUpdate.query); } const { lastUpdate, lastValidated } = sessionUpdate; if (lastUpdate !== null && lastUpdate !== undefined) { sqlUpdate.last_update = lastUpdate; } if (lastValidated !== null && lastValidated !== undefined) { sqlUpdate.last_validated = lastValidated; } if (Object.keys(sqlUpdate).length === 0) { return; } viewer.setSessionInfo({ lastUpdate: sessionUpdate.lastUpdate ? sessionUpdate.lastUpdate : viewer.sessionLastUpdated, lastValidated: sessionUpdate.lastValidated ? sessionUpdate.lastValidated : viewer.sessionLastValidated, calendarQuery: sessionUpdate.query ? 
sessionUpdate.query : viewer.calendarQuery, }); const query = SQL` UPDATE sessions SET ${sqlUpdate} WHERE id = ${viewer.session} `; await dbQuery(query); } export { commitSessionUpdate }; diff --git a/keyserver/src/updaters/thread-permission-updaters.js b/keyserver/src/updaters/thread-permission-updaters.js index 2f40f4b14..632da3160 100644 --- a/keyserver/src/updaters/thread-permission-updaters.js +++ b/keyserver/src/updaters/thread-permission-updaters.js @@ -1,1300 +1,1300 @@ // @flow import invariant from 'invariant'; -import _isEqual from 'lodash/fp/isEqual'; +import _isEqual from 'lodash/fp/isEqual.js'; -import bots from 'lib/facts/bots'; -import genesis from 'lib/facts/genesis'; +import bots from 'lib/facts/bots.js'; +import genesis from 'lib/facts/genesis.js'; import { makePermissionsBlob, makePermissionsForChildrenBlob, getRoleForPermissions, -} from 'lib/permissions/thread-permissions'; -import type { CalendarQuery } from 'lib/types/entry-types'; +} from 'lib/permissions/thread-permissions.js'; +import type { CalendarQuery } from 'lib/types/entry-types.js'; import { type ThreadPermissionsBlob, type ThreadRolePermissionsBlob, type ThreadType, assertThreadType, -} from 'lib/types/thread-types'; +} from 'lib/types/thread-types.js'; import { updateTypes, type ServerUpdateInfo, type CreateUpdatesResult, -} from 'lib/types/update-types'; -import { pushAll } from 'lib/utils/array'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/update-types.js'; +import { pushAll } from 'lib/utils/array.js'; +import { ServerError } from 'lib/utils/errors.js'; import { createUpdates, type UpdatesForCurrentSession, -} from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; +} from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; import { fetchServerThreadInfos, rawThreadInfosFromServerThreadInfos, type FetchThreadInfosResult, -} from '../fetchers/thread-fetchers'; -import { rescindPushNotifs } from '../push/rescind'; -import { createScriptViewer } from '../session/scripts'; -import type { Viewer } from '../session/viewer'; -import { updateRoles } from '../updaters/role-updaters'; -import DepthQueue from '../utils/depth-queue'; -import RelationshipChangeset from '../utils/relationship-changeset'; -import { updateChangedUndirectedRelationships } from './relationship-updaters'; +} from '../fetchers/thread-fetchers.js'; +import { rescindPushNotifs } from '../push/rescind.js'; +import { createScriptViewer } from '../session/scripts.js'; +import type { Viewer } from '../session/viewer.js'; +import { updateRoles } from '../updaters/role-updaters.js'; +import DepthQueue from '../utils/depth-queue.js'; +import RelationshipChangeset from '../utils/relationship-changeset.js'; +import { updateChangedUndirectedRelationships } from './relationship-updaters.js'; export type MembershipRowToSave = { +operation: 'save', +intent: 'join' | 'leave' | 'none', +userID: string, +threadID: string, +userNeedsFullThreadDetails: boolean, +permissions: ?ThreadPermissionsBlob, +permissionsForChildren: ?ThreadPermissionsBlob, // null role represents by "0" +role: string, +oldRole: string, +unread?: boolean, }; type MembershipRowToDelete = { +operation: 'delete', +intent: 'join' | 'leave' | 'none', +userID: string, +threadID: string, +oldRole: string, }; type MembershipRow = MembershipRowToSave | MembershipRowToDelete; type Changeset = { +membershipRows: MembershipRow[], +relationshipChangeset: RelationshipChangeset, }; // 0 role means to 
remove the user from the thread // null role means to set the user to the default role // string role means to set the user to the role with that ID // -1 role means to set the user as a "ghost" (former member) type ChangeRoleOptions = { +setNewMembersToUnread?: boolean, }; type ChangeRoleMemberInfo = { permissionsFromParent?: ?ThreadPermissionsBlob, memberOfContainingThread?: boolean, }; async function changeRole( threadID: string, userIDs: $ReadOnlyArray, role: string | -1 | 0 | null, options?: ChangeRoleOptions, ): Promise { const intent = role === -1 || role === 0 ? 'leave' : 'join'; const setNewMembersToUnread = options?.setNewMembersToUnread && intent === 'join'; if (userIDs.length === 0) { return { membershipRows: [], relationshipChangeset: new RelationshipChangeset(), }; } const membershipQuery = SQL` SELECT user, role, permissions, permissions_for_children FROM memberships WHERE thread = ${threadID} `; const parentMembershipQuery = SQL` SELECT pm.user, pm.permissions_for_children AS permissions_from_parent FROM threads t INNER JOIN memberships pm ON pm.thread = t.parent_thread_id WHERE t.id = ${threadID} AND (pm.user IN (${userIDs}) OR t.parent_thread_id != ${genesis.id}) `; const containingMembershipQuery = SQL` SELECT cm.user, cm.role AS containing_role FROM threads t INNER JOIN memberships cm ON cm.thread = t.containing_thread_id WHERE t.id = ${threadID} AND cm.user IN (${userIDs}) `; const [ [membershipResults], [parentMembershipResults], containingMembershipResults, roleThreadResult, ] = await Promise.all([ dbQuery(membershipQuery), dbQuery(parentMembershipQuery), (async () => { if (intent === 'leave') { // Membership in the container only needs to be checked for members return []; } const [result] = await dbQuery(containingMembershipQuery); return result; })(), changeRoleThreadQuery(threadID, role), ]); const { roleColumnValue: intendedRole, threadType, parentThreadID, hasContainingThreadID, rolePermissions: intendedRolePermissions, depth, } = roleThreadResult; const existingMembershipInfo = new Map(); for (const row of membershipResults) { const userID = row.user.toString(); existingMembershipInfo.set(userID, { oldRole: row.role.toString(), oldPermissions: JSON.parse(row.permissions), oldPermissionsForChildren: JSON.parse(row.permissions_for_children), }); } const ancestorMembershipInfo: Map = new Map(); for (const row of parentMembershipResults) { const userID = row.user.toString(); if (!userIDs.includes(userID)) { continue; } ancestorMembershipInfo.set(userID, { permissionsFromParent: JSON.parse(row.permissions_from_parent), }); } for (const row of containingMembershipResults) { const userID = row.user.toString(); const ancestorMembership = ancestorMembershipInfo.get(userID); const memberOfContainingThread = row.containing_role > 0; if (ancestorMembership) { ancestorMembership.memberOfContainingThread = memberOfContainingThread; } else { ancestorMembershipInfo.set(userID, { memberOfContainingThread, }); } } const relationshipChangeset = new RelationshipChangeset(); const existingMemberIDs = [...existingMembershipInfo.keys()]; if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(existingMemberIDs); } const parentMemberIDs = parentMembershipResults.map(row => row.user.toString(), ); if (parentThreadID && parentThreadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(parentMemberIDs); } const membershipRows = []; const toUpdateDescendants = new Map(); for (const userID of userIDs) { const existingMembership = 
existingMembershipInfo.get(userID); const oldRole = existingMembership?.oldRole ?? '-1'; const oldPermissions = existingMembership?.oldPermissions ?? null; const oldPermissionsForChildren = existingMembership?.oldPermissionsForChildren ?? null; if (existingMembership && oldRole === intendedRole) { // If the old role is the same as the new one, we have nothing to update continue; } else if (Number(oldRole) > 0 && role === null) { // In the case where we're just trying to add somebody to a thread, if // they already have a role with a nonzero role then we don't need to do // anything continue; } let permissionsFromParent = null; let memberOfContainingThread = false; const ancestorMembership = ancestorMembershipInfo.get(userID); if (ancestorMembership) { permissionsFromParent = ancestorMembership.permissionsFromParent; memberOfContainingThread = ancestorMembership.memberOfContainingThread; } if (!hasContainingThreadID) { memberOfContainingThread = true; } const rolePermissions = memberOfContainingThread ? intendedRolePermissions : null; const targetRole = memberOfContainingThread ? intendedRole : '-1'; const permissions = makePermissionsBlob( rolePermissions, permissionsFromParent, threadID, threadType, ); const permissionsForChildren = makePermissionsForChildrenBlob(permissions); const newRole = getRoleForPermissions(targetRole, permissions); const userBecameMember = Number(oldRole) <= 0 && Number(newRole) > 0; const userLostMembership = Number(oldRole) > 0 && Number(newRole) <= 0; if ( (intent === 'join' && Number(newRole) <= 0) || (intent === 'leave' && Number(newRole) > 0) ) { throw new ServerError('invalid_parameters'); } else if (intendedRole !== newRole) { console.warn( `changeRole called for role=${intendedRole}, but ended up setting ` + `role=${newRole} for userID ${userID} and threadID ${threadID}, ` + 'probably because KNOW_OF permission was unexpectedly present or ' + 'missing', ); } if ( existingMembership && _isEqual(permissions)(oldPermissions) && oldRole === newRole ) { // This thread and all of its descendants need no updates for this user, // since the corresponding memberships row is unchanged by this operation continue; } if (permissions) { membershipRows.push({ operation: 'save', intent, userID, threadID, userNeedsFullThreadDetails: userBecameMember, permissions, permissionsForChildren, role: newRole, oldRole, unread: userBecameMember && setNewMembersToUnread, }); } else { membershipRows.push({ operation: 'delete', intent, userID, threadID, oldRole, }); } if (permissions && !existingMembership && threadID !== genesis.id) { relationshipChangeset.setRelationshipsNeeded(userID, existingMemberIDs); } if ( userLostMembership || !_isEqual(permissionsForChildren)(oldPermissionsForChildren) ) { toUpdateDescendants.set(userID, { userIsMember: Number(newRole) > 0, permissionsForChildren, }); } } if (toUpdateDescendants.size > 0) { const { membershipRows: descendantMembershipRows, relationshipChangeset: descendantRelationshipChangeset, } = await updateDescendantPermissions({ threadID, depth, changesByUser: toUpdateDescendants, }); pushAll(membershipRows, descendantMembershipRows); relationshipChangeset.addAll(descendantRelationshipChangeset); } return { membershipRows, relationshipChangeset }; } type RoleThreadResult = { +roleColumnValue: string, +depth: number, +threadType: ThreadType, +parentThreadID: ?string, +hasContainingThreadID: boolean, +rolePermissions: ?ThreadRolePermissionsBlob, }; async function changeRoleThreadQuery( threadID: string, role: string | -1 | 0 | null, ): 
Promise { if (role === 0 || role === -1) { const query = SQL` SELECT type, depth, parent_thread_id, containing_thread_id FROM threads WHERE id = ${threadID} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; return { roleColumnValue: role.toString(), depth: row.depth, threadType: assertThreadType(row.type), parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, hasContainingThreadID: row.containing_thread_id !== null, rolePermissions: null, }; } else if (role !== null) { const query = SQL` SELECT t.type, t.depth, t.parent_thread_id, t.containing_thread_id, r.permissions FROM threads t INNER JOIN roles r ON r.thread = t.id AND r.id = ${role} WHERE t.id = ${threadID} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; return { roleColumnValue: role, depth: row.depth, threadType: assertThreadType(row.type), parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, hasContainingThreadID: row.containing_thread_id !== null, rolePermissions: JSON.parse(row.permissions), }; } else { const query = SQL` SELECT t.type, t.depth, t.parent_thread_id, t.containing_thread_id, t.default_role, r.permissions FROM threads t INNER JOIN roles r ON r.thread = t.id AND r.id = t.default_role WHERE t.id = ${threadID} `; const [result] = await dbQuery(query); if (result.length === 0) { throw new ServerError('internal_error'); } const row = result[0]; return { roleColumnValue: row.default_role.toString(), depth: row.depth, threadType: assertThreadType(row.type), parentThreadID: row.parent_thread_id ? row.parent_thread_id.toString() : null, hasContainingThreadID: row.containing_thread_id !== null, rolePermissions: JSON.parse(row.permissions), }; } } type ChangedAncestor = { +threadID: string, +depth: number, +changesByUser: Map, }; type AncestorChanges = { +userIsMember: boolean, +permissionsForChildren: ?ThreadPermissionsBlob, }; async function updateDescendantPermissions( initialChangedAncestor: ChangedAncestor, ): Promise { const membershipRows = []; const relationshipChangeset = new RelationshipChangeset(); const initialDescendants = await fetchDescendantsForUpdate([ initialChangedAncestor, ]); const depthQueue = new DepthQueue( getDescendantDepth, getDescendantKey, mergeDescendants, ); depthQueue.addInfos(initialDescendants); let descendants; while ((descendants = depthQueue.getNextDepth())) { const descendantsAsAncestors = []; for (const descendant of descendants) { const { threadID, threadType, depth, users } = descendant; const existingMembers = [...users.entries()]; const existingMemberIDs = existingMembers .filter(([, { curRole }]) => curRole) .map(([userID]) => userID); if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(existingMemberIDs); } const usersForNextLayer = new Map(); for (const [userID, user] of users) { const { curRolePermissions, curPermissionsFromParent, curMemberOfContainingThread, nextMemberOfContainingThread, nextPermissionsFromParent, potentiallyNeedsUpdate, } = user; const existingMembership = !!user.curRole; const curRole = user.curRole ?? '-1'; const curPermissions = user.curPermissions ?? null; const curPermissionsForChildren = user.curPermissionsForChildren ?? null; if (!potentiallyNeedsUpdate) { continue; } const permissionsFromParent = nextPermissionsFromParent === undefined ? 
curPermissionsFromParent : nextPermissionsFromParent; const memberOfContainingThread = nextMemberOfContainingThread === undefined ? curMemberOfContainingThread : nextMemberOfContainingThread; const targetRole = memberOfContainingThread ? curRole : '-1'; const rolePermissions = memberOfContainingThread ? curRolePermissions : null; const permissions = makePermissionsBlob( rolePermissions, permissionsFromParent, threadID, threadType, ); const permissionsForChildren = makePermissionsForChildrenBlob( permissions, ); const newRole = getRoleForPermissions(targetRole, permissions); const userLostMembership = Number(curRole) > 0 && Number(newRole) <= 0; if (_isEqual(permissions)(curPermissions) && curRole === newRole) { // This thread and all of its descendants need no updates for this // user, since the corresponding memberships row is unchanged by this // operation continue; } if (permissions) { membershipRows.push({ operation: 'save', intent: 'none', userID, threadID, userNeedsFullThreadDetails: false, permissions, permissionsForChildren, role: newRole, oldRole: curRole, }); } else { membershipRows.push({ operation: 'delete', intent: 'none', userID, threadID, oldRole: curRole, }); } if (permissions && !existingMembership && threadID !== genesis.id) { // If there was no membership row before, and we are creating one, // we'll need to make sure the new member has a relationship row with // each existing member. We expect that whoever called us already // generated memberships row for the new members, will will lead // saveMemberships to generate relationships rows between those new // users. relationshipChangeset.setRelationshipsNeeded( userID, existingMemberIDs, ); } if ( userLostMembership || !_isEqual(permissionsForChildren)(curPermissionsForChildren) ) { usersForNextLayer.set(userID, { userIsMember: Number(newRole) > 0, permissionsForChildren, }); } } if (usersForNextLayer.size > 0) { descendantsAsAncestors.push({ threadID, depth, changesByUser: usersForNextLayer, }); } } const nextDescendants = await fetchDescendantsForUpdate( descendantsAsAncestors, ); depthQueue.addInfos(nextDescendants); } return { membershipRows, relationshipChangeset }; } type DescendantUserInfo = $Shape<{ curRole?: string, curRolePermissions?: ?ThreadRolePermissionsBlob, curPermissions?: ?ThreadPermissionsBlob, curPermissionsForChildren?: ?ThreadPermissionsBlob, curPermissionsFromParent?: ?ThreadPermissionsBlob, curMemberOfContainingThread?: boolean, nextPermissionsFromParent?: ?ThreadPermissionsBlob, nextMemberOfContainingThread?: boolean, potentiallyNeedsUpdate?: boolean, }>; type DescendantInfo = { +threadID: string, +parentThreadID: string, +containingThreadID: string, +threadType: ThreadType, +depth: number, +users: Map, }; const fetchDescendantsBatchSize = 10; async function fetchDescendantsForUpdate( ancestors: $ReadOnlyArray, ): Promise { const threadIDs = ancestors.map(ancestor => ancestor.threadID); const rows = []; while (threadIDs.length > 0) { const batch = threadIDs.splice(0, fetchDescendantsBatchSize); const query = SQL` SELECT t.id, m.user, t.type, t.depth, t.parent_thread_id, t.containing_thread_id, r.permissions AS role_permissions, m.permissions, m.permissions_for_children, m.role, pm.permissions_for_children AS permissions_from_parent, cm.role AS containing_role FROM threads t INNER JOIN memberships m ON m.thread = t.id LEFT JOIN memberships pm ON pm.thread = t.parent_thread_id AND pm.user = m.user LEFT JOIN memberships cm ON cm.thread = t.containing_thread_id AND cm.user = m.user LEFT JOIN roles r 
ON r.id = m.role WHERE t.parent_thread_id IN (${batch}) OR t.containing_thread_id IN (${batch}) `; const [results] = await dbQuery(query); pushAll(rows, results); } const descendantThreadInfos: Map = new Map(); for (const row of rows) { const descendantThreadID = row.id.toString(); if (!descendantThreadInfos.has(descendantThreadID)) { descendantThreadInfos.set(descendantThreadID, { threadID: descendantThreadID, parentThreadID: row.parent_thread_id.toString(), containingThreadID: row.containing_thread_id.toString(), threadType: assertThreadType(row.type), depth: row.depth, users: new Map(), }); } const descendantThreadInfo = descendantThreadInfos.get(descendantThreadID); invariant( descendantThreadInfo, `value should exist for key ${descendantThreadID}`, ); const userID = row.user.toString(); descendantThreadInfo.users.set(userID, { curRole: row.role.toString(), curRolePermissions: JSON.parse(row.role_permissions), curPermissions: JSON.parse(row.permissions), curPermissionsForChildren: JSON.parse(row.permissions_for_children), curPermissionsFromParent: JSON.parse(row.permissions_from_parent), curMemberOfContainingThread: row.containing_role > 0, }); } for (const ancestor of ancestors) { const { threadID, changesByUser } = ancestor; for (const [userID, changes] of changesByUser) { for (const descendantThreadInfo of descendantThreadInfos.values()) { const { users, parentThreadID, containingThreadID, } = descendantThreadInfo; if (threadID !== parentThreadID && threadID !== containingThreadID) { continue; } let user = users.get(userID); if (!user) { user = {}; users.set(userID, user); } if (threadID === parentThreadID) { user.nextPermissionsFromParent = changes.permissionsForChildren; user.potentiallyNeedsUpdate = true; } if (threadID === containingThreadID) { user.nextMemberOfContainingThread = changes.userIsMember; if (!user.nextMemberOfContainingThread) { user.potentiallyNeedsUpdate = true; } } } } } return [...descendantThreadInfos.values()]; } function getDescendantDepth(descendant: DescendantInfo): number { return descendant.depth; } function getDescendantKey(descendant: DescendantInfo): string { return descendant.threadID; } function mergeDescendants( a: DescendantInfo, b: DescendantInfo, ): DescendantInfo { const { users: usersA, ...restA } = a; const { users: usersB, ...restB } = b; if (!_isEqual(restA)(restB)) { console.warn( `inconsistent descendantInfos ${JSON.stringify(restA)}, ` + JSON.stringify(restB), ); throw new ServerError('internal_error'); } const newUsers = new Map(usersA); for (const [userID, userFromB] of usersB) { const userFromA = newUsers.get(userID); if (!userFromA) { newUsers.set(userID, userFromB); } else { newUsers.set(userID, { ...userFromA, ...userFromB }); } } return { ...a, users: newUsers }; } type RecalculatePermissionsMemberInfo = { role?: ?string, permissions?: ?ThreadPermissionsBlob, permissionsForChildren?: ?ThreadPermissionsBlob, rolePermissions?: ?ThreadRolePermissionsBlob, memberOfContainingThread?: boolean, permissionsFromParent?: ?ThreadPermissionsBlob, }; async function recalculateThreadPermissions( threadID: string, ): Promise { const threadQuery = SQL` SELECT type, depth, parent_thread_id, containing_thread_id FROM threads WHERE id = ${threadID} `; const membershipQuery = SQL` SELECT m.user, m.role, m.permissions, m.permissions_for_children, r.permissions AS role_permissions, cm.role AS containing_role FROM threads t INNER JOIN memberships m ON m.thread = t.id LEFT JOIN roles r ON r.id = m.role LEFT JOIN memberships cm ON cm.user = m.user AND 
cm.thread = t.containing_thread_id WHERE t.id = ${threadID} `; const parentMembershipQuery = SQL` SELECT pm.user, pm.permissions_for_children AS permissions_from_parent FROM threads t INNER JOIN memberships pm ON pm.thread = t.parent_thread_id WHERE t.id = ${threadID} `; const [ [threadResults], [membershipResults], [parentMembershipResults], ] = await Promise.all([ dbQuery(threadQuery), dbQuery(membershipQuery), dbQuery(parentMembershipQuery), ]); if (threadResults.length !== 1) { throw new ServerError('internal_error'); } const [threadResult] = threadResults; const threadType = assertThreadType(threadResult.type); const depth = threadResult.depth; const hasContainingThreadID = threadResult.containing_thread_id !== null; const parentThreadID = threadResult.parent_thread_id?.toString(); const membershipInfo: Map< string, RecalculatePermissionsMemberInfo, > = new Map(); for (const row of membershipResults) { const userID = row.user.toString(); membershipInfo.set(userID, { role: row.role.toString(), permissions: JSON.parse(row.permissions), permissionsForChildren: JSON.parse(row.permissions_for_children), rolePermissions: JSON.parse(row.role_permissions), memberOfContainingThread: !!( row.containing_role && row.containing_role > 0 ), }); } for (const row of parentMembershipResults) { const userID = row.user.toString(); const permissionsFromParent = JSON.parse(row.permissions_from_parent); const membership = membershipInfo.get(userID); if (membership) { membership.permissionsFromParent = permissionsFromParent; } else { membershipInfo.set(userID, { permissionsFromParent: permissionsFromParent, }); } } const relationshipChangeset = new RelationshipChangeset(); const existingMemberIDs = membershipResults.map(row => row.user.toString()); if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(existingMemberIDs); } const parentMemberIDs = parentMembershipResults.map(row => row.user.toString(), ); if (parentThreadID && parentThreadID !== genesis.id) { relationshipChangeset.setAllRelationshipsExist(parentMemberIDs); } const membershipRows = []; const toUpdateDescendants = new Map(); for (const [userID, membership] of membershipInfo) { const { rolePermissions: intendedRolePermissions, permissionsFromParent, } = membership; const oldPermissions = membership?.permissions ?? null; const oldPermissionsForChildren = membership?.permissionsForChildren ?? null; const existingMembership = membership.role !== undefined; const oldRole = membership.role ?? '-1'; const memberOfContainingThread = hasContainingThreadID ? !!membership.memberOfContainingThread : true; const targetRole = memberOfContainingThread ? oldRole : '-1'; const rolePermissions = memberOfContainingThread ? 
intendedRolePermissions : null; const permissions = makePermissionsBlob( rolePermissions, permissionsFromParent, threadID, threadType, ); const permissionsForChildren = makePermissionsForChildrenBlob(permissions); const newRole = getRoleForPermissions(targetRole, permissions); const userLostMembership = Number(oldRole) > 0 && Number(newRole) <= 0; if (_isEqual(permissions)(oldPermissions) && oldRole === newRole) { // This thread and all of its descendants need no updates for this user, // since the corresponding memberships row is unchanged by this operation continue; } if (permissions) { membershipRows.push({ operation: 'save', intent: 'none', userID, threadID, userNeedsFullThreadDetails: false, permissions, permissionsForChildren, role: newRole, oldRole, }); } else { membershipRows.push({ operation: 'delete', intent: 'none', userID, threadID, oldRole, }); } if (permissions && !existingMembership && threadID !== genesis.id) { // If there was no membership row before, and we are creating one, // we'll need to make sure the new member has a relationship row with // each existing member. We handle guaranteeing that new members have // relationship rows with each other in saveMemberships. relationshipChangeset.setRelationshipsNeeded(userID, existingMemberIDs); } if ( userLostMembership || !_isEqual(permissionsForChildren)(oldPermissionsForChildren) ) { toUpdateDescendants.set(userID, { userIsMember: Number(newRole) > 0, permissionsForChildren, }); } } if (toUpdateDescendants.size > 0) { const { membershipRows: descendantMembershipRows, relationshipChangeset: descendantRelationshipChangeset, } = await updateDescendantPermissions({ threadID, depth, changesByUser: toUpdateDescendants, }); pushAll(membershipRows, descendantMembershipRows); relationshipChangeset.addAll(descendantRelationshipChangeset); } return { membershipRows, relationshipChangeset }; } const defaultSubscriptionString = JSON.stringify({ home: false, pushNotifs: false, }); const joinSubscriptionString = JSON.stringify({ home: true, pushNotifs: true }); const membershipInsertBatchSize = 50; async function saveMemberships(toSave: $ReadOnlyArray) { if (toSave.length === 0) { return; } const time = Date.now(); const insertRows = []; for (const rowToSave of toSave) { insertRows.push([ rowToSave.userID, rowToSave.threadID, rowToSave.role, time, rowToSave.intent === 'join' ? joinSubscriptionString : defaultSubscriptionString, rowToSave.permissions ? JSON.stringify(rowToSave.permissions) : null, rowToSave.permissionsForChildren ? JSON.stringify(rowToSave.permissionsForChildren) : null, rowToSave.unread ? 1 : 0, 0, ]); } // Logic below will only update an existing membership row's `subscription` // column if the user is either joining or leaving the thread. That means // there's no way to use this function to update a user's subscription without // also making them join or leave the thread. The reason we do this is because // we need to specify a value for `subscription` here, as it's a non-null // column and this is an INSERT, but we don't want to require people to have // to know the current `subscription` when they're just using this function to // update the permissions of an existing membership row. 
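To make the rule described above concrete before the batching loop that follows, here is a small model of the subscription branch of the upsert, assuming role values compare as numbers (members have role > 0, non-members have role <= 0); this is an illustration of the intended semantics, not code from the diff:

// Models the ON DUPLICATE KEY UPDATE handling of the `subscription` column:
// the stored subscription is replaced only when membership flips between
// member (role > 0) and non-member (role <= 0).
function nextSubscription(existingRole, incomingRole, existingSub, incomingSub) {
  const wasMember = Number(existingRole) > 0;
  const willBeMember = Number(incomingRole) > 0;
  return wasMember !== willBeMember ? incomingSub : existingSub;
}

const joinSubscription = JSON.stringify({ home: true, pushNotifs: true });
const defaultSubscription = JSON.stringify({ home: false, pushNotifs: false });

// Joining a thread overwrites the stored subscription with the join default...
console.log(nextSubscription('-1', '2', defaultSubscription, joinSubscription));
// ...while a permissions-only update for an existing member preserves it.
console.log(nextSubscription('2', '2', joinSubscription, defaultSubscription));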
while (insertRows.length > 0) { const batch = insertRows.splice(0, membershipInsertBatchSize); const query = SQL` INSERT INTO memberships (user, thread, role, creation_time, subscription, permissions, permissions_for_children, last_message, last_read_message) VALUES ${batch} ON DUPLICATE KEY UPDATE subscription = IF( (role <= 0 AND VALUE(role) > 0) OR (role > 0 AND VALUE(role) <= 0), VALUE(subscription), subscription ), role = VALUE(role), permissions = VALUE(permissions), permissions_for_children = VALUE(permissions_for_children) `; await dbQuery(query); } } async function deleteMemberships( toDelete: $ReadOnlyArray, ) { if (toDelete.length === 0) { return; } const time = Date.now(); const insertRows = toDelete.map(rowToDelete => [ rowToDelete.userID, rowToDelete.threadID, -1, time, defaultSubscriptionString, null, null, 0, 0, ]); while (insertRows.length > 0) { const batch = insertRows.splice(0, membershipInsertBatchSize); const query = SQL` INSERT INTO memberships (user, thread, role, creation_time, subscription, permissions, permissions_for_children, last_message, last_read_message) VALUES ${batch} ON DUPLICATE KEY UPDATE role = -1, permissions = NULL, permissions_for_children = NULL, subscription = ${defaultSubscriptionString}, last_message = 0, last_read_message = 0 `; await dbQuery(query); } } const emptyCommitMembershipChangesetConfig = Object.freeze({}); // Specify non-empty changedThreadIDs to force updates to be generated for those // threads, presumably for reasons not covered in the changeset. calendarQuery // only needs to be specified if a JOIN_THREAD update will be generated for the // viewer, in which case it's necessary for knowing the set of entries to fetch. type ChangesetCommitResult = { ...FetchThreadInfosResult, ...CreateUpdatesResult, }; async function commitMembershipChangeset( viewer: Viewer, changeset: Changeset, { changedThreadIDs = new Set(), calendarQuery, updatesForCurrentSession = 'return', }: { +changedThreadIDs?: Set, +calendarQuery?: ?CalendarQuery, +updatesForCurrentSession?: UpdatesForCurrentSession, } = emptyCommitMembershipChangesetConfig, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const { membershipRows, relationshipChangeset } = changeset; const membershipRowMap = new Map(); for (const row of membershipRows) { const { userID, threadID } = row; changedThreadIDs.add(threadID); const pairString = `${userID}|${threadID}`; const existing = membershipRowMap.get(pairString); invariant( !existing || existing.intent === 'none' || row.intent === 'none', `multiple intents provided for ${pairString}`, ); if (!existing || existing.intent === 'none') { membershipRowMap.set(pairString, row); } } const toSave = [], toDelete = [], toRescindPushNotifs = []; for (const row of membershipRowMap.values()) { if ( row.operation === 'delete' || (row.operation === 'save' && Number(row.role) <= 0) ) { const { userID, threadID } = row; toRescindPushNotifs.push({ userID, threadID }); } if (row.operation === 'delete') { toDelete.push(row); } else { toSave.push(row); } } const threadsToSavedUsers = new Map(); for (const row of membershipRowMap.values()) { const { userID, threadID } = row; let savedUsers = threadsToSavedUsers.get(threadID); if (!savedUsers) { savedUsers = []; threadsToSavedUsers.set(threadID, savedUsers); } savedUsers.push(userID); } for (const [threadID, savedUsers] of threadsToSavedUsers) { if (threadID !== genesis.id) { relationshipChangeset.setAllRelationshipsNeeded(savedUsers); } } const relationshipRows = 
relationshipChangeset.getRows(); const [updateDatas] = await Promise.all([ updateChangedUndirectedRelationships(relationshipRows), saveMemberships(toSave), deleteMemberships(toDelete), rescindPushNotifsForMemberDeletion(toRescindPushNotifs), ]); // We fetch all threads here because old clients still expect the full list of // threads on most thread operations. Once verifyClientSupported gates on // codeVersion 62, we can add a WHERE clause on changedThreadIDs here const serverThreadInfoFetchResult = await fetchServerThreadInfos(); const { threadInfos: serverThreadInfos } = serverThreadInfoFetchResult; const time = Date.now(); for (const changedThreadID of changedThreadIDs) { const serverThreadInfo = serverThreadInfos[changedThreadID]; for (const memberInfo of serverThreadInfo.members) { const pairString = `${memberInfo.id}|${serverThreadInfo.id}`; const membershipRow = membershipRowMap.get(pairString); if (membershipRow) { continue; } updateDatas.push({ type: updateTypes.UPDATE_THREAD, userID: memberInfo.id, time, threadID: changedThreadID, }); } } for (const row of membershipRowMap.values()) { const { userID, threadID } = row; if (row.operation === 'delete' || row.role === '-1') { if (row.oldRole !== '-1') { updateDatas.push({ type: updateTypes.DELETE_THREAD, userID, time, threadID, }); } } else if (row.userNeedsFullThreadDetails) { updateDatas.push({ type: updateTypes.JOIN_THREAD, userID, time, threadID, }); } else { updateDatas.push({ type: updateTypes.UPDATE_THREAD, userID, time, threadID, }); } } const threadInfoFetchResult = rawThreadInfosFromServerThreadInfos( viewer, serverThreadInfoFetchResult, ); const { viewerUpdates, userInfos } = await createUpdates(updateDatas, { viewer, calendarQuery, ...threadInfoFetchResult, updatesForCurrentSession, }); return { ...threadInfoFetchResult, userInfos, viewerUpdates, }; } const emptyGetChangesetCommitResultConfig = Object.freeze({}); // When the user tries to create a new thread, it's possible for the client to // fail the creation even if a row gets added to the threads table. This may // occur due to a timeout (on either the client or server side), or due to some // error in the server code following the INSERT operation. Handling the error // scenario is more challenging since it would require detecting which set of // operations failed so we could retry them. As a result, this code is geared at // only handling the timeout scenario. async function getChangesetCommitResultForExistingThread( viewer: Viewer, threadID: string, otherUpdates: $ReadOnlyArray, { calendarQuery, updatesForCurrentSession = 'return', }: { +calendarQuery?: ?CalendarQuery, +updatesForCurrentSession?: UpdatesForCurrentSession, } = emptyGetChangesetCommitResultConfig, ): Promise { for (const update of otherUpdates) { if ( update.type === updateTypes.JOIN_THREAD && update.threadInfo.id === threadID ) { // If the JOIN_THREAD is already there we can expect // the appropriate UPDATE_USERs to be covered as well return { viewerUpdates: otherUpdates, userInfos: {} }; } } const time = Date.now(); const updateDatas = [ { type: updateTypes.JOIN_THREAD, userID: viewer.userID, time, threadID, targetSession: viewer.session, }, ]; // To figure out what UserInfos might be missing, we consider the worst case: // the same client previously attempted to create a thread with a non-friend // they found via search results, but the request timed out. 
In this scenario // the viewer might never have received the UPDATE_USER that would add that // UserInfo to their UserStore, but the server assumed the client had gotten // it because createUpdates was called with UpdatesForCurrentSession=return. // For completeness here we query for the full list of memberships rows in the // thread. We can't use fetchServerThreadInfos because it skips role=-1 rows const membershipsQuery = SQL` SELECT user FROM memberships WHERE thread = ${threadID} AND user != ${viewer.userID} `; const [results] = await dbQuery(membershipsQuery); for (const row of results) { updateDatas.push({ type: updateTypes.UPDATE_USER, userID: viewer.userID, time, updatedUserID: row.user.toString(), targetSession: viewer.session, }); } const { viewerUpdates, userInfos } = await createUpdates(updateDatas, { viewer, calendarQuery, updatesForCurrentSession, }); return { viewerUpdates: [...otherUpdates, ...viewerUpdates], userInfos }; } const rescindPushNotifsBatchSize = 3; async function rescindPushNotifsForMemberDeletion( toRescindPushNotifs: $ReadOnlyArray<{ +userID: string, +threadID: string }>, ): Promise { const queue = [...toRescindPushNotifs]; while (queue.length > 0) { const batch = queue.splice(0, rescindPushNotifsBatchSize); await Promise.all( batch.map(({ userID, threadID }) => rescindPushNotifs( SQL`n.thread = ${threadID} AND n.user = ${userID}`, SQL`IF(m.thread = ${threadID}, NULL, m.thread)`, ), ), ); } } async function recalculateAllThreadPermissions() { const getAllThreads = SQL`SELECT id FROM threads`; const [result] = await dbQuery(getAllThreads); // We handle each thread one-by-one to avoid a situation where a permission // calculation for a child thread, done during a call to // recalculateThreadPermissions for the parent thread, can be incorrectly // overriden by a call to recalculateThreadPermissions for the child thread. // If the changeset resulting from the parent call isn't committed before the // calculation is done for the child, the calculation done for the child can // be incorrect. 
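The ordering constraint described above is a commit-before-recompute rule: each thread's changeset has to be committed before any descendant's permissions are recalculated, or the descendant's calculation reads stale parent state. A generic sketch of the sequential pattern (not tied to this codebase's helpers):

// Process items strictly one at a time: each step's writes must be committed
// before the next step's reads. Running the steps concurrently (for example
// with Promise.all) could let a child thread's recalculation observe parent
// permissions that haven't been written yet.
async function processSequentially(items, computeChangeset, commitChangeset) {
  for (const item of items) {
    const changeset = await computeChangeset(item);
    await commitChangeset(changeset);
  }
}

updateRolesAndPermissionsForAllThreads later in this file relaxes this slightly by batching threads of equal depth, presumably because threads at the same depth can never be ancestors of one another; each batch is still committed before the next depth level is processed.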
const viewer = createScriptViewer(bots.commbot.userID); for (const row of result) { const threadID = row.id.toString(); const changeset = await recalculateThreadPermissions(threadID); await commitMembershipChangeset(viewer, changeset); } } async function updateRolesAndPermissionsForAllThreads() { const batchSize = 10; const fetchThreads = SQL`SELECT id, type, depth FROM threads`; const [result] = await dbQuery(fetchThreads); const allThreads = result.map(row => { return { id: row.id.toString(), type: assertThreadType(row.type), depth: row.depth, }; }); const viewer = createScriptViewer(bots.commbot.userID); const maxDepth = Math.max(...allThreads.map(row => row.depth)); for (let depth = 0; depth <= maxDepth; depth++) { const threads = allThreads.filter(row => row.depth === depth); console.log(`recalculating permissions for threads with depth ${depth}`); while (threads.length > 0) { const batch = threads.splice(0, batchSize); const membershipRows = []; const relationshipChangeset = new RelationshipChangeset(); await Promise.all( batch.map(async thread => { console.log(`updating roles for ${thread.id}`); await updateRoles(viewer, thread.id, thread.type); console.log(`recalculating permissions for ${thread.id}`); const { membershipRows: threadMembershipRows, relationshipChangeset: threadRelationshipChangeset, } = await recalculateThreadPermissions(thread.id); membershipRows.push(...threadMembershipRows); relationshipChangeset.addAll(threadRelationshipChangeset); }), ); console.log(`committing batch ${JSON.stringify(batch)}`); await commitMembershipChangeset(viewer, { membershipRows, relationshipChangeset, }); } } } export { changeRole, recalculateThreadPermissions, getChangesetCommitResultForExistingThread, saveMemberships, commitMembershipChangeset, recalculateAllThreadPermissions, updateRolesAndPermissionsForAllThreads, }; diff --git a/keyserver/src/updaters/thread-updaters.js b/keyserver/src/updaters/thread-updaters.js index 2663f445b..bb16fb89f 100644 --- a/keyserver/src/updaters/thread-updaters.js +++ b/keyserver/src/updaters/thread-updaters.js @@ -1,846 +1,849 @@ // @flow -import { filteredThreadIDs } from 'lib/selectors/calendar-filter-selectors'; +import { filteredThreadIDs } from 'lib/selectors/calendar-filter-selectors.js'; import { threadHasAdminRole, roleIsAdminRole, viewerIsMember, getThreadTypeParentRequirement, -} from 'lib/shared/thread-utils'; -import { hasMinCodeVersion } from 'lib/shared/version-utils'; -import type { Shape } from 'lib/types/core'; -import { messageTypes, defaultNumberPerThread } from 'lib/types/message-types'; +} from 'lib/shared/thread-utils.js'; +import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; +import type { Shape } from 'lib/types/core.js'; +import { + messageTypes, + defaultNumberPerThread, +} from 'lib/types/message-types.js'; import { type RoleChangeRequest, type ChangeThreadSettingsResult, type RemoveMembersRequest, type LeaveThreadRequest, type LeaveThreadResult, type UpdateThreadRequest, type ServerThreadJoinRequest, type ThreadJoinResult, threadPermissions, threadTypes, -} from 'lib/types/thread-types'; -import { updateTypes } from 'lib/types/update-types'; -import { ServerError } from 'lib/utils/errors'; -import { promiseAll } from 'lib/utils/promises'; -import { firstLine } from 'lib/utils/string-utils'; - -import createMessages from '../creators/message-creator'; -import { getRolePermissionBlobs } from '../creators/role-creator'; -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from 
'../database/database'; -import { fetchEntryInfos } from '../fetchers/entry-fetchers'; -import { fetchMessageInfos } from '../fetchers/message-fetchers'; +} from 'lib/types/thread-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import { ServerError } from 'lib/utils/errors.js'; +import { promiseAll } from 'lib/utils/promises.js'; +import { firstLine } from 'lib/utils/string-utils.js'; + +import createMessages from '../creators/message-creator.js'; +import { getRolePermissionBlobs } from '../creators/role-creator.js'; +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchEntryInfos } from '../fetchers/entry-fetchers.js'; +import { fetchMessageInfos } from '../fetchers/message-fetchers.js'; import { fetchThreadInfos, fetchServerThreadInfos, determineThreadAncestry, -} from '../fetchers/thread-fetchers'; +} from '../fetchers/thread-fetchers.js'; import { checkThreadPermission, viewerIsMember as fetchViewerIsMember, checkThread, validateCandidateMembers, -} from '../fetchers/thread-permission-fetchers'; +} from '../fetchers/thread-permission-fetchers.js'; import { verifyUserIDs, verifyUserOrCookieIDs, -} from '../fetchers/user-fetchers'; -import type { Viewer } from '../session/viewer'; -import RelationshipChangeset from '../utils/relationship-changeset'; -import { updateRoles } from './role-updaters'; +} from '../fetchers/user-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; +import RelationshipChangeset from '../utils/relationship-changeset.js'; +import { updateRoles } from './role-updaters.js'; import { changeRole, recalculateThreadPermissions, commitMembershipChangeset, -} from './thread-permission-updaters'; +} from './thread-permission-updaters.js'; async function updateRole( viewer: Viewer, request: RoleChangeRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const [memberIDs, hasPermission] = await Promise.all([ verifyUserIDs(request.memberIDs), checkThreadPermission( viewer, request.threadID, threadPermissions.CHANGE_ROLE, ), ]); if (memberIDs.length === 0) { throw new ServerError('invalid_parameters'); } if (!hasPermission) { throw new ServerError('invalid_credentials'); } const query = SQL` SELECT user, role FROM memberships WHERE user IN (${memberIDs}) AND thread = ${request.threadID} `; const [result] = await dbQuery(query); let nonMemberUser = false; let numResults = 0; for (const row of result) { if (row.role <= 0) { nonMemberUser = true; break; } numResults++; } if (nonMemberUser || numResults < memberIDs.length) { throw new ServerError('invalid_parameters'); } const changeset = await changeRole(request.threadID, memberIDs, request.role); const { threadInfos, viewerUpdates } = await commitMembershipChangeset( viewer, changeset, ); const messageData = { type: messageTypes.CHANGE_ROLE, threadID: request.threadID, creatorID: viewer.userID, time: Date.now(), userIDs: memberIDs, newRole: request.role, }; const newMessageInfos = await createMessages(viewer, [messageData]); if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { updatesResult: { newUpdates: viewerUpdates }, newMessageInfos }; } return { threadInfo: threadInfos[request.threadID], threadInfos, updatesResult: { newUpdates: viewerUpdates, }, newMessageInfos, }; } async function removeMembers( viewer: Viewer, request: RemoveMembersRequest, ): Promise { const viewerID = viewer.userID; if (request.memberIDs.includes(viewerID)) { throw new 
ServerError('invalid_parameters'); } const [memberIDs, hasPermission] = await Promise.all([ verifyUserOrCookieIDs(request.memberIDs), checkThreadPermission( viewer, request.threadID, threadPermissions.REMOVE_MEMBERS, ), ]); if (memberIDs.length === 0) { throw new ServerError('invalid_parameters'); } if (!hasPermission) { throw new ServerError('invalid_credentials'); } const query = SQL` SELECT m.user, m.role, t.default_role FROM memberships m LEFT JOIN threads t ON t.id = m.thread WHERE m.user IN (${memberIDs}) AND m.thread = ${request.threadID} `; const [result] = await dbQuery(query); let nonDefaultRoleUser = false; const actualMemberIDs = []; for (const row of result) { if (row.role <= 0) { continue; } actualMemberIDs.push(row.user.toString()); if (row.role !== row.default_role) { nonDefaultRoleUser = true; } } if (nonDefaultRoleUser) { const hasChangeRolePermission = await checkThreadPermission( viewer, request.threadID, threadPermissions.CHANGE_ROLE, ); if (!hasChangeRolePermission) { throw new ServerError('invalid_credentials'); } } const changeset = await changeRole(request.threadID, actualMemberIDs, 0); const { threadInfos, viewerUpdates } = await commitMembershipChangeset( viewer, changeset, ); const newMessageInfos = await (async () => { if (actualMemberIDs.length === 0) { return []; } const messageData = { type: messageTypes.REMOVE_MEMBERS, threadID: request.threadID, creatorID: viewerID, time: Date.now(), removedUserIDs: actualMemberIDs, }; return await createMessages(viewer, [messageData]); })(); if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { updatesResult: { newUpdates: viewerUpdates }, newMessageInfos }; } return { threadInfo: threadInfos[request.threadID], threadInfos, updatesResult: { newUpdates: viewerUpdates, }, newMessageInfos, }; } async function leaveThread( viewer: Viewer, request: LeaveThreadRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const [fetchThreadResult, hasPermission] = await Promise.all([ fetchThreadInfos(viewer, SQL`t.id = ${request.threadID}`), checkThreadPermission( viewer, request.threadID, threadPermissions.LEAVE_THREAD, ), ]); const threadInfo = fetchThreadResult.threadInfos[request.threadID]; if (!viewerIsMember(threadInfo)) { if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { updatesResult: { newUpdates: [] }, }; } const { threadInfos } = await fetchThreadInfos(viewer); return { threadInfos, updatesResult: { newUpdates: [], }, }; } if (!hasPermission) { throw new ServerError('invalid_parameters'); } const viewerID = viewer.userID; if (threadHasAdminRole(threadInfo)) { let otherUsersExist = false; let otherAdminsExist = false; for (const member of threadInfo.members) { const role = member.role; if (!role || member.id === viewerID) { continue; } otherUsersExist = true; if (roleIsAdminRole(threadInfo.roles[role])) { otherAdminsExist = true; break; } } if (otherUsersExist && !otherAdminsExist) { throw new ServerError('invalid_parameters'); } } const changeset = await changeRole(request.threadID, [viewerID], 0); const { threadInfos, viewerUpdates } = await commitMembershipChangeset( viewer, changeset, ); const messageData = { type: messageTypes.LEAVE_THREAD, threadID: request.threadID, creatorID: viewerID, time: Date.now(), }; await createMessages(viewer, [messageData]); if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { updatesResult: { newUpdates: viewerUpdates } }; } return { threadInfos, updatesResult: { newUpdates: viewerUpdates, }, }; } type UpdateThreadOptions = 
Shape<{ +forceAddMembers: boolean, +forceUpdateRoot: boolean, +silenceMessages: boolean, +ignorePermissions: boolean, }>; async function updateThread( viewer: Viewer, request: UpdateThreadRequest, options?: UpdateThreadOptions, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const forceAddMembers = options?.forceAddMembers ?? false; const forceUpdateRoot = options?.forceUpdateRoot ?? false; const silenceMessages = options?.silenceMessages ?? false; const ignorePermissions = (options?.ignorePermissions && viewer.isScriptViewer) ?? false; const validationPromises = {}; const changedFields = {}; const sqlUpdate = {}; const untrimmedName = request.changes.name; if (untrimmedName !== undefined && untrimmedName !== null) { const name = firstLine(untrimmedName); changedFields.name = name; sqlUpdate.name = name ?? null; } const { description } = request.changes; if (description !== undefined && description !== null) { changedFields.description = description; sqlUpdate.description = description ?? null; } if (request.changes.color) { const color = request.changes.color.toLowerCase(); changedFields.color = color; sqlUpdate.color = color; } const { parentThreadID } = request.changes; if (parentThreadID !== undefined) { // TODO some sort of message when this changes sqlUpdate.parent_thread_id = parentThreadID; } const threadType = request.changes.type; if (threadType !== null && threadType !== undefined) { changedFields.type = threadType; sqlUpdate.type = threadType; } if ( !ignorePermissions && threadType !== null && threadType !== undefined && threadType !== threadTypes.COMMUNITY_OPEN_SUBTHREAD && threadType !== threadTypes.COMMUNITY_SECRET_SUBTHREAD ) { throw new ServerError('invalid_parameters'); } const newMemberIDs = request.changes.newMemberIDs && request.changes.newMemberIDs.length > 0 ? [...new Set(request.changes.newMemberIDs)] : null; if ( Object.keys(sqlUpdate).length === 0 && !newMemberIDs && !forceUpdateRoot ) { throw new ServerError('invalid_parameters'); } validationPromises.serverThreadInfos = fetchServerThreadInfos( SQL`t.id = ${request.threadID}`, ); validationPromises.hasNecessaryPermissions = (async () => { if (ignorePermissions) { return; } const checks = []; if (sqlUpdate.name !== undefined) { checks.push({ check: 'permission', permission: threadPermissions.EDIT_THREAD_NAME, }); } if (sqlUpdate.description !== undefined) { checks.push({ check: 'permission', permission: threadPermissions.EDIT_THREAD_DESCRIPTION, }); } if (sqlUpdate.color !== undefined) { checks.push({ check: 'permission', permission: threadPermissions.EDIT_THREAD_COLOR, }); } if (parentThreadID !== undefined || sqlUpdate.type !== undefined) { checks.push({ check: 'permission', permission: threadPermissions.EDIT_PERMISSIONS, }); } if (newMemberIDs) { checks.push({ check: 'permission', permission: threadPermissions.ADD_MEMBERS, }); } const hasNecessaryPermissions = await checkThread( viewer, request.threadID, checks, ); if (!hasNecessaryPermissions) { throw new ServerError('invalid_credentials'); } })(); const { serverThreadInfos } = await promiseAll(validationPromises); const serverThreadInfo = serverThreadInfos.threadInfos[request.threadID]; if (!serverThreadInfo) { throw new ServerError('internal_error'); } // Threads with source message should be visible to everyone, but we can't // guarantee it for COMMUNITY_SECRET_SUBTHREAD threads so we forbid it for // now. In the future, if we want to support this, we would need to unlink the // source message. 
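The check that follows can be read as a small predicate over the requested type change. A sketch of that rule, with placeholder string constants standing in for the numeric threadTypes values (an assumption made purely for illustration):

// Returns true when a type change should be rejected because the thread was
// created from a source message (sourceMessageID is set) and the requested
// type does not guarantee that the source message stays visible to members.
// Assumption: the strings below stand in for the real threadTypes constants.
function typeChangeForbiddenForSourcedThread(requestedType, hasSourceMessage) {
  if (requestedType === null || requestedType === undefined) {
    return false; // the type is not being changed
  }
  if (!hasSourceMessage) {
    return false; // only threads linked to a source message are restricted
  }
  return (
    requestedType !== 'SIDEBAR' &&
    requestedType !== 'COMMUNITY_OPEN_SUBTHREAD'
  );
}

console.log(typeChangeForbiddenForSourcedThread('COMMUNITY_SECRET_SUBTHREAD', true)); // true
console.log(typeChangeForbiddenForSourcedThread('SIDEBAR', true)); // false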
if ( threadType !== null && threadType !== undefined && threadType !== threadTypes.SIDEBAR && threadType !== threadTypes.COMMUNITY_OPEN_SUBTHREAD && serverThreadInfo.sourceMessageID ) { throw new ServerError('invalid_parameters'); } // You can't change the parent thread of a current or former SIDEBAR if (parentThreadID !== undefined && serverThreadInfo.sourceMessageID) { throw new ServerError('invalid_parameters'); } const oldThreadType = serverThreadInfo.type; const oldParentThreadID = serverThreadInfo.parentThreadID; const oldContainingThreadID = serverThreadInfo.containingThreadID; const oldCommunity = serverThreadInfo.community; const oldDepth = serverThreadInfo.depth; const nextThreadType = threadType !== null && threadType !== undefined ? threadType : oldThreadType; let nextParentThreadID = parentThreadID !== undefined ? parentThreadID : oldParentThreadID; // Does the new thread type preclude a parent? if ( threadType !== undefined && threadType !== null && getThreadTypeParentRequirement(threadType) === 'disabled' && nextParentThreadID !== null ) { nextParentThreadID = null; sqlUpdate.parent_thread_id = null; } // Does the new thread type require a parent? if ( threadType !== undefined && threadType !== null && getThreadTypeParentRequirement(threadType) === 'required' && nextParentThreadID === null ) { throw new ServerError('no_parent_thread_specified'); } const determineThreadAncestryPromise = determineThreadAncestry( nextParentThreadID, nextThreadType, ); const confirmParentPermissionPromise = (async () => { if (ignorePermissions || !nextParentThreadID) { return; } if ( nextParentThreadID === oldParentThreadID && (nextThreadType === threadTypes.SIDEBAR) === (oldThreadType === threadTypes.SIDEBAR) ) { return; } const hasParentPermission = await checkThreadPermission( viewer, nextParentThreadID, nextThreadType === threadTypes.SIDEBAR ? 
threadPermissions.CREATE_SIDEBARS : threadPermissions.CREATE_SUBCHANNELS, ); if (!hasParentPermission) { throw new ServerError('invalid_parameters'); } })(); const rolesNeedUpdate = forceUpdateRoot || nextThreadType !== oldThreadType; const validateNewMembersPromise = (async () => { if (!newMemberIDs || ignorePermissions) { return; } const defaultRolePermissionsPromise = (async () => { let rolePermissions; if (!rolesNeedUpdate) { const rolePermissionsQuery = SQL` SELECT r.permissions FROM threads t LEFT JOIN roles r ON r.id = t.default_role WHERE t.id = ${request.threadID} `; const [result] = await dbQuery(rolePermissionsQuery); if (result.length > 0) { rolePermissions = JSON.parse(result[0].permissions); } } if (!rolePermissions) { rolePermissions = getRolePermissionBlobs(nextThreadType).Members; } return rolePermissions; })(); const [defaultRolePermissions, nextThreadAncestry] = await Promise.all([ defaultRolePermissionsPromise, determineThreadAncestryPromise, ]); const { newMemberIDs: validatedIDs } = await validateCandidateMembers( viewer, { newMemberIDs }, { threadType: nextThreadType, parentThreadID: nextParentThreadID, containingThreadID: nextThreadAncestry.containingThreadID, defaultRolePermissions, }, { requireRelationship: !forceAddMembers }, ); if ( validatedIDs && Number(validatedIDs?.length) < Number(newMemberIDs?.length) ) { throw new ServerError('invalid_credentials'); } })(); const { nextThreadAncestry } = await promiseAll({ nextThreadAncestry: determineThreadAncestryPromise, confirmParentPermissionPromise, validateNewMembersPromise, }); if (nextThreadAncestry.containingThreadID !== oldContainingThreadID) { sqlUpdate.containing_thread_id = nextThreadAncestry.containingThreadID; } if (nextThreadAncestry.community !== oldCommunity) { if (!ignorePermissions) { throw new ServerError('invalid_parameters'); } sqlUpdate.community = nextThreadAncestry.community; } if (nextThreadAncestry.depth !== oldDepth) { sqlUpdate.depth = nextThreadAncestry.depth; } const updateQueryPromise = (async () => { if (Object.keys(sqlUpdate).length === 0) { return; } const updateQuery = SQL` UPDATE threads SET ${sqlUpdate} WHERE id = ${request.threadID} `; await dbQuery(updateQuery); })(); const updateRolesPromise = (async () => { if (rolesNeedUpdate) { await updateRoles(viewer, request.threadID, nextThreadType); } })(); const intermediatePromises = {}; intermediatePromises.updateQuery = updateQueryPromise; intermediatePromises.updateRoles = updateRolesPromise; if (newMemberIDs) { intermediatePromises.addMembersChangeset = (async () => { await Promise.all([updateQueryPromise, updateRolesPromise]); return await changeRole(request.threadID, newMemberIDs, null, { setNewMembersToUnread: true, }); })(); } const threadRootChanged = rolesNeedUpdate || nextParentThreadID !== oldParentThreadID; if (threadRootChanged) { intermediatePromises.recalculatePermissionsChangeset = (async () => { await Promise.all([updateQueryPromise, updateRolesPromise]); return await recalculateThreadPermissions(request.threadID); })(); } const { addMembersChangeset, recalculatePermissionsChangeset, } = await promiseAll(intermediatePromises); const membershipRows = []; const relationshipChangeset = new RelationshipChangeset(); if (recalculatePermissionsChangeset) { const { membershipRows: recalculateMembershipRows, relationshipChangeset: recalculateRelationshipChangeset, } = recalculatePermissionsChangeset; membershipRows.push(...recalculateMembershipRows); relationshipChangeset.addAll(recalculateRelationshipChangeset); } let 
addedMemberIDs; if (addMembersChangeset) { const { membershipRows: addMembersMembershipRows, relationshipChangeset: addMembersRelationshipChangeset, } = addMembersChangeset; addedMemberIDs = addMembersMembershipRows .filter( row => row.operation === 'save' && row.threadID === request.threadID && Number(row.role) > 0, ) .map(row => row.userID); membershipRows.push(...addMembersMembershipRows); relationshipChangeset.addAll(addMembersRelationshipChangeset); } const changeset = { membershipRows, relationshipChangeset }; const { threadInfos, viewerUpdates } = await commitMembershipChangeset( viewer, changeset, { // This forces an update for this thread, // regardless of whether any membership rows are changed changedThreadIDs: Object.keys(sqlUpdate).length > 0 ? new Set([request.threadID]) : new Set(), }, ); let newMessageInfos = []; if (!silenceMessages) { const time = Date.now(); const messageDatas = []; for (const fieldName in changedFields) { const newValue = changedFields[fieldName]; messageDatas.push({ type: messageTypes.CHANGE_SETTINGS, threadID: request.threadID, creatorID: viewer.userID, time, field: fieldName, value: newValue, }); } if (addedMemberIDs && addedMemberIDs.length > 0) { messageDatas.push({ type: messageTypes.ADD_MEMBERS, threadID: request.threadID, creatorID: viewer.userID, time, addedUserIDs: addedMemberIDs, }); } newMessageInfos = await createMessages(viewer, messageDatas); } if (hasMinCodeVersion(viewer.platformDetails, 62)) { return { updatesResult: { newUpdates: viewerUpdates }, newMessageInfos }; } return { threadInfo: threadInfos[request.threadID], threadInfos, updatesResult: { newUpdates: viewerUpdates, }, newMessageInfos, }; } async function joinThread( viewer: Viewer, request: ServerThreadJoinRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const [isMember, hasPermission] = await Promise.all([ fetchViewerIsMember(viewer, request.threadID), checkThreadPermission( viewer, request.threadID, threadPermissions.JOIN_THREAD, ), ]); if (!hasPermission) { throw new ServerError('invalid_parameters'); } // TODO: determine code version const hasCodeVersionBelow87 = !hasMinCodeVersion(viewer.platformDetails, 87); const hasCodeVersionBelow62 = !hasMinCodeVersion(viewer.platformDetails, 62); const { calendarQuery } = request; if (isMember) { const response: ThreadJoinResult = { rawMessageInfos: [], truncationStatuses: {}, userInfos: {}, updatesResult: { newUpdates: [], }, }; if (calendarQuery && hasCodeVersionBelow87) { response.rawEntryInfos = []; } if (hasCodeVersionBelow62) { response.threadInfos = {}; } return response; } if (calendarQuery) { const threadFilterIDs = filteredThreadIDs(calendarQuery.filters); if ( !threadFilterIDs || threadFilterIDs.size !== 1 || threadFilterIDs.values().next().value !== request.threadID ) { throw new ServerError('invalid_parameters'); } } const changeset = await changeRole(request.threadID, [viewer.userID], null); const membershipResult = await commitMembershipChangeset(viewer, changeset, { calendarQuery, }); const messageData = { type: messageTypes.JOIN_THREAD, threadID: request.threadID, creatorID: viewer.userID, time: Date.now(), }; const newMessages = await createMessages(viewer, [messageData]); const messageSelectionCriteria = { threadCursors: { [request.threadID]: false }, }; if (!hasCodeVersionBelow87) { return { rawMessageInfos: newMessages, truncationStatuses: {}, userInfos: membershipResult.userInfos, updatesResult: { newUpdates: membershipResult.viewerUpdates, }, }; } const 
[fetchMessagesResult, fetchEntriesResult] = await Promise.all([ fetchMessageInfos(viewer, messageSelectionCriteria, defaultNumberPerThread), calendarQuery ? fetchEntryInfos(viewer, [calendarQuery]) : undefined, ]); const rawEntryInfos = fetchEntriesResult && fetchEntriesResult.rawEntryInfos; const response: ThreadJoinResult = { rawMessageInfos: fetchMessagesResult.rawMessageInfos, truncationStatuses: fetchMessagesResult.truncationStatuses, userInfos: membershipResult.userInfos, updatesResult: { newUpdates: membershipResult.viewerUpdates, }, }; if (hasCodeVersionBelow62) { response.threadInfos = membershipResult.threadInfos; } if (rawEntryInfos) { response.rawEntryInfos = rawEntryInfos; } return response; } async function updateThreadMembers(viewer: Viewer) { const { threadInfos } = await fetchThreadInfos( viewer, SQL`t.parent_thread_id IS NOT NULL `, ); const updateDatas = []; const time = Date.now(); for (const threadID in threadInfos) { updateDatas.push({ type: updateTypes.UPDATE_THREAD, userID: viewer.id, time, threadID: threadID, targetSession: viewer.session, }); } await createUpdates(updateDatas); } export { updateRole, removeMembers, leaveThread, updateThread, joinThread, updateThreadMembers, }; diff --git a/keyserver/src/updaters/upload-updaters.js b/keyserver/src/updaters/upload-updaters.js index be3c516a2..47425e6c0 100644 --- a/keyserver/src/updaters/upload-updaters.js +++ b/keyserver/src/updaters/upload-updaters.js @@ -1,38 +1,38 @@ // @flow import type { MediaMessageServerDBContent } from 'lib/types/messages/media.js'; import { getUploadIDsFromMediaMessageServerDBContents } from 'lib/types/messages/media.js'; -import { dbQuery, SQL } from '../database/database'; -import type { Viewer } from '../session/viewer'; +import { dbQuery, SQL } from '../database/database.js'; +import type { Viewer } from '../session/viewer.js'; async function assignMedia( viewer: Viewer, mediaIDs: $ReadOnlyArray, containerID: string, ): Promise { const query = SQL` UPDATE uploads SET container = ${containerID} WHERE id IN (${mediaIDs}) AND uploader = ${viewer.id} AND container IS NULL `; await dbQuery(query); } async function assignMessageContainerToMedia( viewer: Viewer, mediaMessageContents: $ReadOnlyArray, containerID: string, ): Promise { const uploadIDs = getUploadIDsFromMediaMessageServerDBContents( mediaMessageContents, ); const query = SQL` UPDATE uploads SET container = ${containerID} WHERE id IN (${uploadIDs}) AND uploader = ${viewer.id} AND container IS NULL `; await dbQuery(query); } export { assignMedia, assignMessageContainerToMedia }; diff --git a/keyserver/src/updaters/user-subscription-updaters.js b/keyserver/src/updaters/user-subscription-updaters.js index 1548f51ec..4f15379ac 100644 --- a/keyserver/src/updaters/user-subscription-updaters.js +++ b/keyserver/src/updaters/user-subscription-updaters.js @@ -1,63 +1,63 @@ // @flow -import { viewerIsMember } from 'lib/shared/thread-utils'; +import { viewerIsMember } from 'lib/shared/thread-utils.js'; import type { ThreadSubscription, SubscriptionUpdateRequest, -} from 'lib/types/subscription-types'; -import { updateTypes } from 'lib/types/update-types'; -import { ServerError } from 'lib/utils/errors'; - -import { createUpdates } from '../creators/update-creator'; -import { dbQuery, SQL } from '../database/database'; -import { fetchThreadInfos } from '../fetchers/thread-fetchers'; -import type { Viewer } from '../session/viewer'; +} from 'lib/types/subscription-types.js'; +import { updateTypes } from 'lib/types/update-types.js'; +import { 
ServerError } from 'lib/utils/errors.js'; + +import { createUpdates } from '../creators/update-creator.js'; +import { dbQuery, SQL } from '../database/database.js'; +import { fetchThreadInfos } from '../fetchers/thread-fetchers.js'; +import type { Viewer } from '../session/viewer.js'; async function userSubscriptionUpdater( viewer: Viewer, update: SubscriptionUpdateRequest, ): Promise { if (!viewer.loggedIn) { throw new ServerError('not_logged_in'); } const { threadInfos } = await fetchThreadInfos( viewer, SQL`t.id = ${update.threadID}`, ); const threadInfo = threadInfos[update.threadID]; if (!viewerIsMember(threadInfo)) { throw new ServerError('not_member'); } const promises = []; const newSubscription = { ...threadInfo.currentUser.subscription, ...update.updatedFields, }; const saveQuery = SQL` UPDATE memberships SET subscription = ${JSON.stringify(newSubscription)} WHERE user = ${viewer.userID} AND thread = ${update.threadID} `; promises.push(dbQuery(saveQuery)); const time = Date.now(); const updateDatas = [ { type: updateTypes.UPDATE_THREAD, userID: viewer.userID, time, threadID: update.threadID, }, ]; promises.push( createUpdates(updateDatas, { viewer, updatesForCurrentSession: 'ignore' }), ); await Promise.all(promises); return newSubscription; } export { userSubscriptionUpdater }; diff --git a/keyserver/src/uploads/media-utils.js b/keyserver/src/uploads/media-utils.js index bc6f7ca2a..83e9209fe 100644 --- a/keyserver/src/uploads/media-utils.js +++ b/keyserver/src/uploads/media-utils.js @@ -1,174 +1,174 @@ // @flow import bmp from '@vingle/bmp-js'; import invariant from 'invariant'; import sharp from 'sharp'; import { serverTranscodableTypes, serverCanHandleTypes, readableFilename, -} from 'lib/media/file-utils'; -import { getImageProcessingPlan } from 'lib/media/image-utils'; -import type { Dimensions } from 'lib/types/media-types'; -import { deepFileInfoFromData } from 'web/media/file-utils'; +} from 'lib/media/file-utils.js'; +import { getImageProcessingPlan } from 'lib/media/image-utils.js'; +import type { Dimensions } from 'lib/types/media-types.js'; +import { deepFileInfoFromData } from 'web/media/file-utils.js'; -import type { UploadInput } from '../creators/upload-creator'; +import type { UploadInput } from '../creators/upload-creator.js'; function initializeSharp(buffer: Buffer, mime: string) { if (mime !== 'image/bmp') { return sharp(buffer); } const bitmap = bmp.decode(buffer, true); return sharp(bitmap.data, { raw: { width: bitmap.width, height: bitmap.height, channels: 4, }, }); } async function validateAndConvert( initialBuffer: Buffer, initialName: string, inputDimensions: ?Dimensions, inputLoop: boolean, size: number, // in bytes ): Promise { const { mime, mediaType } = deepFileInfoFromData(initialBuffer); if (!mime || !mediaType) { return null; } if (!serverCanHandleTypes.has(mime)) { return null; } if (mediaType === 'video') { invariant( inputDimensions, 'inputDimensions should be set in validateAndConvert', ); return { mime: mime, mediaType: mediaType, name: initialName, buffer: initialBuffer, dimensions: inputDimensions, loop: inputLoop, }; } if (!serverTranscodableTypes.has(mime)) { // This should've gotten converted on the client return null; } return convertImage( initialBuffer, mime, initialName, inputDimensions, inputLoop, size, ); } async function convertImage( initialBuffer: Buffer, mime: string, initialName: string, inputDimensions: ?Dimensions, inputLoop: boolean, size: number, ): Promise { let sharpImage, metadata; try { sharpImage = 
initializeSharp(initialBuffer, mime); metadata = await sharpImage.metadata(); } catch (e) { return null; } let initialDimensions = inputDimensions; if (!initialDimensions) { if (metadata.orientation && metadata.orientation > 4) { initialDimensions = { width: metadata.height, height: metadata.width }; } else { initialDimensions = { width: metadata.width, height: metadata.height }; } } const plan = getImageProcessingPlan({ inputMIME: mime, inputDimensions: initialDimensions, inputFileSize: size, inputOrientation: metadata.orientation, }); if (plan.action === 'none') { const name = readableFilename(initialName, mime); invariant(name, `should be able to construct filename for ${mime}`); return { mime, mediaType: 'photo', name, buffer: initialBuffer, dimensions: initialDimensions, loop: inputLoop, }; } console.log(`processing image with ${JSON.stringify(plan)}`); const { targetMIME, compressionRatio, fitInside, shouldRotate } = plan; if (shouldRotate) { sharpImage = sharpImage.rotate(); } if (fitInside) { sharpImage = sharpImage.resize(fitInside.width, fitInside.height, { fit: 'inside', withoutEnlargement: true, }); } if (targetMIME === 'image/png') { sharpImage = sharpImage.png(); } else { sharpImage = sharpImage.jpeg({ quality: compressionRatio * 100 }); } const { data: convertedBuffer, info } = await sharpImage.toBuffer({ resolveWithObject: true, }); const convertedDimensions = { width: info.width, height: info.height }; const { mime: convertedMIME, mediaType: convertedMediaType, } = deepFileInfoFromData(convertedBuffer); if ( !convertedMIME || !convertedMediaType || convertedMIME !== targetMIME || convertedMediaType !== 'photo' ) { return null; } const convertedName = readableFilename(initialName, targetMIME); if (!convertedName) { return null; } return { mime: targetMIME, mediaType: 'photo', name: convertedName, buffer: convertedBuffer, dimensions: convertedDimensions, loop: inputLoop, }; } export { validateAndConvert }; diff --git a/keyserver/src/uploads/uploads.js b/keyserver/src/uploads/uploads.js index 68cbb4298..057a73f1d 100644 --- a/keyserver/src/uploads/uploads.js +++ b/keyserver/src/uploads/uploads.js @@ -1,146 +1,146 @@ // @flow import type { $Request, $Response, Middleware } from 'express'; import invariant from 'invariant'; import multer from 'multer'; import { Readable } from 'stream'; import type { UploadMultimediaResult, UploadDeletionRequest, Dimensions, -} from 'lib/types/media-types'; -import { ServerError } from 'lib/utils/errors'; +} from 'lib/types/media-types.js'; +import { ServerError } from 'lib/utils/errors.js'; -import createUploads from '../creators/upload-creator'; -import { deleteUpload } from '../deleters/upload-deleters'; +import createUploads from '../creators/upload-creator.js'; +import { deleteUpload } from '../deleters/upload-deleters.js'; import { fetchUpload, fetchUploadChunk, getUploadSize, -} from '../fetchers/upload-fetchers'; -import type { MulterRequest } from '../responders/handlers'; -import type { Viewer } from '../session/viewer'; -import { validateAndConvert } from './media-utils'; +} from '../fetchers/upload-fetchers.js'; +import type { MulterRequest } from '../responders/handlers.js'; +import type { Viewer } from '../session/viewer.js'; +import { validateAndConvert } from './media-utils.js'; const upload = multer(); const multerProcessor: Middleware<> = upload.array('multimedia'); type MultimediaUploadResult = { results: UploadMultimediaResult[], }; async function multimediaUploadResponder( viewer: Viewer, req: MulterRequest, ): Promise { 
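// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this diff: the shape of the multipart
// request this responder expects. Files arrive under the `multimedia` field
// (see `upload.array('multimedia')` above); when exactly one file is sent,
// the optional `filename`, `height`, `width`, and `loop` body fields can
// override its metadata. The helper below is hypothetical and simplified: the
// real validation that follows rejects non-string filenames and mismatched
// height/width with `invalid_parameters` rather than silently dropping them.
function sketchSingleFileMetadata(files, body) {
  if (files.length !== 1) {
    // Metadata overrides only apply to single-file uploads.
    return { filename: null, dimensions: null, loop: false };
  }
  const height = body.height ? parseInt(body.height) : null;
  const width = body.width ? parseInt(body.width) : null;
  return {
    filename: typeof body.filename === 'string' ? body.filename : null,
    // Height and width only count as dimensions when provided together.
    dimensions: height && width ? { height, width } : null,
    loop: !!body.loop,
  };
}
// The responder's actual parsing and validation follow.
// ---------------------------------------------------------------------------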
const { files, body } = req; if (!files || !body || typeof body !== 'object') { throw new ServerError('invalid_parameters'); } const overrideFilename = files.length === 1 && body.filename ? body.filename : null; if (overrideFilename && typeof overrideFilename !== 'string') { throw new ServerError('invalid_parameters'); } const inputHeight = files.length === 1 && body.height ? parseInt(body.height) : null; const inputWidth = files.length === 1 && body.width ? parseInt(body.width) : null; if (!!inputHeight !== !!inputWidth) { throw new ServerError('invalid_parameters'); } const inputDimensions: ?Dimensions = inputHeight && inputWidth ? { height: inputHeight, width: inputWidth } : null; const inputLoop = !!(files.length === 1 && body.loop); const validationResults = await Promise.all( files.map(({ buffer, size, originalname }) => validateAndConvert( buffer, overrideFilename ? overrideFilename : originalname, inputDimensions, inputLoop, size, ), ), ); const uploadInfos = validationResults.filter(Boolean); if (uploadInfos.length === 0) { throw new ServerError('invalid_parameters'); } const results = await createUploads(viewer, uploadInfos); return { results }; } async function uploadDownloadResponder( viewer: Viewer, req: $Request, res: $Response, ): Promise { const { uploadID, secret } = req.params; if (!uploadID || !secret) { throw new ServerError('invalid_parameters'); } if (!req.headers.range) { const { content, mime } = await fetchUpload(viewer, uploadID, secret); res.type(mime); res.set('Cache-Control', 'public, max-age=31557600, immutable'); res.send(content); } else { const totalUploadSize = await getUploadSize(uploadID, secret); const range = req.range(totalUploadSize); if (typeof range === 'number' && range < 0) { throw new ServerError( range === -1 ? 
'unsatisfiable_range' : 'malformed_header_string', ); } invariant( Array.isArray(range), 'range should be Array in uploadDownloadResponder!', ); const { start, end } = range[0]; const respWidth = end - start + 1; const { content, mime } = await fetchUploadChunk( uploadID, secret, start, respWidth, ); const respRange = `${start}-${end}/${totalUploadSize}`; const respHeaders = { 'Accept-Ranges': 'bytes', 'Content-Range': `bytes ${respRange}`, 'Content-Type': mime, 'Content-Length': respWidth.toString(), }; // HTTP 206 Partial Content // https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/206 res.writeHead(206, respHeaders); const stream = new Readable(); stream.push(content); stream.push(null); stream.pipe(res); } } async function uploadDeletionResponder( viewer: Viewer, request: UploadDeletionRequest, ): Promise { const { id } = request; await deleteUpload(viewer, id); } export { multerProcessor, multimediaUploadResponder, uploadDownloadResponder, uploadDeletionResponder, }; diff --git a/keyserver/src/utils/ens-cache.js b/keyserver/src/utils/ens-cache.js index 3e107fc01..22912ae5e 100644 --- a/keyserver/src/utils/ens-cache.js +++ b/keyserver/src/utils/ens-cache.js @@ -1,28 +1,28 @@ // @flow import { ethers } from 'ethers'; -import { ENSCache, type EthersProvider } from 'lib/utils/ens-cache'; +import { ENSCache, type EthersProvider } from 'lib/utils/ens-cache.js'; import { getENSNames as baseGetENSNames, type GetENSNames, -} from 'lib/utils/ens-helpers'; +} from 'lib/utils/ens-helpers.js'; const alchemyKey = process.env.COMM_ALCHEMY_KEY; let provider: ?EthersProvider; if (alchemyKey) { provider = new ethers.providers.AlchemyProvider('mainnet', alchemyKey); } let ensCache: ?ENSCache; if (provider) { ensCache = new ENSCache(provider); } let getENSNames: ?GetENSNames; if (ensCache) { getENSNames = baseGetENSNames.bind(null, ensCache); } export { provider, ensCache, getENSNames }; diff --git a/keyserver/src/utils/idempotent.js b/keyserver/src/utils/idempotent.js index 5c19036fe..cfffc8a99 100644 --- a/keyserver/src/utils/idempotent.js +++ b/keyserver/src/utils/idempotent.js @@ -1,20 +1,20 @@ // @flow -import type { Viewer } from '../session/viewer'; +import type { Viewer } from '../session/viewer.js'; function creationString(viewer: Viewer, localID: string): string { return `${viewer.session}|${localID}`; } function localIDFromCreationString( viewer: ?Viewer, ourCreationString: ?string, ): ?string { if (!ourCreationString || !viewer || !viewer.hasSessionInfo) { return null; } const [session, localID] = ourCreationString.split('|'); return session === viewer.session ? 
localID : null; } export { creationString, localIDFromCreationString }; diff --git a/keyserver/src/utils/olm-utils.js b/keyserver/src/utils/olm-utils.js index e6bc9ec0b..45599ee5f 100644 --- a/keyserver/src/utils/olm-utils.js +++ b/keyserver/src/utils/olm-utils.js @@ -1,18 +1,18 @@ // @flow import invariant from 'invariant'; -import { importJSON } from './import-json'; +import { importJSON } from './import-json.js'; type OlmConfig = { +picklingKey: string, +pickledAccount: string, }; async function getOlmConfig(): Promise { const olmConfig = await importJSON({ folder: 'secrets', name: 'olm_config' }); invariant(olmConfig, 'OLM config missing'); return olmConfig; } export { getOlmConfig }; diff --git a/keyserver/src/utils/relationship-changeset.js b/keyserver/src/utils/relationship-changeset.js index 4547db574..ac1499e35 100644 --- a/keyserver/src/utils/relationship-changeset.js +++ b/keyserver/src/utils/relationship-changeset.js @@ -1,100 +1,100 @@ // @flow import invariant from 'invariant'; -import { sortIDs } from 'lib/shared/relationship-utils'; +import { sortIDs } from 'lib/shared/relationship-utils.js'; import { type UndirectedRelationshipRow, undirectedStatus, -} from 'lib/types/relationship-types'; +} from 'lib/types/relationship-types.js'; type RelationshipStatus = 'existing' | 'potentially_missing'; class RelationshipChangeset { relationships: Map = new Map(); finalized: boolean = false; static _getKey(userA: string, userB: string): string { const [user1, user2] = sortIDs(userA, userB); return `${user1}|${user2}`; } _setRelationshipForKey(key: string, status: RelationshipStatus) { invariant( !this.finalized, 'attempting to set relationship on finalized RelationshipChangeset', ); const currentStatus = this.relationships.get(key); if ( currentStatus === 'existing' || (currentStatus && status === 'potentially_missing') ) { return; } this.relationships.set(key, status); } _setRelationship(userA: string, userB: string, status: RelationshipStatus) { if (userA === userB) { return; } const key = RelationshipChangeset._getKey(userA, userB); this._setRelationshipForKey(key, status); } setAllRelationshipsExist(userIDs: $ReadOnlyArray) { for (let i = 0; i < userIDs.length; i++) { for (let j = i + 1; j < userIDs.length; j++) { this._setRelationship(userIDs[i], userIDs[j], 'existing'); } } } setAllRelationshipsNeeded(userIDs: $ReadOnlyArray) { for (let i = 0; i < userIDs.length; i++) { for (let j = i + 1; j < userIDs.length; j++) { this._setRelationship(userIDs[i], userIDs[j], 'potentially_missing'); } } } setRelationshipExists(userA: string, userB: string) { this._setRelationship(userA, userB, 'existing'); } setRelationshipsNeeded(userID: string, otherUserIDs: $ReadOnlyArray) { for (const otherUserID of otherUserIDs) { this._setRelationship(userID, otherUserID, 'potentially_missing'); } } addAll(other: RelationshipChangeset) { other.finalized = true; for (const [key, status] of other.relationships) { this._setRelationshipForKey(key, status); } } _getRows(): UndirectedRelationshipRow[] { const rows = []; for (const [key, status] of this.relationships) { if (status === 'existing') { continue; } const [user1, user2] = key.split('|'); rows.push({ user1, user2, status: undirectedStatus.KNOW_OF }); } return rows; } getRows(): UndirectedRelationshipRow[] { this.finalized = true; return this._getRows(); } getRowCount(): number { return this._getRows().length; } } export default RelationshipChangeset; diff --git a/keyserver/src/utils/security-utils.js b/keyserver/src/utils/security-utils.js index 
2229a1549..491c1d416 100644 --- a/keyserver/src/utils/security-utils.js +++ b/keyserver/src/utils/security-utils.js @@ -1,20 +1,20 @@ // @flow import type { $Request } from 'express'; -import { getAppURLFactsFromRequestURL } from './urls'; +import { getAppURLFactsFromRequestURL } from './urls.js'; function assertSecureRequest(req: $Request) { const { https, proxy } = getAppURLFactsFromRequestURL(req.originalUrl); if (!https) { return; } if ( (proxy === 'none' && req.protocol !== 'https') || (proxy === 'apache' && req.get('X-Forwarded-SSL') !== 'on') ) { throw new Error('insecure request'); } } export { assertSecureRequest }; diff --git a/keyserver/src/utils/urls.js b/keyserver/src/utils/urls.js index f1577d80b..8240c050f 100644 --- a/keyserver/src/utils/urls.js +++ b/keyserver/src/utils/urls.js @@ -1,94 +1,94 @@ // @flow import invariant from 'invariant'; -import { values } from 'lib/utils/objects'; +import { values } from 'lib/utils/objects.js'; -import { importJSON } from './import-json'; +import { importJSON } from './import-json.js'; export type AppURLFacts = { +baseDomain: string, +basePath: string, +https: boolean, +baseRoutePath: string, +proxy?: 'apache' | 'none', // defaults to apache }; const validProxies = new Set(['apache', 'none']); const sitesObj = Object.freeze({ a: 'landing', b: 'commapp', c: 'squadcal', }); export type Site = $Values; const sites: $ReadOnlyArray = values(sitesObj); const cachedURLFacts = new Map(); async function fetchURLFacts(site: Site): Promise { const existing = cachedURLFacts.get(site); if (existing !== undefined) { return existing; } let urlFacts: ?AppURLFacts = await importJSON({ folder: 'facts', name: `${site}_url`, }); if (urlFacts) { const { proxy } = urlFacts; urlFacts = { ...urlFacts, proxy: validProxies.has(proxy) ? 
proxy : 'apache', }; } cachedURLFacts.set(site, urlFacts); return urlFacts; } async function prefetchAllURLFacts() { await Promise.all(sites.map(fetchURLFacts)); } function getSquadCalURLFacts(): ?AppURLFacts { return cachedURLFacts.get('squadcal'); } function getCommAppURLFacts(): ?AppURLFacts { return cachedURLFacts.get('commapp'); } function getAndAssertCommAppURLFacts(): AppURLFacts { const urlFacts = getCommAppURLFacts(); invariant(urlFacts, 'keyserver/facts/commapp_url.json missing'); return urlFacts; } function getAppURLFactsFromRequestURL(url: string): AppURLFacts { const commURLFacts = getCommAppURLFacts(); if (commURLFacts && url.startsWith(commURLFacts.baseRoutePath)) { return commURLFacts; } const squadCalURLFacts = getSquadCalURLFacts(); if (squadCalURLFacts) { return squadCalURLFacts; } invariant(false, 'request received but no URL facts are present'); } function getLandingURLFacts(): ?AppURLFacts { return cachedURLFacts.get('landing'); } function getAndAssertLandingURLFacts(): AppURLFacts { const urlFacts = getLandingURLFacts(); invariant(urlFacts, 'keyserver/facts/landing_url.json missing'); return urlFacts; } export { prefetchAllURLFacts, getSquadCalURLFacts, getCommAppURLFacts, getAndAssertCommAppURLFacts, getLandingURLFacts, getAndAssertLandingURLFacts, getAppURLFactsFromRequestURL, }; diff --git a/keyserver/src/utils/validation-utils.js b/keyserver/src/utils/validation-utils.js index fd691c989..5c1f56490 100644 --- a/keyserver/src/utils/validation-utils.js +++ b/keyserver/src/utils/validation-utils.js @@ -1,187 +1,187 @@ // @flow import type { PolicyType } from 'lib/facts/policies.js'; import { hasMinCodeVersion } from 'lib/shared/version-utils.js'; -import { ServerError } from 'lib/utils/errors'; +import { ServerError } from 'lib/utils/errors.js'; import { tCookie, tPassword, tPlatform, tPlatformDetails, -} from 'lib/utils/validation-utils'; +} from 'lib/utils/validation-utils.js'; import { fetchNotAcknowledgedPolicies } from '../fetchers/policy-acknowledgment-fetchers.js'; -import { verifyClientSupported } from '../session/version'; -import type { Viewer } from '../session/viewer'; +import { verifyClientSupported } from '../session/version.js'; +import type { Viewer } from '../session/viewer.js'; async function validateInput(viewer: Viewer, inputValidator: *, input: *) { if (!viewer.isSocket) { await checkClientSupported(viewer, inputValidator, input); } checkInputValidator(inputValidator, input); } function checkInputValidator(inputValidator: *, input: *) { if (!inputValidator || inputValidator.is(input)) { return; } const error = new ServerError('invalid_parameters'); error.sanitizedInput = input ? 
sanitizeInput(inputValidator, input) : null; throw error; } async function checkClientSupported( viewer: Viewer, inputValidator: *, input: *, ) { let platformDetails; if (inputValidator) { platformDetails = findFirstInputMatchingValidator( inputValidator, tPlatformDetails, input, ); } if (!platformDetails && inputValidator) { const platform = findFirstInputMatchingValidator( inputValidator, tPlatform, input, ); if (platform) { platformDetails = { platform }; } } if (!platformDetails) { ({ platformDetails } = viewer); } await verifyClientSupported(viewer, platformDetails); } const redactedString = '********'; const redactedTypes = [tPassword, tCookie]; function sanitizeInput(inputValidator: *, input: *) { if (!inputValidator) { return input; } if (redactedTypes.includes(inputValidator) && typeof input === 'string') { return redactedString; } if ( inputValidator.meta.kind === 'maybe' && redactedTypes.includes(inputValidator.meta.type) && typeof input === 'string' ) { return redactedString; } if ( inputValidator.meta.kind !== 'interface' || typeof input !== 'object' || !input ) { return input; } const result = {}; for (const key in input) { const value = input[key]; const validator = inputValidator.meta.props[key]; result[key] = sanitizeInput(validator, value); } return result; } function findFirstInputMatchingValidator( wholeInputValidator: *, inputValidatorToMatch: *, input: *, ): any { if (!wholeInputValidator || input === null || input === undefined) { return null; } if ( wholeInputValidator === inputValidatorToMatch && wholeInputValidator.is(input) ) { return input; } if (wholeInputValidator.meta.kind === 'maybe') { return findFirstInputMatchingValidator( wholeInputValidator.meta.type, inputValidatorToMatch, input, ); } if ( wholeInputValidator.meta.kind === 'interface' && typeof input === 'object' ) { for (const key in input) { const value = input[key]; const validator = wholeInputValidator.meta.props[key]; const innerResult = findFirstInputMatchingValidator( validator, inputValidatorToMatch, value, ); if (innerResult) { return innerResult; } } } if (wholeInputValidator.meta.kind === 'union') { for (const validator of wholeInputValidator.meta.types) { if (validator.is(input)) { return findFirstInputMatchingValidator( validator, inputValidatorToMatch, input, ); } } } if (wholeInputValidator.meta.kind === 'list' && Array.isArray(input)) { const validator = wholeInputValidator.meta.type; for (const value of input) { const innerResult = findFirstInputMatchingValidator( validator, inputValidatorToMatch, value, ); if (innerResult) { return innerResult; } } } return null; } async function policiesValidator( viewer: Viewer, policies: $ReadOnlyArray, ) { if (!policies.length) { return; } if (!hasMinCodeVersion(viewer.platformDetails, 181)) { return; } const notAcknowledgedPolicies = await fetchNotAcknowledgedPolicies( viewer.id, policies, ); if (notAcknowledgedPolicies.length) { throw new ServerError('policies_not_accepted', { notAcknowledgedPolicies, }); } } export { validateInput, checkInputValidator, checkClientSupported, policiesValidator, }; diff --git a/landing/app-landing.react.js b/landing/app-landing.react.js index 51894b69d..56855a272 100644 --- a/landing/app-landing.react.js +++ b/landing/app-landing.react.js @@ -1,42 +1,42 @@ // @flow import * as React from 'react'; -import { assetMetaData } from './asset-meta-data'; -import HeroContent from './hero-content.react'; -import InfoBlock from './info-block.react'; +import { assetMetaData } from './asset-meta-data.js'; +import HeroContent 
from './hero-content.react.js'; +import InfoBlock from './info-block.react.js'; import css from './landing.css'; -import Picture from './Picture.react'; -import StarBackground from './star-background.react'; -import usePreloadAssets from './use-pre-load-assets.react'; +import Picture from './Picture.react.js'; +import StarBackground from './star-background.react.js'; +import usePreloadAssets from './use-pre-load-assets.react.js'; function AppLanding(): React.Node { usePreloadAssets(assetMetaData); const [ hero, federated, customizable, encrypted, sovereign, openSource, lessNoisy, ] = assetMetaData; return (
); } export default AppLanding; diff --git a/landing/footer.react.js b/landing/footer.react.js index 195cc2deb..9590057c5 100644 --- a/landing/footer.react.js +++ b/landing/footer.react.js @@ -1,69 +1,69 @@ // @flow import { faExternalLinkSquareAlt } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import * as React from 'react'; import { NavLink } from 'react-router-dom'; import css from './footer.css'; -import SubscriptionForm from './subscription-form.react'; +import SubscriptionForm from './subscription-form.react.js'; const navLinkProps = { activeStyle: { color: 'white', fontWeight: '500', }, }; function Footer(): React.Node { return ( ); } export default Footer; diff --git a/landing/hero-content.react.js b/landing/hero-content.react.js index ad51b7322..9b1c198fb 100644 --- a/landing/hero-content.react.js +++ b/landing/hero-content.react.js @@ -1,36 +1,36 @@ // @flow import * as React from 'react'; import TextLoop from 'react-text-loop'; -import { assetMetaData } from './asset-meta-data'; +import { assetMetaData } from './asset-meta-data.js'; import css from './hero-content.css'; -import SubscriptionForm from './subscription-form.react'; +import SubscriptionForm from './subscription-form.react.js'; function HeroContent(): React.Node { const [hero] = assetMetaData; return (

{'Comm is crypto-native\nchat for '}
DAOs
venture funds
open source
gaming guilds
social clubs

(think "Web3 Discord")

); } export default HeroContent; diff --git a/landing/info-block.react.js b/landing/info-block.react.js index 33fe3dd29..6ad5a36ee 100644 --- a/landing/info-block.react.js +++ b/landing/info-block.react.js @@ -1,32 +1,32 @@ // @flow import * as React from 'react'; import css from './info-block.css'; -import Picture from './Picture.react'; +import Picture from './Picture.react.js'; type InfoBlockProps = { +title: string, +description: string, +url: string, +alt: string, +imageStyle: string, +infoStyle: string, }; function InfoBlock(props: InfoBlockProps): React.Node { const { title, description, url, alt, imageStyle, infoStyle } = props; return ( <>

{title}

{description}

); } export default InfoBlock; diff --git a/landing/investor-data.js b/landing/investor-data.js index da87ef78f..0bd15d477 100644 --- a/landing/investor-data.js +++ b/landing/investor-data.js @@ -1,790 +1,790 @@ // @flow -import _keyBy from 'lodash/fp/keyBy'; -import _shuffle from 'lodash/fp/shuffle'; +import _keyBy from 'lodash/fp/keyBy.js'; +import _shuffle from 'lodash/fp/shuffle.js'; -import { assetsCacheURLPrefix } from './asset-meta-data'; +import { assetsCacheURLPrefix } from './asset-meta-data.js'; type Investors = { +id: string, +name: string, +description: string, +involvement?: string, +imageURL: string, +website?: string, +twitter?: string, +linkedin?: string, }; const investorsData: $ReadOnlyArray = [ { id: 'ashoat_tevosyan', name: 'Ashoat Tevosyan', description: 'Founder of Comm. Learned to code modding PHP forums in the mid-2000s. Joined Facebook full-time at age 20 with last role as EM.', involvement: 'Initially Invested in May 2020', imageURL: `${assetsCacheURLPrefix}/ashoat.png`, website: 'https://site.ashoat.com', twitter: 'ashoat', linkedin: 'in/ashoatt', }, { id: 'slow_ventures', name: 'Slow Ventures', description: 'Slow Ventures is a generalist fund that backs founders from the earliest days. Slow has been heavily investing in and around the crypto space for the last 5+ years.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/slow_ventures.jpeg`, website: 'https://slow.co', twitter: 'slow', linkedin: 'company/slow-ventures', }, { id: 'electric_capital', name: 'Electric Capital', description: 'An early stage venture firm focused on cryptocurrencies, blockchain, fintech, and marketplaces.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/electric_capital.jpeg`, twitter: 'ElectricCapital', linkedin: 'company/electric-capital', }, { id: 'graph_ventures', name: 'Graph Ventures', description: 'Early-stage VC firm established in 2011 by leading technology entrepreneurs and executives with 300+ global investments.', involvement: 'Initially Invested in Nov 2021', imageURL: `${assetsCacheURLPrefix}/investors/graph_ventures.jpeg`, website: 'https://www.graphventures.com', twitter: 'graphventures', linkedin: 'company/graph-ventures', }, { id: 'draft_vc', name: 'Draft VC', description: 'Seed stage venture fund focused on Web3, climate, proptech, and fintech.', involvement: 'Initially Invested in Dec 2021', imageURL: `${assetsCacheURLPrefix}/investors/draft_vc.jpeg`, website: 'https://draftvc.com', twitter: 'draftvc', linkedin: 'company/draft-ventures', }, { id: 'd1_ventures', name: 'D1 Ventures', description: 'D1 Ventures is a private investment firm backing early stage crypto native infrastructures and applications.', involvement: 'Initially Invested in Feb 2022', imageURL: `${assetsCacheURLPrefix}/investors/d1_ventures.jpeg`, website: 'https://www.d1.ventures', twitter: 'd1ventures', linkedin: 'company/d1-ventures', }, { id: 'eniac_ventures', name: 'Eniac Ventures', description: 'Leads pre-seed & seed rounds in bold founders who use code to create transformational companies.', involvement: 'Initially Invested in Feb 2022', imageURL: `${assetsCacheURLPrefix}/investors/eniac_ventures.jpeg`, website: 'https://eniac.vc', twitter: 'EniacVC', linkedin: 'company/eniacvc', }, { id: 'seed_club_ventures', name: 'Seed Club Ventures', description: 'Venture DAO that invests in early-stage projects building and enabling a community-owned internet. 
SCV believes community ownership is the superpower of Web3 and is guided by the principles of inclusivity and opportunity for all participants.', involvement: 'Initially Invested in Dec 2021', imageURL: `${assetsCacheURLPrefix}/investors/seed_club_ventures.jpeg`, twitter: 'seedclubvc', }, { id: 'metaweb_ventures', name: 'MetaWeb Ventures', description: 'Global crypto firm investing in the future of Web3.', involvement: 'Initially Invested in Feb 2022', imageURL: `${assetsCacheURLPrefix}/investors/metaweb_ventures.jpeg`, website: 'https://www.metaweb.vc', twitter: 'MetaWebVC', }, { id: 'coinfund', name: 'CoinFund', description: 'CoinFund is a web3 and crypto focused investment firm and registered investment adviser founded in 2015 with the goal of shaping the global transition to web3. The firm invests in seed, venture and liquid opportunities within the blockchain sector with a focus on digital assets, decentralization technologies, and key enabling infrastructure.', involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/coinfund.jpeg`, website: 'https://www.coinfund.io', twitter: 'coinfund_io', linkedin: 'company/coinfund', }, { id: 'shima_capital', name: 'Shima Capital', description: 'An early-stage global venture firm focused on supporting cutting edge blockchain startups.', involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/shima_capital.jpeg`, website: 'https://shima.capital', twitter: 'shimacapital', linkedin: 'company/shima-capital', }, { id: 'republic_capital', name: 'Republic Capital', description: 'Republic Capital is a multi-stage venture capital firm focused on accelerating disruptive innovations.', involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/republic_capital.jpeg`, website: 'https://www.republiccapital.co', twitter: '_rcapital_', linkedin: 'company/republic-capital', }, { id: 'global_coin_research', name: 'Global Coin Research', description: 'Investment & Research DAO focused on Web3', involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/global_coin_research.jpeg`, website: 'https://globalcoinresearch.com', twitter: 'Globalcoinrsrch', }, { id: '3se_holdings', name: '3SE Holdings', description: 'Crypto-Native Operators Fund', involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/3se_holdings.jpeg`, website: 'https://3seholdings.com', twitter: '3SEHoldings', }, { id: 'vibe_capital', name: 'Vibe Capital', description: 'Vibecap is a $10m Pre-Seed and Seed stage fund that invests in Deep Sci, AI, and Web3.', involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/vibe_capital.jpeg`, website: 'https://vibecap.co', twitter: 'vibe_cap', }, { id: 'longhash_ventures', name: 'LongHash Ventures', description: "Asia's leading Web3 investment fund and accelerator collaborating with founders to build their Web3 model and tap into the vast potential of Asia.", involvement: 'Initially Invested in July 2022', imageURL: `${assetsCacheURLPrefix}/investors/longhash_ventures.jpeg`, website: 'https://longhash.vc', twitter: 'LongHashVC', }, { id: 'micheal_stoppelman', name: 'Michael Stoppelman', description: 'Investor in Flexport, Vanta, and Benchling, Former SVP of Engineering at Yelp & ex-Google Software Engineer', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/micheal_stoppelman.jpeg`, twitter: 'stopman', 
linkedin: 'in/michaelstoppelman', }, { id: 'hursh_agrawal', name: 'Hursh Agrawal', description: 'Cofounder of The Browser Company', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/hursh_agrawal.jpeg`, twitter: 'hursh', linkedin: 'in/hurshagrawal', }, { id: 'adam_midvidy', name: 'Adam Midvidy', description: 'Developer at Jane Street', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/adam_midvidy.jpeg`, twitter: 'amidvidy', }, { id: 'dan_shipper', name: 'Dan Shipper', description: 'CEO of Every', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/dan_shipper.jpeg`, twitter: 'danshipper', linkedin: 'in/danshipper', }, { id: 'mary_pimenova', name: 'Mary Pimenova', description: 'Senior Engineering Manager at Robinhood', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/mary_pimenova.jpeg`, linkedin: 'in/mpimenova', }, { id: 'ranjan_pradeep', name: 'Ranjan Pradeep', description: 'Software Engineer at Microsoft', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/ranjan_pradeep.jpeg`, }, { id: 'tyler_menezes', name: 'Tyler Menezes', description: 'Helping kids find a place in tech at CodeDay. Forbes 30 Under 30, 425 Mag 30 Under 30. Tech & Learning Influential in EdTech.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/tyler_menezes.jpeg`, twitter: 'tylermenezes', linkedin: 'in/tylermenezes', }, { id: 'alex_esibov', name: 'Alex Esibov', description: 'Principal Lead Product Manager at Microsoft', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/alex_esibov.jpeg`, twitter: 'AlexEsibov', linkedin: 'in/alexesibov', }, { id: 'inna_turshudzhyan', name: 'Inna Turshudzhyan', description: 'Senior Software Engineer at Microsoft', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/inna_turshudzhyan.jpeg`, }, { id: 'nick_mauro', name: 'Nick Mauro', description: 'Comm is building a messaging service that puts encryption and decentralization first. 
Thrilled to be among the group of early investors.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/nick_mauro.jpeg`, twitter: '0x7BA086', }, { id: 'josh_kornreich', name: 'Josh Kornreich', description: 'Managing Partner at Unit Engineering Group and CTO at Ignite Tournaments', involvement: 'Initially Invested in May 2020', imageURL: `${assetsCacheURLPrefix}/investors/josh_kornreich.jpeg`, linkedin: 'in/joshuakornreich', }, { id: 'lucas_lowman', name: 'Lucas Lowman', description: 'Creative Director at Weirdbreak', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/lucas_lowman.jpeg`, linkedin: 'in/lucaslowman', }, { id: 'jonathan_shi', name: 'Jonathan Shi', description: 'Postdoctoral Researcher', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/jonathan_shi.jpeg`, website: 'https://www.jshi.science', twitter: 'jtnshi', }, { id: 'larry_fenn', name: 'Larry Fenn', description: 'Data journalist at Associated Press', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/larry_fenn.jpeg`, website: 'https://larryfenn.com', }, { id: 'dave_lowman', name: 'Dave Lowman', description: 'Senior Financial Services Executive, Board Member and Entrepreneur', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/dave_lowman.jpeg`, linkedin: 'in/dave-lowman-a90bb81', }, { id: 'jason_yeh', name: 'Jason Yeh', description: 'Founder at Adamant. Previously at Greycroft VC.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/jason_yeh.jpeg`, twitter: 'jayyeh', }, { id: 'blake_embrey', name: 'Blake Embrey', description: 'Senior Software Engineer at Opendoor.com', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/blake_embrey.jpeg`, website: 'http://blakeembrey.me', twitter: 'blakeembrey', linkedin: 'in/blakeembrey', }, { id: 'ted_kalaw', name: 'Ted Kalaw', description: 'Software Engineer at Yoz Labs. Previously Senior Software Engineer at UnitedMasters and Facebook.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/ted_kalaw.jpeg`, linkedin: 'in/ted-kalaw-791a0541', }, { id: 'jack_arenas', name: 'Jack Arenas', description: 'Co-founder CTO at Modern Life and Co-founder at Petal. Previously Goldman Sachs and Amazon.', involvement: 'Initially Invested in Feb 2021', imageURL: `${assetsCacheURLPrefix}/investors/jack_arenas.jpeg`, twitter: 'jackarenas', linkedin: 'in/jackarenas', }, { id: 'dave_schatz', name: 'Dave Schatz', description: 'Entrepreneur, engineer, blockchain dev, angel investor. 
Previously at Circles For Zoom and Facebook.',
    involvement: 'Initially Invested in Feb 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/dave_schatz.jpeg`,
    twitter: 'daveschatz',
    linkedin: 'in/daveschatz',
  },
  {
    id: 'michelle_nacouzi',
    name: 'Michelle Nacouzi',
    description: 'Early-stage VC at Northzone',
    involvement: 'Initially Invested in Feb 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/michelle_nacouzi.jpeg`,
    twitter: 'MichelleNacouzi',
  },
  {
    id: 'rousseau_kazi',
    name: 'Rousseau Kazi',
    description: 'CEO at Threads',
    involvement: 'Initially Invested in Feb 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/rousseau_kazi.jpeg`,
    twitter: 'rousseaukazi',
    linkedin: 'in/rousseaukazi',
  },
  {
    id: 'liu_jiang',
    name: 'Liu Jiang',
    description: 'Investor and Advisor',
    involvement: 'Initially Invested in Feb 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/liu_jiang.jpeg`,
    linkedin: 'in/liujiang1',
  },
  {
    id: 'jan_karl_driscoll',
    name: 'Jan-Karl Driscoll',
    description: 'Software Engineer and Architect. Previously at BounceX.',
    involvement: 'Initially Invested in Feb 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/jan_karl_driscoll.jpeg`,
    linkedin: 'in/jan-karl-driscoll-91254a3',
  },
  {
    id: 'tess_rinearson',
    name: 'Tess Rinearson',
    description: 'Leads Blockchain at Twitter. Previously the VP of Engineering at the Interchain Foundation.',
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/tess_rinearson.jpeg`,
    twitter: '_tessr',
    linkedin: 'in/temiri',
  },
  {
    id: 'ashwin_bhat',
    name: 'Ashwin Bhat',
    description: 'Engineering Manager at Loom',
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/ashwin_bhat.jpeg`,
    twitter: 'swac',
    linkedin: 'in/ashwin-bhat-23573222',
  },
  {
    id: 'lev_dubinets',
    name: 'Lev Dubinets',
    description: 'Engineering Manager at Mercury',
    imageURL: `${assetsCacheURLPrefix}/investors/lev_dubinets.jpeg`,
    twitter: 'LevDubinets',
  },
  {
    id: 'charlie_songhurst',
    name: 'Charlie Songhurst',
    description: 'Private investor in tech companies. Previously at Microsoft and McKinsey.',
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/charlie_songhurst.jpeg`,
    linkedin: 'in/charlessonghurst',
  },
  {
    id: 'edward_lando',
    name: 'Edward Lando',
    description: 'Managing Partner at Pareto Holdings',
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/edward_lando.jpeg`,
    twitter: 'edwardlando',
    linkedin: 'in/edwardlando',
  },
  {
    id: 'alina_libova_cohen',
    name: 'Alina Libova Cohen',
    description: 'Angel Investor',
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/alina_libova_cohen.jpeg`,
    twitter: 'alina_libova',
  },
  {
    id: 'kahren_tevosyan',
    name: 'Kahren Tevosyan',
    description: 'VP of Engineering at Microsoft',
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/kahren_tevosyan.jpeg`,
  },
  {
    id: 'anna_barhudarian',
    name: 'Anna Barhudarian',
    description: "Ashoat's mother and Principal PM Manager at Microsoft",
    involvement: 'Initially Invested in Mar 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/anna_barhudarian.jpeg`,
  },
  {
    id: 'jim_posen',
    name: 'Jim Posen',
    description: 'Cryptography engineer. Previously technical lead at Coinbase.',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/jim_posen.jpeg`,
    twitter: 'jimpo_potamus',
    linkedin: 'in/jimpo',
  },
  {
    id: 'chet_corcos',
    name: 'Chet Corcos',
    description: 'Previously engineer at Notion, Affirm, and SpaceX',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/chet_corcos.jpeg`,
    website: 'http://chetcorcos.com',
    twitter: 'ccorcos',
  },
  {
    id: 'eric_siu',
    name: 'Eric Siu',
    description: 'Founder at Single Grain. Investor and creator at Leveling up Heroes.',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/eric_siu.jpeg`,
    twitter: 'ericosiu',
    linkedin: 'in/ericosiu',
  },
  {
    id: 'gmoney',
    name: 'gmoney',
    description: 'Founder at Admit One and 9dcc',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/gmoney.jpeg`,
    twitter: 'gmoneyNFT',
  },
  {
    id: 'dylan_portelance',
    name: 'Dylan Portelance',
    description: 'Product Growth at Photomath',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/dylan_portelance.jpeg`,
    twitter: 'dylanjpo',
    linkedin: 'in/dylanportelance',
  },
  {
    id: 'lisa_xu',
    name: 'Lisa Xu',
    description: 'VC at FirstMark investing in early stage consumer and web3 startups. Co-host of Crypto Driven.',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/lisa_xu.jpeg`,
    twitter: 'lisamxu',
  },
  {
    id: 'mark_mullen',
    name: 'Mark Mullen',
    description: 'Managing Partner at Double M and Co-Founder at Bonfire Ventures',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/mark_mullen.jpeg`,
    website: 'https://www.bonfirevc.com/team/mark-mullen',
  },
  {
    id: 'reuben_bramanathan',
    name: 'Reuben Bramanathan',
    description: 'General Partner at IDEO CoLab Ventures. Previously Head of Asset Management and Product Counsel at Coinbase.',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/reuben_bramanathan.jpeg`,
    twitter: 'bramanathan',
    linkedin: 'in/rbramanathan',
  },
  {
    id: 'balaji_srinivasan',
    name: 'Balaji Srinivasan',
    description: 'Author of The Network State. Formerly the CTO of Coinbase and General Partner at Andreessen Horowitz.',
    involvement: 'Initially Invested in Nov 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/balaji_srinivasan.jpeg`,
    twitter: 'balajis',
  },
  {
    id: 'david_rodriguez',
    name: 'David Rodriguez',
    description: 'Co-Founder and Managing Partner at Draft Ventures',
    involvement: 'Initially Invested in Dec 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/david_rodriguez.jpeg`,
    twitter: 'davidjrodriguez',
    linkedin: 'in/davidjrodriguez',
  },
  {
    id: 'artia_moghbel',
    name: 'Artia Moghbel',
    description: 'Co-founder at Draft Ventures. Previously the COO at DFINITY.',
    involvement: 'Initially Invested in Dec 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/artia_moghbel.jpeg`,
    twitter: 'artia',
    linkedin: 'in/artiam',
  },
  {
    id: 'grant_gittlin',
    name: 'Grant Gittlin',
    description: 'Investor & Former CGO at MediaLink',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/grant_gittlin.jpeg`,
    linkedin: 'in/grantgittlin',
  },
  {
    id: 'julian_weisser',
    name: 'Julian Weisser',
    description: 'Co-Founder of On Deck. Ex-core at Constitution DAO and GP at Other Ventures.',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/julian_weisser.jpeg`,
    twitter: 'julianweisser',
    linkedin: 'in/julianweisser',
  },
  {
    id: 'ethan_beard',
    name: 'Ethan Beard',
    description: 'Co-Founder at Yoz Labs. Previously Senior VP at Ripple.',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/ethan_beard.jpeg`,
    twitter: 'ethanbeard',
    linkedin: 'in/ethanbeard',
  },
  {
    id: 'tim_chen',
    name: 'Tim Chen',
    description: 'General Partner at Essence VC. Co-host of the Open Source Startup Podcast.',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/tim_chen.jpeg`,
    twitter: 'tnachen',
    linkedin: 'in/timchen',
  },
  {
    id: 'jennifer_liu',
    name: 'Jennifer Liu',
    description: 'Founding Partner at D1 Ventures.',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/jennifer_liu.jpeg`,
  },
  {
    id: 'tamara_frankel',
    name: 'Tamara Frankel',
    description: 'Founding Partner at D1 Ventures. Previously Founding Partner at Azoth Group.',
    involvement: 'Initially Invested in Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/tamara_frankel.jpeg`,
    linkedin: 'in/tamara-based-jpegs',
  },
  {
    id: 'ahmed_jafri',
    name: 'Ahmed Jafri',
    description: 'Engineering Manager at Meta',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/ahmed_jafri.jpeg`,
    twitter: 'ahmedjafrii',
    linkedin: 'in/ahmedjafrii',
  },
  {
    id: 'aksel_piran',
    name: 'Aksel Piran',
    description: 'Founder at CP3 Ventures and The Syndicate by BANA',
    involvement: 'Initially Invested in Jan 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/aksel_piran.jpeg`,
    linkedin: 'in/apiran',
  },
  {
    id: 'paul_veradittakit',
    name: 'Paul Veradittakit',
    description: 'Investor at Pantera Capital',
    involvement: 'Initially Invested in Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/paul_veradittakit.jpeg`,
    twitter: 'veradittakit',
    linkedin: 'in/veradittakit',
  },
  {
    id: 'ammar_karmali',
    name: 'Ammar Karmali',
    description: 'Investment Banking Associate at Gordon Dyal & Co. Advisory Group LP',
    involvement: 'Initially Invested in Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/ammar_karmali.jpeg`,
    linkedin: 'in/ammar-karmali-658b10b4',
  },
  {
    id: 'avi_zurlo',
    name: 'Avi Zurlo',
    description: 'Ventures Associate at Delphi Digital',
    involvement: 'Initially Invested in Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/avi_zurlo.jpeg`,
    twitter: 'thejewforu',
    linkedin: 'in/avi-zurlo-2b0760104',
  },
  {
    id: 'tom_shaughnessy',
    name: 'Tom Shaughnessy',
    description: 'Co-Founder at Delphi Digital. Host of The Delphi Podcast.',
    involvement: 'Initially Invested in Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/tom_shaughnessy.jpeg`,
    twitter: 'Shaughnessy119',
    linkedin: 'in/tom-shaughnessy-jr-2572a220',
  },
  {
    id: 'yan_liberman',
    name: 'Yan Liberman',
    description: 'Co-Founder at Delphi Digital',
    involvement: 'Initially Invested in Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/yan_liberman.jpeg`,
    twitter: 'YanLiberman',
    linkedin: 'in/yanliberman',
  },
  {
    id: 'faizan_khan',
    name: 'Faizan Khan',
    description: 'Founder, Managing Director at Visary Capital',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/faizan_khan.jpeg`,
    linkedin: 'in/faizanjkhan',
  },
  {
    id: 'lane_rettig',
    name: 'Lane Rettig',
    description: 'Core team at teamspacemesh. Previously Ethereum Core Dev.',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/lane_rettig.jpeg`,
    twitter: 'lrettig',
    linkedin: 'in/lane-rettig-32904b227',
  },
  {
    id: 'lon_lundgren',
    name: 'Lon Lundgren',
    description: 'Founder at Galactical and Ocelot. Previously at AWS and Microsoft.',
    imageURL: `${assetsCacheURLPrefix}/investors/lon_lundgren.jpeg`,
    linkedin: 'in/lonlundgren',
  },
  {
    id: 'will_papper',
    name: 'Will Papper',
    description: 'Co-Founder at Syndicate DAO. Core at Constitution DAO.',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/will_papper.jpeg`,
    twitter: 'WillPapper',
  },
  {
    id: 'sida_li',
    name: 'Sida Li',
    description: 'Strategy at SyndicateDAO. Previously built ventures at IDEO and Atomic.',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/sida_li.jpeg`,
    twitter: 'Sidaelle',
    linkedin: 'in/sida-li-35729698',
  },
  {
    id: 'reverie',
    name: 'Reverie',
    description: 'Reverie helps DAOs grow',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/reverie.jpeg`,
    website: 'https://www.reverie.ooo',
    twitter: 'hi_reverie',
    linkedin: 'company/reveriereserves',
  },
  {
    id: 'patricio_worthalter',
    name: 'Patricio Worthalter',
    description: 'Founder at POAP',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/patricio_worthalter.jpeg`,
    twitter: 'worthalter',
  },
  {
    id: 'andrew_green',
    name: 'Andrew Green',
    description: 'Co-Founder & CEO of Strider. Previously, Partner at Andreessen Horowitz.',
    involvement: 'Initially Invested in July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/andrew_green.jpeg`,
    linkedin: 'in/andrewngreen10',
  },
  {
    id: 'taylor_rogalski',
    name: 'Taylor Rogalski',
    description: 'Former Product Designer at Facebook, Pioneer.app, and ClassDojo',
    involvement: 'Advisor since Feb 2021',
    imageURL: `${assetsCacheURLPrefix}/investors/taylor_rogalski.jpeg`,
    twitter: 'tayroga',
    linkedin: 'in/taylor-rogalski-4b169767',
  },
  {
    id: 'julia_lipton',
    name: 'Julia Lipton',
    description: 'Investing in Web3 at Awesome People Ventures',
    imageURL: `${assetsCacheURLPrefix}/investors/julia_lipton.jpeg`,
    involvement: 'Advisor since Oct 2021',
    twitter: 'JuliaLipton',
    linkedin: 'in/julialipton',
  },
  {
    id: 'varun_dhananjaya',
    name: 'Varun Dhananjaya',
    description: 'Software Engineer at Comm',
    involvement: 'Joined July 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/varun_dhananjaya.jpeg`,
  },
  {
    id: 'mark_rafferty',
    name: 'Mark Rafferty',
    description: 'Recruiter at Comm',
    involvement: 'Joined Feb 2022',
    imageURL: `${assetsCacheURLPrefix}/investors/mark_rafferty.jpeg`,
    twitter: 'markraff',
  },
];

// Display order on the landing page is randomized once, when this module is evaluated.
const shuffledInvestorsData: $ReadOnlyArray<Investors> = _shuffle(
  investorsData,
);

// Keyed by id so the investor profile modal can look an investor up directly.
const keyedInvestorData: { [key: string]: Investors } = _keyBy('id')(
  investorsData,
);

export { shuffledInvestorsData, keyedInvestorData };
diff --git a/landing/investor-profile-modal.react.js b/landing/investor-profile-modal.react.js
index 662456ee8..99181f4d6 100644
--- a/landing/investor-profile-modal.react.js
+++ b/landing/investor-profile-modal.react.js
@@ -1,48 +1,48 @@
// @flow
import * as React from 'react';

-import ModalOverlay from 'lib/components/modal-overlay.react';
-import { useModalContext } from 'lib/components/modal-provider.react';
+import ModalOverlay from 'lib/components/modal-overlay.react.js';
+import { useModalContext } from 'lib/components/modal-provider.react.js';

-import { keyedInvestorData } from
'./investor-data';
+import { keyedInvestorData } from './investor-data.js';
import css from './investor-profile-modal.css';
-import InvestorProfile from './investor-profile.react';
+import InvestorProfile from './investor-profile.react.js';

type Props = {
  +investorID: string,
};

function InvestorProfileModal(props: Props): React.Node {
  const { investorID } = props;
  const { popModal } = useModalContext();

  const selectedInvestor = React.useMemo(() => {
    const foundInvestor = keyedInvestorData[investorID];
    if (!foundInvestor) {
      return;
    }
    return (
    );
  }, [investorID, popModal]);

  return (
{selectedInvestor}
  );
}

export default InvestorProfileModal;
diff --git a/landing/investors.react.js b/landing/investors.react.js
index a20bdee3a..06e3bd482 100644
--- a/landing/investors.react.js
+++ b/landing/investors.react.js
@@ -1,56 +1,56 @@
// @flow
import * as React from 'react';

-import { useModalContext } from 'lib/components/modal-provider.react';
+import { useModalContext } from 'lib/components/modal-provider.react.js';

-import { shuffledInvestorsData } from './investor-data';
-import InvestorProfileModal from './investor-profile-modal.react';
-import InvestorProfile from './investor-profile.react';
+import { shuffledInvestorsData } from './investor-data.js';
+import InvestorProfileModal from './investor-profile-modal.react.js';
+import InvestorProfile from './investor-profile.react.js';
import css from './investors.css';

function Investors(): React.Node {
  const { pushModal } = useModalContext();

  const onClickInvestorProfileCard = React.useCallback(
    (id: string) => {
      pushModal();
    },
    [pushModal],
  );

  const investors = React.useMemo(() => {
    return shuffledInvestorsData.map(investor => (
      onClickInvestorProfileCard(investor.id)} website={investor.website} twitterHandle={investor.twitter} linkedinHandle={investor.linkedin} />
    ));
  }, [onClickInvestorProfileCard]);

  return (

Investors

Comm is proud to count over 80 individuals & organizations from our community as investors.

{investors}
  );
}

export default Investors;
diff --git a/landing/keyservers.react.js b/landing/keyservers.react.js
index 053d91a62..5373f374f 100644
--- a/landing/keyservers.react.js
+++ b/landing/keyservers.react.js
@@ -1,147 +1,147 @@
// @flow
import { create } from '@lottiefiles/lottie-interactivity';
import * as React from 'react';

-import { useIsomorphicLayoutEffect } from 'lib/hooks/isomorphic-layout-effect.react';
+import { useIsomorphicLayoutEffect } from 'lib/hooks/isomorphic-layout-effect.react.js';

-import { assetsCacheURLPrefix } from './asset-meta-data';
+import { assetsCacheURLPrefix } from './asset-meta-data.js';
import css from './keyservers.css';
-import ReadDocsButton from './read-docs-btn.react';
-import StarBackground from './star-background.react';
+import ReadDocsButton from './read-docs-btn.react.js';
+import StarBackground from './star-background.react.js';

function Keyservers(): React.Node {
  React.useEffect(() => {
    import('@lottiefiles/lottie-player');
  }, []);

  // Scrub the eye illustration between frames 0 and 720 as it scrolls through the viewport.
  const onEyeIllustrationLoad = React.useCallback(() => {
    create({
      mode: 'scroll',
      player: '#eye-illustration',
      actions: [
        {
          visibility: [0, 1],
          type: 'seek',
          frames: [0, 720],
        },
      ],
    });
  }, []);

  // Hold the cloud illustration on frame 0 until it is 20% visible, then scrub frames 0 to 300.
  const onCloudIllustrationLoad = React.useCallback(() => {
    create({
      mode: 'scroll',
      player: '#cloud-illustration',
      actions: [
        {
          visibility: [0, 0.2],
          type: 'stop',
          frames: [0],
        },
        {
          visibility: [0.2, 1],
          type: 'seek',
          frames: [0, 300],
        },
      ],
    });
  }, []);

  const [eyeNode, setEyeNode] = React.useState(null);
  useIsomorphicLayoutEffect(() => {
    if (!eyeNode) {
      return;
    }
    eyeNode.addEventListener('load', onEyeIllustrationLoad);
    return () => eyeNode.removeEventListener('load', onEyeIllustrationLoad);
  }, [eyeNode, onEyeIllustrationLoad]);

  const [cloudNode, setCloudNode] = React.useState(null);
  useIsomorphicLayoutEffect(() => {
    if (!cloudNode) {
      return;
    }
    cloudNode.addEventListener('load', onCloudIllustrationLoad);
    return () =>
      cloudNode.removeEventListener('load', onCloudIllustrationLoad);
  }, [cloudNode, onCloudIllustrationLoad]);

  return (

Reclaim your digital identity.

The Internet is broken today. Private user data is owned by mega-corporations and farmed for their benefit.

E2E encryption has the potential to change this equation. But it's constrained by a crucial limitation.

Apps need servers.

Sophisticated applications rely on servers to do things that your devices simply can't.

That's why E2E encryption only works for simple chat apps today. There's no way to build a robust server layer that has access to your data without leaking that data to corporations.

Comm {' '} is the keyserver{' '} company.

In the future, people have their own servers.

Your keyserver is the home of your digital identity. It owns your private keys and your personal data. It's your password manager, your crypto bank, your digital surrogate, and your second brain.

  );
}

export default Keyservers;
diff --git a/landing/landing-ssr.react.js b/landing/landing-ssr.react.js
index 9c8d10f2c..8d10aa229 100644
--- a/landing/landing-ssr.react.js
+++ b/landing/landing-ssr.react.js
@@ -1,35 +1,35 @@
// @flow
import * as React from 'react';
import { StaticRouter } from 'react-router';

-import Landing from './landing.react';
+import Landing from './landing.react.js';
import { SIWEContext } from './siwe-context.js';

export type LandingSSRProps = {
  +url: string,
  +basename: string,
  +siweNonce: ?string,
  +siwePrimaryIdentityPublicKey: ?string,
};

function LandingSSR(props: LandingSSRProps): React.Node {
  const { url, basename, siweNonce, siwePrimaryIdentityPublicKey } = props;
  const siweContextValue = React.useMemo(
    () => ({
      siweNonce,
      siwePrimaryIdentityPublicKey,
    }),
    [siweNonce, siwePrimaryIdentityPublicKey],
  );
  const routerContext = React.useMemo(() => ({}), []);
  return ( );
}

export default LandingSSR;
diff --git a/landing/landing.react.js b/landing/landing.react.js
index d0f9ebd52..1113cfceb 100644
--- a/landing/landing.react.js
+++ b/landing/landing.react.js
@@ -1,101 +1,101 @@
// @flow
import * as React from 'react';
import { useRouteMatch } from 'react-router-dom';

import {
  ModalProvider,
  useModalContext,
-} from 'lib/components/modal-provider.react';
+} from 'lib/components/modal-provider.react.js';

-import AppLanding from './app-landing.react';
-import Footer from './footer.react';
-import Header from './header.react';
-import Investors from './investors.react';
-import Keyservers from './keyservers.react';
+import AppLanding from './app-landing.react.js';
+import Footer from './footer.react.js';
+import Header from './header.react.js';
+import Investors from './investors.react.js';
+import Keyservers from './keyservers.react.js';
import css from './landing.css';
-import Privacy from './privacy.react';
-import QR from './qr.react';
-import SIWE from './siwe.react';
-import Support from './support.react';
-import Team from './team.react';
-import Terms from './terms.react';
-import useScrollToTopOnNavigate from './use-scroll-to-top-on-navigate.react';
+import Privacy from './privacy.react.js';
+import QR from './qr.react.js';
+import SIWE from './siwe.react.js';
+import Support from './support.react.js';
+import Team from './team.react.js';
+import Terms from './terms.react.js';
+import useScrollToTopOnNavigate from './use-scroll-to-top-on-navigate.react.js';
import './reset.css';
import './global.css';

function Landing(): React.Node {
  const onSIWE = useRouteMatch({ path: '/siwe' });
  if (onSIWE) { return ; }
  return ( );
}

function LandingSite(): React.Node {
  const modalContext = useModalContext();
  const modals = React.useMemo(
    () =>
      modalContext.modals.map(([modal, key]) => (
        {modal}
      )),
    [modalContext.modals],
  );

  useScrollToTopOnNavigate();
  const onPrivacy = useRouteMatch({ path: '/privacy' });
  const onTerms = useRouteMatch({ path: '/terms' });
  const onSupport = useRouteMatch({ path: '/support' });
  const onKeyservers = useRouteMatch({ path: '/keyservers' });
  const onQR = useRouteMatch({ path: '/qr' });
  const onTeam = useRouteMatch({ path: '/team' });
  const onInvestors = useRouteMatch({ path: '/investors' });

  const activePage = React.useMemo(() => {
    if (onPrivacy) { return ; } else if (onTerms) { return ; } else if (onSupport) { return ; } else if (onKeyservers) { return ; } else if (onQR) { return ; } else if (onTeam) { return ; } else if (onInvestors) { return ; } else { return ; }
  }, [onKeyservers, onPrivacy, onSupport, onTerms, onTeam, onInvestors, onQR]);
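  // The /qr route renders without the standard landing header; it is nulled out below when onQR matches.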
let header =
; if (onQR) { header = null; } let footer =