diff --git a/keyserver/src/cron/backups.js b/keyserver/src/cron/backups.js index 7fa35bd35..0c64c2e4e 100644 --- a/keyserver/src/cron/backups.js +++ b/keyserver/src/cron/backups.js @@ -1,277 +1,278 @@ // @flow import childProcess from 'child_process'; import dateFormat from 'dateformat'; import fs from 'fs'; import invariant from 'invariant'; import { ReReadable } from 'rereadable-stream'; import { PassThrough } from 'stream'; import { promisify } from 'util'; import zlib from 'zlib'; +import { getCommConfig } from 'lib/utils/comm-config.js'; + import { getDBConfig, type DBConfig } from '../database/db-config.js'; -import { importJSON } from '../utils/import-json.js'; const readdir = promisify(fs.readdir); const lstat = promisify(fs.lstat); const unlink = promisify(fs.unlink); type BackupConfig = { +enabled: boolean, +directory: string, +maxDirSizeMiB?: ?number, }; function getBackupConfig(): Promise { - return importJSON({ folder: 'facts', name: 'backups' }); + return getCommConfig({ folder: 'facts', name: 'backups' }); } async function backupDB() { const [backupConfig, dbConfig] = await Promise.all([ getBackupConfig(), getDBConfig(), ]); if (!backupConfig || !backupConfig.enabled) { return; } const dateString = dateFormat('yyyy-mm-dd-HH:MM'); const filename = `comm.${dateString}.sql.gz`; const filePath = `${backupConfig.directory}/${filename}`; const rawStream = new PassThrough(); (async () => { try { await mysqldump(dbConfig, filename, rawStream, ['--no-data'], { end: false, }); } catch {} try { const ignoreReports = `--ignore-table=${dbConfig.database}.reports`; await mysqldump(dbConfig, filename, rawStream, [ '--no-create-info', ignoreReports, ]); } catch { rawStream.end(); } })(); const gzippedBuffer = new ReReadable(); rawStream .on('error', (e: Error) => { console.warn(`mysqldump stdout stream emitted error for ${filename}`, e); }) .pipe(zlib.createGzip()) .on('error', (e: Error) => { console.warn(`gzip transform stream emitted error for ${filename}`, e); 
}) .pipe(gzippedBuffer); try { await saveBackup(filename, filePath, gzippedBuffer); } catch (e) { console.warn(`saveBackup threw for ${filename}`, e); await unlink(filePath); } await deleteOldBackupsIfSpaceExceeded(); } function mysqldump( dbConfig: DBConfig, filename: string, rawStream: PassThrough, extraParams: $ReadOnlyArray, pipeParams?: { end?: boolean, ... }, ): Promise { const mysqlDump = childProcess.spawn( 'mysqldump', [ '-h', dbConfig.host, '-u', dbConfig.user, `-p${dbConfig.password}`, '--single-transaction', '--no-tablespaces', '--default-character-set=utf8mb4', '--net-buffer-length=523264', ...extraParams, dbConfig.database, ], { stdio: ['ignore', 'pipe', 'ignore'], }, ); const extraParamsString = extraParams.join(' '); return new Promise((resolve, reject) => { mysqlDump.on('error', (e: Error) => { console.warn( `error trying to spawn mysqldump ${extraParamsString} for ${filename}`, e, ); reject(e); }); mysqlDump.on('exit', (code: number | null, signal: string | null) => { if (signal !== null && signal !== undefined) { console.warn( `mysqldump ${extraParamsString} received signal ${signal} for ` + filename, ); reject(new Error(`mysqldump ${JSON.stringify({ code, signal })}`)); } else if (code !== null && code !== 0) { console.warn( `mysqldump ${extraParamsString} exited with code ${code} for ` + filename, ); reject(new Error(`mysqldump ${JSON.stringify({ code, signal })}`)); } resolve(); }); mysqlDump.stdout.pipe(rawStream, pipeParams); }); } async function saveBackup( filename: string, filePath: string, gzippedBuffer: ReReadable, retries: number = 2, ): Promise { try { await trySaveBackup(filename, filePath, gzippedBuffer); } catch (saveError) { if (saveError.code !== 'ENOSPC') { throw saveError; } if (!retries) { throw saveError; } try { await deleteOldestBackup(); } catch (deleteError) { if (deleteError.message === 'no_backups_left') { throw saveError; } else { throw deleteError; } } await saveBackup(filename, filePath, gzippedBuffer, retries - 1); 
} } const backupWatchFrequency = 60 * 1000; function trySaveBackup( filename: string, filePath: string, gzippedBuffer: ReReadable, ): Promise { const timeoutObject: { timeout: ?TimeoutID } = { timeout: null }; const setBackupTimeout = (alreadyWaited: number) => { timeoutObject.timeout = setTimeout(() => { const nowWaited = alreadyWaited + backupWatchFrequency; console.log( `writing backup for ${filename} has taken ${nowWaited}ms so far`, ); setBackupTimeout(nowWaited); }, backupWatchFrequency); }; setBackupTimeout(0); const writeStream = fs.createWriteStream(filePath); return new Promise((resolve, reject) => { gzippedBuffer .rewind() .pipe(writeStream) .on('finish', () => { clearTimeout(timeoutObject.timeout); resolve(); }) .on('error', (e: Error) => { clearTimeout(timeoutObject.timeout); console.warn(`write stream emitted error for ${filename}`, e); reject(e); }); }); } async function deleteOldestBackup() { const sortedBackupInfos = await getSortedBackupInfos(); if (sortedBackupInfos.length === 0) { throw new Error('no_backups_left'); } const oldestFilename = sortedBackupInfos[0].filename; await deleteBackup(oldestFilename); } async function deleteBackup(filename: string) { const backupConfig = await getBackupConfig(); invariant(backupConfig, 'backupConfig should be non-null'); try { await unlink(`${backupConfig.directory}/${filename}`); } catch (e) { // Check if it's already been deleted if (e.code !== 'ENOENT') { throw e; } } } type BackupInfo = { +filename: string, +lastModifiedTime: number, +bytes: number, }; async function getSortedBackupInfos(): Promise { const backupConfig = await getBackupConfig(); invariant(backupConfig, 'backupConfig should be non-null'); const filenames = await readdir(backupConfig.directory); const backups = await Promise.all( filenames.map(async filename => { if (!filename.startsWith('comm.') || !filename.endsWith('.sql.gz')) { return null; } const stats = await lstat(`${backupConfig.directory}/${filename}`); if (stats.isDirectory()) 
{ return null; } return { filename, lastModifiedTime: stats.mtime, bytes: stats.size, }; }), ); const filteredBackups = backups.filter(Boolean); filteredBackups.sort((a, b) => a.lastModifiedTime - b.lastModifiedTime); return filteredBackups; } async function deleteOldBackupsIfSpaceExceeded() { const backupConfig = await getBackupConfig(); invariant(backupConfig, 'backupConfig should be non-null'); const { maxDirSizeMiB } = backupConfig; if (!maxDirSizeMiB) { return; } const sortedBackupInfos = await getSortedBackupInfos(); const mostRecentBackup = sortedBackupInfos.pop(); let bytesLeft = maxDirSizeMiB * 1024 * 1024 - mostRecentBackup.bytes; const deleteBackupPromises = []; for (let i = sortedBackupInfos.length - 1; i >= 0; i--) { const backupInfo = sortedBackupInfos[i]; bytesLeft -= backupInfo.bytes; if (bytesLeft <= 0) { deleteBackupPromises.push(deleteBackup(backupInfo.filename)); } } await Promise.all(deleteBackupPromises); } export { backupDB }; diff --git a/keyserver/src/cron/update-geoip-db.js b/keyserver/src/cron/update-geoip-db.js index 4f14efe6c..b40e70b09 100644 --- a/keyserver/src/cron/update-geoip-db.js +++ b/keyserver/src/cron/update-geoip-db.js @@ -1,62 +1,63 @@ // @flow import childProcess from 'child_process'; import cluster from 'cluster'; import geoip from 'geoip-lite'; +import { getCommConfig } from 'lib/utils/comm-config.js'; + import { handleAsyncPromise } from '../responders/handlers.js'; -import { importJSON } from '../utils/import-json.js'; async function updateGeoipDB(): Promise { - const geoipLicense = await importJSON({ + const geoipLicense = await getCommConfig({ folder: 'secrets', name: 'geoip_license', }); if (!geoipLicense) { console.log('no keyserver/secrets/geoip_license.json so skipping update'); return; } await spawnUpdater(geoipLicense); } function spawnUpdater(geoipLicense: { key: string }): Promise { const spawned = childProcess.spawn(process.execPath, [ '../node_modules/geoip-lite/scripts/updatedb.js', 
`license_key=${geoipLicense.key}`, ]); return new Promise((resolve, reject) => { spawned.on('error', reject); spawned.on('exit', () => resolve()); }); } function reloadGeoipDB(): Promise { return new Promise(resolve => geoip.reloadData(resolve)); } type IPCMessage = { type: 'geoip_reload', }; const reloadMessage: IPCMessage = { type: 'geoip_reload' }; async function updateAndReloadGeoipDB(): Promise { await updateGeoipDB(); await reloadGeoipDB(); if (!cluster.isMaster) { return; } for (const id in cluster.workers) { cluster.workers[Number(id)].send(reloadMessage); } } if (!cluster.isMaster) { process.on('message', (ipcMessage: IPCMessage) => { if (ipcMessage.type === 'geoip_reload') { handleAsyncPromise(reloadGeoipDB()); } }); } export { updateGeoipDB, updateAndReloadGeoipDB }; diff --git a/keyserver/src/database/db-config.js b/keyserver/src/database/db-config.js index 55082fd61..8dae4525c 100644 --- a/keyserver/src/database/db-config.js +++ b/keyserver/src/database/db-config.js @@ -1,70 +1,70 @@ // @flow import invariant from 'invariant'; -import { importJSON } from '../utils/import-json.js'; +import { getCommConfig } from 'lib/utils/comm-config.js'; type DBType = 'mariadb10.8'; export type DBConfig = { +host: string, +user: string, +password: string, +database: string, +dbType: DBType, }; function assertValidDBType(dbType: ?string): DBType { invariant( dbType, 'dbType not specified in DB config. Following the MySQL deprecation this ' + 'is a required parameter. Please follow this Gist to migrate to ' + 'MariaDB: ' + 'https://gist.github.com/Ashoat/3a5ded2549db082c5516606f3c3c5da5', ); invariant( dbType !== 'mysql5.7', 'We no longer support MySQL. 
Please follow this Gist to migrate to ' + 'MariaDB: ' + 'https://gist.github.com/Ashoat/3a5ded2549db082c5516606f3c3c5da5', ); invariant(dbType === 'mariadb10.8', `${dbType} is not a valid dbType`); return dbType; } let dbConfig; async function getDBConfig(): Promise { if (dbConfig !== undefined) { return dbConfig; } if ( process.env.COMM_DATABASE_DATABASE && process.env.COMM_DATABASE_USER && process.env.COMM_DATABASE_PASSWORD ) { dbConfig = { host: process.env.COMM_DATABASE_HOST || 'localhost', user: process.env.COMM_DATABASE_USER, password: process.env.COMM_DATABASE_PASSWORD, database: process.env.COMM_DATABASE_DATABASE, dbType: assertValidDBType(process.env.COMM_DATABASE_TYPE), }; } else { - const importedDBConfig = await importJSON({ + const importedDBConfig = await getCommConfig({ folder: 'secrets', name: 'db_config', }); invariant(importedDBConfig, 'DB config missing'); dbConfig = { ...importedDBConfig, dbType: assertValidDBType(importedDBConfig.dbType), }; } return dbConfig; } async function getDBType(): Promise { const config = await getDBConfig(); return config.dbType; } export { getDBConfig, getDBType }; diff --git a/keyserver/src/emails/sendmail.js b/keyserver/src/emails/sendmail.js index 464aaaa85..835c58f2a 100644 --- a/keyserver/src/emails/sendmail.js +++ b/keyserver/src/emails/sendmail.js @@ -1,55 +1,54 @@ // @flow import invariant from 'invariant'; import nodemailer from 'nodemailer'; +import { getCommConfig } from 'lib/utils/comm-config.js'; import { isDev } from 'lib/utils/dev-utils.js'; -import { importJSON } from '../utils/import-json.js'; - type MailInfo = { +from: string, +to: string, +subject: string, +html: string, ... }; type Transport = { +sendMail: (info: MailInfo) => Promise, ... 
}; type PostmarkConfig = { +apiToken: string, }; let cachedTransport: ?Transport; async function getSendmail(): Promise { if (cachedTransport) { return cachedTransport; } - const postmark: ?PostmarkConfig = await importJSON({ + const postmark: ?PostmarkConfig = await getCommConfig({ folder: 'secrets', name: 'postmark', }); if (isDev && !postmark) { cachedTransport = nodemailer.createTransport({ sendmail: true }); return cachedTransport; } invariant(postmark, 'Postmark config missing'); cachedTransport = nodemailer.createTransport({ host: 'smtp.postmarkapp.com', port: 587, secure: false, auth: { user: postmark.apiToken, pass: postmark.apiToken, }, requireTLS: true, }); return cachedTransport; } export default getSendmail; diff --git a/keyserver/src/push/providers.js b/keyserver/src/push/providers.js index 61bcaa88d..8e2d434cc 100644 --- a/keyserver/src/push/providers.js +++ b/keyserver/src/push/providers.js @@ -1,135 +1,134 @@ // @flow import apn from '@parse/node-apn'; import type { Provider as APNProvider } from '@parse/node-apn'; import fcmAdmin from 'firebase-admin'; import type { FirebaseApp } from 'firebase-admin'; import invariant from 'invariant'; import webpush from 'web-push'; import type { PlatformDetails } from 'lib/types/device-types'; - -import { importJSON } from '../utils/import-json.js'; +import { getCommConfig } from 'lib/utils/comm-config.js'; type APNPushProfile = 'apn_config' | 'comm_apn_config'; function getAPNPushProfileForCodeVersion( platformDetails: PlatformDetails, ): APNPushProfile { if (platformDetails.platform === 'macos') { return 'comm_apn_config'; } return platformDetails.codeVersion && platformDetails.codeVersion >= 87 ? 'comm_apn_config' : 'apn_config'; } type FCMPushProfile = 'fcm_config' | 'comm_fcm_config'; function getFCMPushProfileForCodeVersion(codeVersion: ?number): FCMPushProfile { return codeVersion && codeVersion >= 87 ? 
'comm_fcm_config' : 'fcm_config'; } const cachedAPNProviders = new Map(); async function getAPNProvider(profile: APNPushProfile): Promise { const provider = cachedAPNProviders.get(profile); if (provider !== undefined) { return provider; } try { - const apnConfig = await importJSON({ folder: 'secrets', name: profile }); + const apnConfig = await getCommConfig({ folder: 'secrets', name: profile }); invariant(apnConfig, `APN config missing for ${profile}`); if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, new apn.Provider(apnConfig)); } } catch { if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, null); } } return cachedAPNProviders.get(profile); } const cachedFCMProviders = new Map(); async function getFCMProvider(profile: FCMPushProfile): Promise { const provider = cachedFCMProviders.get(profile); if (provider !== undefined) { return provider; } try { - const fcmConfig = await importJSON({ folder: 'secrets', name: profile }); + const fcmConfig = await getCommConfig({ folder: 'secrets', name: profile }); invariant(fcmConfig, `FCM config missed for ${profile}`); if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set( profile, fcmAdmin.initializeApp( { credential: fcmAdmin.credential.cert(fcmConfig), }, profile, ), ); } } catch { if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set(profile, null); } } return cachedFCMProviders.get(profile); } function endFirebase() { fcmAdmin.apps?.forEach(app => app?.delete()); } function endAPNs() { for (const provider of cachedAPNProviders.values()) { provider?.shutdown(); } } function getAPNsNotificationTopic(platformDetails: PlatformDetails): string { if (platformDetails.platform === 'macos') { return 'app.comm.macos'; } return platformDetails.codeVersion && platformDetails.codeVersion >= 87 ? 
'app.comm' : 'org.squadcal.app'; } type WebPushConfig = { +publicKey: string, +privateKey: string }; let cachedWebPushConfig: ?WebPushConfig = null; async function getWebPushConfig(): Promise { if (cachedWebPushConfig) { return cachedWebPushConfig; } - cachedWebPushConfig = await importJSON({ + cachedWebPushConfig = await getCommConfig({ folder: 'secrets', name: 'web_push_config', }); if (cachedWebPushConfig) { webpush.setVapidDetails( 'mailto:support@comm.app', cachedWebPushConfig.publicKey, cachedWebPushConfig.privateKey, ); } return cachedWebPushConfig; } async function ensureWebPushInitialized() { if (cachedWebPushConfig) { return; } await getWebPushConfig(); } export { getAPNPushProfileForCodeVersion, getFCMPushProfileForCodeVersion, getAPNProvider, getFCMProvider, endFirebase, endAPNs, getAPNsNotificationTopic, getWebPushConfig, ensureWebPushInitialized, }; diff --git a/keyserver/src/utils/olm-utils.js b/keyserver/src/utils/olm-utils.js index 796d5052b..6ece83e83 100644 --- a/keyserver/src/utils/olm-utils.js +++ b/keyserver/src/utils/olm-utils.js @@ -1,29 +1,32 @@ // @flow import olm from '@matrix-org/olm'; import type { Utility as OlmUtility } from '@matrix-org/olm'; import invariant from 'invariant'; -import { importJSON } from './import-json.js'; +import { getCommConfig } from 'lib/utils/comm-config.js'; type OlmConfig = { +picklingKey: string, +pickledAccount: string, }; async function getOlmConfig(): Promise { - const olmConfig = await importJSON({ folder: 'secrets', name: 'olm_config' }); + const olmConfig = await getCommConfig({ + folder: 'secrets', + name: 'olm_config', + }); invariant(olmConfig, 'OLM config missing'); return olmConfig; } let cachedOLMUtility: OlmUtility; function getOlmUtility(): OlmUtility { if (cachedOLMUtility) { return cachedOLMUtility; } cachedOLMUtility = new olm.Utility(); return cachedOLMUtility; } export { getOlmConfig, getOlmUtility }; diff --git a/keyserver/src/utils/urls.js b/keyserver/src/utils/urls.js index 
8240c050f..be6744195 100644 --- a/keyserver/src/utils/urls.js +++ b/keyserver/src/utils/urls.js @@ -1,94 +1,93 @@ // @flow import invariant from 'invariant'; +import { getCommConfig } from 'lib/utils/comm-config.js'; import { values } from 'lib/utils/objects.js'; -import { importJSON } from './import-json.js'; - export type AppURLFacts = { +baseDomain: string, +basePath: string, +https: boolean, +baseRoutePath: string, +proxy?: 'apache' | 'none', // defaults to apache }; const validProxies = new Set(['apache', 'none']); const sitesObj = Object.freeze({ a: 'landing', b: 'commapp', c: 'squadcal', }); export type Site = $Values; const sites: $ReadOnlyArray = values(sitesObj); const cachedURLFacts = new Map(); async function fetchURLFacts(site: Site): Promise { const existing = cachedURLFacts.get(site); if (existing !== undefined) { return existing; } - let urlFacts: ?AppURLFacts = await importJSON({ + let urlFacts: ?AppURLFacts = await getCommConfig({ folder: 'facts', name: `${site}_url`, }); if (urlFacts) { const { proxy } = urlFacts; urlFacts = { ...urlFacts, proxy: validProxies.has(proxy) ? 
proxy : 'apache', }; } cachedURLFacts.set(site, urlFacts); return urlFacts; } async function prefetchAllURLFacts() { await Promise.all(sites.map(fetchURLFacts)); } function getSquadCalURLFacts(): ?AppURLFacts { return cachedURLFacts.get('squadcal'); } function getCommAppURLFacts(): ?AppURLFacts { return cachedURLFacts.get('commapp'); } function getAndAssertCommAppURLFacts(): AppURLFacts { const urlFacts = getCommAppURLFacts(); invariant(urlFacts, 'keyserver/facts/commapp_url.json missing'); return urlFacts; } function getAppURLFactsFromRequestURL(url: string): AppURLFacts { const commURLFacts = getCommAppURLFacts(); if (commURLFacts && url.startsWith(commURLFacts.baseRoutePath)) { return commURLFacts; } const squadCalURLFacts = getSquadCalURLFacts(); if (squadCalURLFacts) { return squadCalURLFacts; } invariant(false, 'request received but no URL facts are present'); } function getLandingURLFacts(): ?AppURLFacts { return cachedURLFacts.get('landing'); } function getAndAssertLandingURLFacts(): AppURLFacts { const urlFacts = getLandingURLFacts(); invariant(urlFacts, 'keyserver/facts/landing_url.json missing'); return urlFacts; } export { prefetchAllURLFacts, getSquadCalURLFacts, getCommAppURLFacts, getAndAssertCommAppURLFacts, getLandingURLFacts, getAndAssertLandingURLFacts, getAppURLFactsFromRequestURL, }; diff --git a/keyserver/src/utils/import-json.js b/lib/utils/comm-config.js similarity index 82% rename from keyserver/src/utils/import-json.js rename to lib/utils/comm-config.js index 028e87122..0ec739e70 100644 --- a/keyserver/src/utils/import-json.js +++ b/lib/utils/comm-config.js @@ -1,60 +1,64 @@ // @flow import fs from 'fs'; import { promisify } from 'util'; const readFile = promisify(fs.readFile); type ConfigName = { +folder: 'secrets' | 'facts', +name: string, }; function getKeyForConfigName(configName: ConfigName): string { return `${configName.folder}_${configName.name}`; } function getPathForConfigName(configName: ConfigName): string { return 
`${configName.folder}/${configName.name}.json`; } const cachedJSON = new Map(); -async function importJSON(configName: ConfigName): Promise { + +// This function checks for an env var named COMM_JSONCONFIG_{folder}_{name} +// If it doesn't find one, it then looks for keyserver/{folder}/{name}.json +// In both cases, it expects to find a JSON blob +async function getCommConfig(configName: ConfigName): Promise { const key = getKeyForConfigName(configName); const cached = cachedJSON.get(key); if (cached !== undefined) { return cached; } const json = await getJSON(configName); if (!cachedJSON.has(key)) { cachedJSON.set(key, json); } return cachedJSON.get(key); } async function getJSON(configName: ConfigName): Promise { const key = getKeyForConfigName(configName); const fromEnv = process.env[`COMM_JSONCONFIG_${key}`]; if (fromEnv) { try { return JSON.parse(fromEnv); } catch (e) { console.log( `failed to parse JSON from env for ${JSON.stringify(configName)}`, e, ); } } const path = getPathForConfigName(configName); try { const pathString = await readFile(path, 'utf8'); return JSON.parse(pathString); } catch (e) { if (e.code !== 'ENOENT') { console.log(`Failed to read JSON from ${path}`, e); } return null; } } -export { importJSON }; +export { getCommConfig };