diff --git a/keyserver/src/cron/backups.js b/keyserver/src/cron/backups.js index 06859f7fd..084c63220 100644 --- a/keyserver/src/cron/backups.js +++ b/keyserver/src/cron/backups.js @@ -1,222 +1,204 @@ // @flow import childProcess from 'child_process'; import dateFormat from 'dateformat'; import fs from 'fs'; import invariant from 'invariant'; import { ReReadable } from 'rereadable-stream'; import { PassThrough } from 'stream'; import { promisify } from 'util'; import zlib from 'zlib'; import dbConfig from '../../secrets/db_config'; +import { importJSON } from '../utils/import-json'; const readdir = promisify(fs.readdir); const lstat = promisify(fs.lstat); const unlink = promisify(fs.unlink); -let importedBackupConfig = undefined; -async function importBackupConfig() { - if (importedBackupConfig !== undefined) { - return importedBackupConfig; - } - try { - // $FlowFixMe - const backupExports = await import('../../facts/backups'); - if (importedBackupConfig === undefined) { - importedBackupConfig = backupExports.default; - } - } catch { - if (importedBackupConfig === undefined) { - importedBackupConfig = null; - } - } - return importedBackupConfig; -} - async function backupDB() { - const backupConfig = await importBackupConfig(); + const backupConfig = await importJSON('facts/backups'); if (!backupConfig || !backupConfig.enabled) { return; } const dateString = dateFormat('yyyy-mm-dd-HH:MM'); const filename = `comm.${dateString}.sql.gz`; const filePath = `${backupConfig.directory}/${filename}`; const rawStream = new PassThrough(); (async () => { try { await mysqldump(filename, rawStream, ['--no-data'], { end: false }); } catch {} try { const ignoreReports = `--ignore-table=${dbConfig.database}.reports`; await mysqldump(filename, rawStream, ['--no-create-info', ignoreReports]); } catch { rawStream.end(); } })(); const gzippedBuffer = new ReReadable(); rawStream .on('error', (e: Error) => { console.warn(`mysqldump stdout stream emitted error for ${filename}`, e); })
.pipe(zlib.createGzip()) .on('error', (e: Error) => { console.warn(`gzip transform stream emitted error for ${filename}`, e); }) .pipe(gzippedBuffer); try { await saveBackup(filename, filePath, gzippedBuffer); } catch (e) { console.warn(`saveBackup threw for ${filename}`, e); await unlink(filePath); } } function mysqldump( filename: string, rawStream: PassThrough, extraParams: $ReadOnlyArray<string>, pipeParams?: { end?: boolean, ... }, ): Promise<void> { const mysqlDump = childProcess.spawn( 'mysqldump', [ '-u', dbConfig.user, `-p${dbConfig.password}`, '--single-transaction', '--no-tablespaces', ...extraParams, dbConfig.database, ], { stdio: ['ignore', 'pipe', 'ignore'], }, ); const extraParamsString = extraParams.join(' '); return new Promise((resolve, reject) => { mysqlDump.on('error', (e: Error) => { console.warn( `error trying to spawn mysqldump ${extraParamsString} for ${filename}`, e, ); reject(e); }); mysqlDump.on('exit', (code: number | null, signal: string | null) => { if (signal !== null && signal !== undefined) { console.warn( `mysqldump ${extraParamsString} received signal ${signal} for ` + filename, ); reject(new Error(`mysqldump ${JSON.stringify({ code, signal })}`)); } else if (code !== null && code !== 0) { console.warn( `mysqldump ${extraParamsString} exited with code ${code} for ` + filename, ); reject(new Error(`mysqldump ${JSON.stringify({ code, signal })}`)); } resolve(); }); mysqlDump.stdout.pipe(rawStream, pipeParams); }); } async function saveBackup( filename: string, filePath: string, gzippedBuffer: ReReadable, retries: number = 2, ): Promise<void> { try { await trySaveBackup(filename, filePath, gzippedBuffer); } catch (saveError) { if (saveError.code !== 'ENOSPC') { throw saveError; } if (!retries) { throw saveError; } try { await deleteOldestBackup(); } catch (deleteError) { if (deleteError.message === 'no_backups_left') { throw saveError; } else { throw deleteError; } } await saveBackup(filename, filePath, gzippedBuffer, retries - 1); } } const 
backupWatchFrequency = 60 * 1000; function trySaveBackup( filename: string, filePath: string, gzippedBuffer: ReReadable, ): Promise<void> { const timeoutObject: { timeout: ?TimeoutID } = { timeout: null }; const setBackupTimeout = (alreadyWaited: number) => { timeoutObject.timeout = setTimeout(() => { const nowWaited = alreadyWaited + backupWatchFrequency; console.log( `writing backup for ${filename} has taken ${nowWaited}ms so far`, ); setBackupTimeout(nowWaited); }, backupWatchFrequency); }; setBackupTimeout(0); const writeStream = fs.createWriteStream(filePath); return new Promise((resolve, reject) => { gzippedBuffer .rewind() .pipe(writeStream) .on('finish', () => { clearTimeout(timeoutObject.timeout); resolve(); }) .on('error', (e: Error) => { clearTimeout(timeoutObject.timeout); console.warn(`write stream emitted error for ${filename}`, e); reject(e); }); }); } async function deleteOldestBackup() { - const backupConfig = await importBackupConfig(); + const backupConfig = await importJSON('facts/backups'); invariant(backupConfig, 'backupConfig should be non-null'); const files = await readdir(backupConfig.directory); let oldestFile; for (const file of files) { if (!file.endsWith('.sql.gz') || !file.startsWith('comm.')) { continue; } const stat = await lstat(`${backupConfig.directory}/${file}`); if (stat.isDirectory()) { continue; } if (!oldestFile || stat.mtime < oldestFile.mtime) { oldestFile = { file, mtime: stat.mtime }; } } if (!oldestFile) { throw new Error('no_backups_left'); } try { await unlink(`${backupConfig.directory}/${oldestFile.file}`); } catch (e) { // Check if it's already been deleted if (e.code !== 'ENOENT') { throw e; } } } export { backupDB }; diff --git a/keyserver/src/cron/update-geoip-db.js b/keyserver/src/cron/update-geoip-db.js index c261eafb1..ce84032d7 100644 --- a/keyserver/src/cron/update-geoip-db.js +++ b/keyserver/src/cron/update-geoip-db.js @@ -1,77 +1,59 @@ // @flow import childProcess from 'child_process'; import cluster from 
'cluster'; import geoip from 'geoip-lite'; import { handleAsyncPromise } from '../responders/handlers'; - -let cachedGeoipLicense = undefined; -async function getGeoipLicense() { - if (cachedGeoipLicense !== undefined) { - return cachedGeoipLicense; - } - try { - // $FlowFixMe - const geoipLicenseImport = await import('../../secrets/geoip_license'); - if (cachedGeoipLicense === undefined) { - cachedGeoipLicense = geoipLicenseImport.default; - } - } catch { - if (cachedGeoipLicense === undefined) { - cachedGeoipLicense = null; - } - } - return cachedGeoipLicense; -} +import { importJSON } from '../utils/import-json'; async function updateGeoipDB(): Promise<void> { - const geoipLicense = await getGeoipLicense(); + const geoipLicense = await importJSON('secrets/geoip_license'); if (!geoipLicense) { console.log('no keyserver/secrets/geoip_license.json so skipping update'); return; } await spawnUpdater(geoipLicense); } function spawnUpdater(geoipLicense: { key: string }): Promise<void> { const spawned = childProcess.spawn(process.execPath, [ '../node_modules/geoip-lite/scripts/updatedb.js', `license_key=${geoipLicense.key}`, ]); return new Promise((resolve, reject) => { spawned.on('error', reject); spawned.on('exit', () => resolve()); }); } function reloadGeoipDB(): Promise<void> { return new Promise(resolve => geoip.reloadData(resolve)); } type IPCMessage = { type: 'geoip_reload', }; const reloadMessage: IPCMessage = { type: 'geoip_reload' }; async function updateAndReloadGeoipDB(): Promise<void> { await updateGeoipDB(); await reloadGeoipDB(); if (!cluster.isMaster) { return; } for (const id in cluster.workers) { cluster.workers[Number(id)].send(reloadMessage); } } if (!cluster.isMaster) { process.on('message', (ipcMessage: IPCMessage) => { if (ipcMessage.type === 'geoip_reload') { handleAsyncPromise(reloadGeoipDB()); } }); } export { updateGeoipDB, updateAndReloadGeoipDB }; diff --git a/keyserver/src/push/providers.js b/keyserver/src/push/providers.js index 9a4c4f848..fd33d1bf9 100644 --- 
a/keyserver/src/push/providers.js +++ b/keyserver/src/push/providers.js @@ -1,88 +1,91 @@ // @flow import apn from '@parse/node-apn'; import type { Provider as APNProvider } from '@parse/node-apn'; import fcmAdmin from 'firebase-admin'; import type { FirebaseApp } from 'firebase-admin'; +import invariant from 'invariant'; + +import { importJSON } from '../utils/import-json'; type APNPushProfile = 'apn_config' | 'comm_apn_config'; function getAPNPushProfileForCodeVersion(codeVersion: ?number): APNPushProfile { return codeVersion && codeVersion >= 87 ? 'comm_apn_config' : 'apn_config'; } type FCMPushProfile = 'fcm_config' | 'comm_fcm_config'; function getFCMPushProfileForCodeVersion(codeVersion: ?number): FCMPushProfile { return codeVersion && codeVersion >= 87 ? 'comm_fcm_config' : 'fcm_config'; } const cachedAPNProviders = new Map(); async function getAPNProvider(profile: APNPushProfile): Promise<?APNProvider> { const provider = cachedAPNProviders.get(profile); if (provider !== undefined) { return provider; } try { - // $FlowFixMe - const apnConfig = await import(`../../secrets/${profile}`); + const apnConfig = await importJSON(`secrets/${profile}`); + invariant(apnConfig, `APN config missing for ${profile}`); if (!cachedAPNProviders.has(profile)) { - cachedAPNProviders.set(profile, new apn.Provider(apnConfig.default)); + cachedAPNProviders.set(profile, new apn.Provider(apnConfig)); } } catch { if (!cachedAPNProviders.has(profile)) { cachedAPNProviders.set(profile, null); } } return cachedAPNProviders.get(profile); } const cachedFCMProviders = new Map(); async function getFCMProvider(profile: FCMPushProfile): Promise<?FirebaseApp> { const provider = cachedFCMProviders.get(profile); if (provider !== undefined) { return provider; } try { - // $FlowFixMe - const fcmConfig = await import(`../../secrets/${profile}`); + const fcmConfig = await importJSON(`secrets/${profile}`); + invariant(fcmConfig, `FCM config missed for ${profile}`); if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set( 
profile, fcmAdmin.initializeApp( { - credential: fcmAdmin.credential.cert(fcmConfig.default), + credential: fcmAdmin.credential.cert(fcmConfig), }, profile, ), ); } } catch { if (!cachedFCMProviders.has(profile)) { cachedFCMProviders.set(profile, null); } } return cachedFCMProviders.get(profile); } function endFirebase() { fcmAdmin.apps?.forEach(app => app?.delete()); } function endAPNs() { for (const provider of cachedAPNProviders.values()) { provider?.shutdown(); } } function getAPNsNotificationTopic(codeVersion: ?number): string { return codeVersion && codeVersion >= 87 ? 'app.comm' : 'org.squadcal.app'; } export { getAPNPushProfileForCodeVersion, getFCMPushProfileForCodeVersion, getAPNProvider, getFCMProvider, endFirebase, endAPNs, getAPNsNotificationTopic, }; diff --git a/keyserver/src/utils/import-json.js b/keyserver/src/utils/import-json.js new file mode 100644 index 000000000..6719b10b6 --- /dev/null +++ b/keyserver/src/utils/import-json.js @@ -0,0 +1,23 @@ +// @flow + +const cachedJSON = new Map(); +async function importJSON(path: string): Promise<?Object> { + const cached = cachedJSON.get(path); + if (cached !== undefined) { + return cached; + } + try { + // $FlowFixMe + const importedJSON = await import(`../../${path}`); + if (!cachedJSON.has(path)) { + cachedJSON.set(path, importedJSON.default); + } + } catch { + if (!cachedJSON.has(path)) { + cachedJSON.set(path, null); + } + } + return cachedJSON.get(path); +} + +export { importJSON }; diff --git a/keyserver/src/utils/urls.js b/keyserver/src/utils/urls.js index b652b6937..f2fa36f02 100644 --- a/keyserver/src/utils/urls.js +++ b/keyserver/src/utils/urls.js @@ -1,98 +1,91 @@ // @flow import invariant from 'invariant'; import { values } from 'lib/utils/objects'; +import { importJSON } from './import-json'; + export type AppURLFacts = { +baseDomain: string, +basePath: string, +https: boolean, +baseRoutePath: string, }; const sitesObj = Object.freeze({ a: 'landing', b: 'commapp', c: 'squadcal', }); export type Site 
= $Values<typeof sitesObj>; const sites: $ReadOnlyArray<Site> = values(sitesObj); const cachedURLFacts = new Map(); async function fetchURLFacts(site: Site): Promise<?AppURLFacts> { - const cached = cachedURLFacts.get(site); - if (cached !== undefined) { - return cached; - } - try { - // $FlowFixMe - const urlFacts = await import(`../../facts/${site}_url`); - if (!cachedURLFacts.has(site)) { - cachedURLFacts.set(site, urlFacts.default); - } - } catch { - if (!cachedURLFacts.has(site)) { - cachedURLFacts.set(site, null); - } + const existing = cachedURLFacts.get(site); + if (existing !== undefined) { + return existing; } - return cachedURLFacts.get(site); + const urlFacts: ?AppURLFacts = await importJSON(`facts/${site}_url`); + cachedURLFacts.set(site, urlFacts); + return urlFacts; } async function prefetchAllURLFacts() { await Promise.all(sites.map(fetchURLFacts)); } function getSquadCalURLFacts(): ?AppURLFacts { return cachedURLFacts.get('squadcal'); } function getCommAppURLFacts(): ?AppURLFacts { return cachedURLFacts.get('commapp'); } function getAndAssertCommAppURLFacts(): AppURLFacts { const urlFacts = getCommAppURLFacts(); invariant(urlFacts, 'keyserver/facts/commapp_url.json missing'); return urlFacts; } function getAppURLFactsFromRequestURL(url: string): AppURLFacts { const commURLFacts = getCommAppURLFacts(); if (commURLFacts && url.startsWith(commURLFacts.baseRoutePath)) { return commURLFacts; } const squadCalURLFacts = getSquadCalURLFacts(); if (squadCalURLFacts) { return squadCalURLFacts; } invariant(false, 'request received but no URL facts are present'); } function getLandingURLFacts(): ?AppURLFacts { return cachedURLFacts.get('landing'); } function getAndAssertLandingURLFacts(): AppURLFacts { const urlFacts = getLandingURLFacts(); invariant(urlFacts, 'keyserver/facts/landing_url.json missing'); return urlFacts; } function clientPathFromRouterPath( routerPath: string, urlFacts: AppURLFacts, ): string { const { basePath } = urlFacts; return basePath + routerPath; } export { 
prefetchAllURLFacts, getSquadCalURLFacts, getCommAppURLFacts, getAndAssertCommAppURLFacts, getLandingURLFacts, getAndAssertLandingURLFacts, getAppURLFactsFromRequestURL, clientPathFromRouterPath, };