diff --git a/keyserver/src/deleters/upload-deleters.js b/keyserver/src/deleters/upload-deleters.js
index 4f44ed1d2..5bbf34983 100644
--- a/keyserver/src/deleters/upload-deleters.js
+++ b/keyserver/src/deleters/upload-deleters.js
@@ -1,56 +1,79 @@
 // @flow
 
 import { ServerError } from 'lib/utils/errors.js';
 
 import { dbQuery, SQL } from '../database/database.js';
+import { deleteBlob, removeBlobHolders } from '../services/blob.js';
 import type { Viewer } from '../session/viewer.js';
+import { blobHoldersFromUploadRows } from '../uploads/media-utils.js';
 
 async function deleteUpload(viewer: Viewer, id: string): Promise<void> {
   if (!viewer.loggedIn) {
     throw new ServerError('not_logged_in');
   }
 
   const fetchQuery = SQL`
-    SELECT uploader, container, user_container AS userContainer
+    SELECT uploader, container, user_container AS userContainer, extra
     FROM uploads
     WHERE id = ${id}
   `;
   const [result] = await dbQuery(fetchQuery);
   if (result.length === 0) {
     throw new ServerError('invalid_parameters');
   }
   const [row] = result;
-  const { uploader, container, userContainer } = row;
+  const { uploader, container, userContainer, extra } = row;
   if (
     uploader.toString() !== viewer.userID ||
     container !== null ||
     userContainer !== null
   ) {
     throw new ServerError('invalid_parameters');
   }
 
+  const uploadExtra = extra ? JSON.parse(extra) : null;
+  if (uploadExtra && uploadExtra.blobHash && uploadExtra.blobHolder) {
+    await deleteBlob({
+      hash: uploadExtra.blobHash,
+      holder: uploadExtra.blobHolder,
+    });
+  }
+
   const deleteQuery = SQL`
     DELETE u, i
     FROM uploads u
     LEFT JOIN ids i ON i.id = u.id
     WHERE u.id = ${id}
   `;
   await dbQuery(deleteQuery);
 }
 
 const maxUnassignedUploadAge = 24 * 60 * 60 * 1000;
 async function deleteUnassignedUploads(): Promise<void> {
   const oldestUnassignedUploadToKeep = Date.now() - maxUnassignedUploadAge;
-  await dbQuery(SQL`
+
+  const holdersQuery = SQL`
+    SELECT extra
+    FROM uploads
+    WHERE container IS NULL
+      AND user_container IS NULL
+      AND creation_time < ${oldestUnassignedUploadToKeep}
+  `;
+  const [rows] = await dbQuery(holdersQuery);
+  const blobHolders = blobHoldersFromUploadRows(rows);
+  await removeBlobHolders(blobHolders);
+
+  const deletionQuery = SQL`
     DELETE u, i
     FROM uploads u
     LEFT JOIN ids i ON i.id = u.id
     WHERE u.container IS NULL AND u.user_container IS NULL
     AND creation_time < ${oldestUnassignedUploadToKeep}
-  `);
+  `;
+  await dbQuery(deletionQuery);
 }
 
 export { deleteUpload, deleteUnassignedUploads };
diff --git a/keyserver/src/uploads/media-utils.js b/keyserver/src/uploads/media-utils.js
index efee5bb3c..32fcc905b 100644
--- a/keyserver/src/uploads/media-utils.js
+++ b/keyserver/src/uploads/media-utils.js
@@ -1,242 +1,260 @@
 // @flow
 
 import bmp from '@vingle/bmp-js';
 import invariant from 'invariant';
 import sharp from 'sharp';
 
 import {
   serverTranscodableTypes,
   serverCanHandleTypes,
   readableFilename,
   mediaConfig,
 } from 'lib/media/file-utils.js';
 import { getImageProcessingPlan } from 'lib/media/image-utils.js';
+import type { BlobHashAndHolder } from 'lib/types/holder-types.js';
 import type { Dimensions } from 'lib/types/media-types.js';
 import { deepFileInfoFromData } from 'web/media/file-utils.js';
 
 import type { UploadInput } from '../creators/upload-creator.js';
 
 function initializeSharp(buffer: Buffer, mime: string) {
   if (mime !== 'image/bmp') {
     return sharp(buffer);
   }
   const bitmap = bmp.decode(buffer, true);
   return sharp(bitmap.data, {
     raw: {
       width: bitmap.width,
       height: bitmap.height,
       channels: 4,
     },
   });
 }
 
 function getMediaType(inputMimeType: string): 'photo' | 'video' | null {
   if (!serverCanHandleTypes.has(inputMimeType)) {
     return null;
   }
   const mediaType = mediaConfig[inputMimeType]?.mediaType;
   invariant(
     mediaType === 'photo' || mediaType === 'video',
     `mediaType for ${inputMimeType} should be photo or video`,
   );
   return mediaType;
 }
 
 type ValidateAndConvertInput = {
   +initialBuffer: Buffer,
   +initialName: string,
   +inputDimensions: ?Dimensions,
   +inputLoop: boolean,
   +inputEncryptionKey: ?string,
   +inputMimeType: ?string,
   +inputThumbHash: ?string,
   +size: number, // in bytes
 };
 async function validateAndConvert(
   input: ValidateAndConvertInput,
 ): Promise<?UploadInput> {
   const {
     initialBuffer,
     initialName,
     inputDimensions,
     inputLoop,
     inputEncryptionKey,
     inputMimeType,
     inputThumbHash,
     size, // in bytes
   } = input;
 
   const passthroughParams = {
     loop: inputLoop,
     ...(inputThumbHash ? { thumbHash: inputThumbHash } : undefined),
   };
 
   // we don't want to transcode encrypted files
   if (inputEncryptionKey) {
     invariant(
       inputMimeType,
       'inputMimeType should be set in validateAndConvert for encrypted files',
     );
     invariant(
       inputDimensions,
       'inputDimensions should be set in validateAndConvert for encrypted files',
     );
 
     const mediaType = getMediaType(inputMimeType);
     if (!mediaType) {
       return null;
     }
 
     return {
       ...passthroughParams,
       name: initialName,
       mime: inputMimeType,
       mediaType,
       content: { storage: 'keyserver', buffer: initialBuffer },
       dimensions: inputDimensions,
       encryptionKey: inputEncryptionKey,
     };
   }
 
   const { mime, mediaType } = deepFileInfoFromData(initialBuffer);
   if (!mime || !mediaType) {
     return null;
   }
 
   if (!serverCanHandleTypes.has(mime)) {
     return null;
   }
 
   if (mediaType === 'video') {
     invariant(
       inputDimensions,
       'inputDimensions should be set in validateAndConvert',
     );
     return {
       ...passthroughParams,
       mime: mime,
       mediaType: mediaType,
       name: initialName,
       content: { storage: 'keyserver', buffer: initialBuffer },
       dimensions: inputDimensions,
     };
   }
 
   if (!serverTranscodableTypes.has(mime)) {
     // This should've gotten converted on the client
     return null;
   }
 
   const convertedImage = await convertImage(
     initialBuffer,
     mime,
     initialName,
     inputDimensions,
     inputLoop,
     size,
   );
   if (!convertedImage) {
     return null;
   }
   return {
     ...passthroughParams,
     ...convertedImage,
   };
 }
 
 async function convertImage(
   initialBuffer: Buffer,
   mime: string,
   initialName: string,
   inputDimensions: ?Dimensions,
   inputLoop: boolean,
   size: number,
 ): Promise<?UploadInput> {
   let sharpImage, metadata;
   try {
     sharpImage = initializeSharp(initialBuffer, mime);
     metadata = await sharpImage.metadata();
   } catch (e) {
     return null;
   }
 
   let initialDimensions = inputDimensions;
   if (!initialDimensions) {
     if (metadata.orientation && metadata.orientation > 4) {
       initialDimensions = { width: metadata.height, height: metadata.width };
     } else {
       initialDimensions = { width: metadata.width, height: metadata.height };
     }
   }
 
   const plan = getImageProcessingPlan({
     inputMIME: mime,
     inputDimensions: initialDimensions,
     inputFileSize: size,
     inputOrientation: metadata.orientation,
   });
   if (plan.action === 'none') {
     const name = readableFilename(initialName, mime);
     invariant(name, `should be able to construct filename for ${mime}`);
     return {
       mime,
       mediaType: 'photo',
       name,
       content: { storage: 'keyserver', buffer: initialBuffer },
       dimensions: initialDimensions,
       loop: inputLoop,
     };
   }
   console.log(`processing image with ${JSON.stringify(plan)}`);
   const { targetMIME, compressionRatio, fitInside, shouldRotate } = plan;
 
   if (shouldRotate) {
     sharpImage = sharpImage.rotate();
   }
 
   if (fitInside) {
     sharpImage = sharpImage.resize(fitInside.width, fitInside.height, {
       fit: 'inside',
       withoutEnlargement: true,
     });
   }
 
   if (targetMIME === 'image/png') {
     sharpImage = sharpImage.png();
   } else {
     sharpImage = sharpImage.jpeg({ quality: compressionRatio * 100 });
   }
 
   const { data: convertedBuffer, info } = await sharpImage.toBuffer({
     resolveWithObject: true,
   });
   const convertedDimensions = { width: info.width, height: info.height };
 
   const { mime: convertedMIME, mediaType: convertedMediaType } =
     deepFileInfoFromData(convertedBuffer);
   if (
     !convertedMIME ||
     !convertedMediaType ||
     convertedMIME !== targetMIME ||
     convertedMediaType !== 'photo'
   ) {
     return null;
   }
 
   const convertedName = readableFilename(initialName, targetMIME);
   if (!convertedName) {
     return null;
   }
 
   return {
     mime: targetMIME,
     mediaType: 'photo',
     name: convertedName,
     content: { storage: 'keyserver', buffer: convertedBuffer },
     dimensions: convertedDimensions,
     loop: inputLoop,
   };
 }
 
-export { getMediaType, validateAndConvert };
+function blobHoldersFromUploadRows(
+  rows: $ReadOnlyArray<{ +extra: ?string, ... }>,
+): $ReadOnlyArray<BlobHashAndHolder> {
+  const results = [];
+  for (const { extra } of rows) {
+    if (!extra) {
+      continue;
+    }
+    const { blobHash, blobHolder } = JSON.parse(extra);
+    if (blobHash && blobHolder) {
+      results.push({ blobHash, holder: blobHolder });
+    }
+  }
+
+  return results;
+}
+
+export { blobHoldersFromUploadRows, getMediaType, validateAndConvert };