diff --git a/native/media/ffmpeg.js b/native/media/ffmpeg.js
index 131b33fd7..6a09ffc92 100644
--- a/native/media/ffmpeg.js
+++ b/native/media/ffmpeg.js
@@ -1,165 +1,167 @@
 // @flow
 
 import { RNFFmpeg, RNFFprobe, RNFFmpegConfig } from 'react-native-ffmpeg';
 
 import { getHasMultipleFramesProbeCommand } from 'lib/media/video-utils.js';
 import type {
   Dimensions,
   FFmpegStatistics,
   VideoInfo,
 } from 'lib/types/media-types.js';
 
 const maxSimultaneousCalls = {
   process: 1,
   probe: 1,
 };
 type CallCounter = typeof maxSimultaneousCalls;
 type QueuedCommandType = $Keys<typeof maxSimultaneousCalls>;
 type QueuedCommand = {
   type: QueuedCommandType,
   runCommand: () => Promise<void>,
 };
 
 class FFmpeg {
   queue: QueuedCommand[] = [];
   currentCalls: CallCounter = { process: 0, probe: 0 };
 
   queueCommand<R>(
     type: QueuedCommandType,
     wrappedCommand: () => Promise<R>,
   ): Promise<R> {
     return new Promise((resolve, reject) => {
       const runCommand = async () => {
         try {
           const result = await wrappedCommand();
           this.currentCalls[type]--;
           this.possiblyRunCommands();
           resolve(result);
         } catch (e) {
           reject(e);
         }
       };
       this.queue.push({ type, runCommand });
       this.possiblyRunCommands();
     });
   }
 
   possiblyRunCommands() {
     let openSlots = {};
     for (const type in this.currentCalls) {
       const currentCalls = this.currentCalls[type];
       const maxCalls = maxSimultaneousCalls[type];
       const callsLeft = maxCalls - currentCalls;
       if (!callsLeft) {
         return;
       } else if (currentCalls) {
         openSlots = { [type]: callsLeft };
         break;
       } else {
         openSlots[type] = callsLeft;
       }
     }
 
     const toDefer = [],
       toRun = [];
     for (const command of this.queue) {
       const type: string = command.type;
       if (openSlots[type]) {
         openSlots = { [type]: openSlots[type] - 1 };
         this.currentCalls[type]++;
         toRun.push(command);
       } else {
         toDefer.push(command);
       }
     }
 
     this.queue = toDefer;
     toRun.forEach(({ runCommand }) => runCommand());
   }
 
   transcodeVideo(
     ffmpegCommand: string,
     inputVideoDuration: number,
-    onTranscodingProgress: (percent: number) => void,
+    onTranscodingProgress?: (percent: number) => void,
   ): Promise<{ rc: number, lastStats: ?FFmpegStatistics }> {
     const duration = inputVideoDuration > 0 ? inputVideoDuration : 0.001;
     const wrappedCommand = async () => {
       RNFFmpegConfig.resetStatistics();
       let lastStats;
-      RNFFmpegConfig.enableStatisticsCallback(
-        (statisticsData: FFmpegStatistics) => {
-          lastStats = statisticsData;
-          const { time } = statisticsData;
-          onTranscodingProgress(time / 1000 / duration);
-        },
-      );
+      if (onTranscodingProgress) {
+        RNFFmpegConfig.enableStatisticsCallback(
+          (statisticsData: FFmpegStatistics) => {
+            lastStats = statisticsData;
+            const { time } = statisticsData;
+            onTranscodingProgress(time / 1000 / duration);
+          },
+        );
+      }
       const ffmpegResult = await RNFFmpeg.execute(ffmpegCommand);
       return { ...ffmpegResult, lastStats };
     };
     return this.queueCommand('process', wrappedCommand);
   }
 
   generateThumbnail(videoPath: string, outputPath: string): Promise<number> {
     const wrappedCommand = () =>
       FFmpeg.innerGenerateThumbnail(videoPath, outputPath);
     return this.queueCommand('process', wrappedCommand);
   }
 
   static async innerGenerateThumbnail(
     videoPath: string,
     outputPath: string,
   ): Promise<number> {
     const thumbnailCommand = `-i ${videoPath} -frames 1 -f singlejpeg ${outputPath}`;
     const { rc } = await RNFFmpeg.execute(thumbnailCommand);
     return rc;
   }
 
   getVideoInfo(path: string): Promise<VideoInfo> {
     const wrappedCommand = () => FFmpeg.innerGetVideoInfo(path);
     return this.queueCommand('probe', wrappedCommand);
   }
 
   static async innerGetVideoInfo(path: string): Promise<VideoInfo> {
     const info = await RNFFprobe.getMediaInformation(path);
     const videoStreamInfo = FFmpeg.getVideoStreamInfo(info);
     const codec = videoStreamInfo?.codec;
     const dimensions = videoStreamInfo && videoStreamInfo.dimensions;
     const format = info.format.split(',');
     const duration = info.duration / 1000;
     return { codec, format, dimensions, duration };
   }
 
   static getVideoStreamInfo(
     info: Object,
   ): ?{ +codec: string, +dimensions: Dimensions } {
     if (!info.streams) {
       return null;
     }
     for (const stream of info.streams) {
       if (stream.type === 'video') {
         const codec: string = stream.codec;
         const width: number = stream.width;
         const height: number = stream.height;
         return { codec, dimensions: { width, height } };
       }
     }
     return null;
   }
 
   hasMultipleFrames(path: string): Promise<boolean> {
     const wrappedCommand = () => FFmpeg.innerHasMultipleFrames(path);
     return this.queueCommand('probe', wrappedCommand);
   }
 
   static async innerHasMultipleFrames(path: string): Promise<boolean> {
     await RNFFprobe.execute(getHasMultipleFramesProbeCommand(path));
     const probeOutput = await RNFFmpegConfig.getLastCommandOutput();
     const numFrames = parseInt(probeOutput.lastCommandOutput);
     return numFrames > 1;
   }
 }
 
 const ffmpeg: FFmpeg = new FFmpeg();
 
 export { ffmpeg };
diff --git a/native/media/media-utils.js b/native/media/media-utils.js
index 63817189c..8c0192d7f 100644
--- a/native/media/media-utils.js
+++ b/native/media/media-utils.js
@@ -1,264 +1,264 @@
 // @flow
 
 import invariant from 'invariant';
 import { Image } from 'react-native';
 
 import { pathFromURI, sanitizeFilename } from 'lib/media/file-utils.js';
 import type {
   Dimensions,
   MediaMissionStep,
   MediaMissionFailure,
   NativeMediaSelection,
 } from 'lib/types/media-types.js';
 
 import { fetchFileInfo } from './file-utils.js';
 import { processImage } from './image-utils.js';
 import { saveMedia } from './save-media.js';
 import { processVideo } from './video-utils.js';
 
 type MediaProcessConfig = {
   +hasWiFi: boolean,
   // Blocks return until we can confirm result has the correct MIME
   +finalFileHeaderCheck?: boolean,
-  +onTranscodingProgress: (percent: number) => void,
+  +onTranscodingProgress?: (percent: number) => void,
 };
 
 type SharedMediaResult = {
   +success: true,
   +uploadURI: string,
   +shouldDisposePath: ?string,
   +filename: string,
   +mime: string,
   +dimensions: Dimensions,
 };
 
 export type MediaResult =
   | { +mediaType: 'photo', ...SharedMediaResult }
   | {
       +mediaType: 'video',
       ...SharedMediaResult,
       +uploadThumbnailURI: string,
       +loop: boolean,
     }
   | {
       +mediaType: 'encrypted_photo',
       ...SharedMediaResult,
       +encryptionKey: string,
     }
   | {
       +mediaType: 'encrypted_video',
       ...SharedMediaResult,
       +encryptionKey: string,
       +thumbnailEncryptionKey: string,
       +uploadThumbnailURI: string,
       +loop: boolean,
     };
 
 function processMedia(
   selection: NativeMediaSelection,
   config: MediaProcessConfig,
 ): {
   resultPromise: Promise<MediaMissionFailure | MediaResult>,
   reportPromise: Promise<$ReadOnlyArray<MediaMissionStep>>,
 } {
   let resolveResult;
   const sendResult = result => {
     if (resolveResult) {
       resolveResult(result);
     }
   };
 
   const reportPromise = innerProcessMedia(selection, config, sendResult);
   const resultPromise = new Promise(resolve => {
     resolveResult = resolve;
   });
 
   return { reportPromise, resultPromise };
 }
 
 async function innerProcessMedia(
   selection: NativeMediaSelection,
   config: MediaProcessConfig,
   sendResult: (result: MediaMissionFailure | MediaResult) => void,
 ): Promise<$ReadOnlyArray<MediaMissionStep>> {
   let initialURI = null,
     uploadURI = null,
     uploadThumbnailURI = null,
     dimensions = selection.dimensions,
     mediaType = null,
     mime = null,
     loop = false,
     resultReturned = false;
 
   const returnResult = (failure?: MediaMissionFailure) => {
     invariant(
       !resultReturned,
       'returnResult called twice in innerProcessMedia',
     );
     resultReturned = true;
     if (failure) {
       sendResult(failure);
       return;
     }
     invariant(
       uploadURI && mime && mediaType,
       'missing required fields in returnResult',
     );
     const shouldDisposePath =
       initialURI !== uploadURI ? pathFromURI(uploadURI) : null;
     const filename = sanitizeFilename(selection.filename, mime);
     if (mediaType === 'video') {
       invariant(uploadThumbnailURI, 'video should have uploadThumbnailURI');
       sendResult({
         success: true,
         uploadURI,
         uploadThumbnailURI,
         shouldDisposePath,
         filename,
         mime,
         mediaType,
         dimensions,
         loop,
       });
     } else {
       sendResult({
         success: true,
         uploadURI,
         shouldDisposePath,
         filename,
         mime,
         mediaType,
         dimensions,
       });
     }
   };
 
   const steps = [],
     completeBeforeFinish = [];
   const finish = async (failure?: MediaMissionFailure) => {
     if (!resultReturned) {
       returnResult(failure);
     }
     await Promise.all(completeBeforeFinish);
     return steps;
   };
 
   if (selection.captureTime && selection.retries === 0) {
     const { uri } = selection;
     invariant(
       pathFromURI(uri),
       `captured URI ${uri} should use file:// scheme`,
     );
     completeBeforeFinish.push(
       (async () => {
         const { reportPromise } = saveMedia(uri);
         const saveMediaSteps = await reportPromise;
         steps.push(...saveMediaSteps);
       })(),
     );
   }
 
   const possiblyPhoto = selection.step.startsWith('photo_');
   const mediaNativeID = selection.mediaNativeID
     ? selection.mediaNativeID
     : null;
   const { steps: fileInfoSteps, result: fileInfoResult } = await fetchFileInfo(
     selection.uri,
     { mediaNativeID },
     {
       orientation: possiblyPhoto,
       mime: true,
       mediaType: true,
     },
   );
   steps.push(...fileInfoSteps);
   if (!fileInfoResult.success) {
     return await finish(fileInfoResult);
   }
   const { orientation, fileSize } = fileInfoResult;
   ({ uri: initialURI, mime, mediaType } = fileInfoResult);
   if (!mime || !mediaType) {
     return await finish({
       success: false,
       reason: 'media_type_fetch_failed',
       detectedMIME: mime,
     });
   }
 
   if (mediaType === 'video') {
     const { steps: videoSteps, result: videoResult } = await processVideo(
       {
         uri: initialURI,
         mime,
         filename: selection.filename,
         fileSize,
         dimensions,
         hasWiFi: config.hasWiFi,
       },
       {
         onTranscodingProgress: config.onTranscodingProgress,
       },
     );
     steps.push(...videoSteps);
     if (!videoResult.success) {
       return await finish(videoResult);
     }
     ({
       uri: uploadURI,
       thumbnailURI: uploadThumbnailURI,
       mime,
       dimensions,
       loop,
     } = videoResult);
   } else if (mediaType === 'photo') {
     const { steps: imageSteps, result: imageResult } = await processImage({
       uri: initialURI,
       dimensions,
       mime,
       fileSize,
       orientation,
     });
     steps.push(...imageSteps);
     if (!imageResult.success) {
       return await finish(imageResult);
     }
     ({ uri: uploadURI, mime, dimensions } = imageResult);
   } else {
     invariant(false, `unknown mediaType ${mediaType}`);
   }
 
   if (uploadURI === initialURI) {
     return await finish();
   }
 
   if (!config.finalFileHeaderCheck) {
     returnResult();
   }
 
   const { steps: finalFileInfoSteps, result: finalFileInfoResult } =
     await fetchFileInfo(uploadURI, undefined, { mime: true });
   steps.push(...finalFileInfoSteps);
   if (!finalFileInfoResult.success) {
     return await finish(finalFileInfoResult);
   }
 
   if (finalFileInfoResult.mime && finalFileInfoResult.mime !== mime) {
     return await finish({
       success: false,
       reason: 'mime_type_mismatch',
       reportedMediaType: mediaType,
       reportedMIME: mime,
       detectedMIME: finalFileInfoResult.mime,
     });
   }
 
   return await finish();
 }
 
 function getDimensions(uri: string): Promise<Dimensions> {
   return new Promise((resolve, reject) => {
     Image.getSize(
       uri,
       (width: number, height: number) => resolve({ height, width }),
       reject,
     );
   });
 }
 
 export { processMedia, getDimensions };
diff --git a/native/media/video-utils.js b/native/media/video-utils.js
index db1e7809e..2bb169314 100644
--- a/native/media/video-utils.js
+++ b/native/media/video-utils.js
@@ -1,282 +1,282 @@
 // @flow
 
 import invariant from 'invariant';
 import { Platform } from 'react-native';
 import filesystem from 'react-native-fs';
 
 import { mediaConfig, pathFromURI } from 'lib/media/file-utils.js';
 import { getVideoProcessingPlan } from 'lib/media/video-utils.js';
 import type { ProcessPlan } from 'lib/media/video-utils.js';
 import type {
   MediaMissionStep,
   MediaMissionFailure,
   VideoProbeMediaMissionStep,
   TranscodeVideoMediaMissionStep,
   VideoGenerateThumbnailMediaMissionStep,
   Dimensions,
 } from 'lib/types/media-types.js';
 import { getMessageForException } from 'lib/utils/errors.js';
 
 import { ffmpeg } from './ffmpeg.js';
 import { temporaryDirectoryPath } from './file-utils.js';
 
 // These are some numbers I sorta kinda made up
 // We should try to calculate them on a per-device basis
 const uploadSpeeds = Object.freeze({
   wifi: 4096, // in KiB/s
   cellular: 512, // in KiB/s
 });
 const clientTranscodeSpeed = 1.15; // in seconds of video transcoded per second
 
 type ProcessVideoInfo = {
   +uri: string,
   +mime: string,
   +filename: ?string,
   +fileSize: number,
   +dimensions: Dimensions,
   +hasWiFi: boolean,
 };
 
 type VideoProcessConfig = {
-  +onTranscodingProgress: (percent: number) => void,
+  +onTranscodingProgress?: (percent: number) => void,
 };
 
 type ProcessVideoResponse = {
   +success: true,
   +uri: string,
   +thumbnailURI: string,
   +mime: string,
   +dimensions: Dimensions,
   +loop: boolean,
 };
 
 async function processVideo(
   input: ProcessVideoInfo,
   config: VideoProcessConfig,
 ): Promise<{
   steps: $ReadOnlyArray<MediaMissionStep>,
   result: MediaMissionFailure | ProcessVideoResponse,
 }> {
   const steps = [];
 
   const path = pathFromURI(input.uri);
   invariant(path, `could not extract path from ${input.uri}`);
 
   const initialCheckStep = await checkVideoInfo(path);
   steps.push(initialCheckStep);
   if (!initialCheckStep.success || !initialCheckStep.duration) {
     return { steps, result: { success: false, reason: 'video_probe_failed' } };
   }
   const { validFormat, duration } = initialCheckStep;
 
   const plan = getVideoProcessingPlan({
     inputPath: path,
     inputHasCorrectContainerAndCodec: validFormat,
     inputFileSize: input.fileSize,
     inputFilename: input.filename,
     inputMimeType: input.mime,
     inputDuration: duration,
     inputDimensions: input.dimensions,
     outputDirectory: temporaryDirectoryPath,
     // We want ffmpeg to use hardware-accelerated encoders. On iOS we can do
     // this using VideoToolbox, but ffmpeg on Android is still missing
     // MediaCodec encoding support: https://trac.ffmpeg.org/ticket/6407
     outputCodec: Platform.select({
       ios: 'h264_videotoolbox',
       //android: 'h264_mediacodec',
       default: 'h264',
     }),
     clientConnectionInfo: {
       hasWiFi: input.hasWiFi,
       speed: input.hasWiFi ? uploadSpeeds.wifi : uploadSpeeds.cellular,
     },
     clientTranscodeSpeed,
   });
   if (plan.action === 'reject') {
     return { steps, result: plan.failure };
   }
   if (plan.action === 'none') {
     const thumbnailStep = await generateThumbnail(path, plan.thumbnailPath);
     steps.push(thumbnailStep);
     if (!thumbnailStep.success) {
       unlink(plan.thumbnailPath);
       return {
         steps,
         result: { success: false, reason: 'video_generate_thumbnail_failed' },
       };
     }
     return {
       steps,
       result: {
         success: true,
         uri: input.uri,
         thumbnailURI: `file://${plan.thumbnailPath}`,
         mime: 'video/mp4',
         dimensions: input.dimensions,
         loop: false,
       },
     };
   }
 
   const [thumbnailStep, transcodeStep] = await Promise.all([
     generateThumbnail(path, plan.thumbnailPath),
     transcodeVideo(plan, duration, config.onTranscodingProgress),
   ]);
   steps.push(thumbnailStep, transcodeStep);
 
   if (!thumbnailStep.success) {
     unlink(plan.outputPath);
     unlink(plan.thumbnailPath);
     return {
       steps,
       result: {
         success: false,
         reason: 'video_generate_thumbnail_failed',
       },
     };
   }
 
   if (!transcodeStep.success) {
     unlink(plan.outputPath);
     unlink(plan.thumbnailPath);
     return {
       steps,
       result: {
         success: false,
         reason: 'video_transcode_failed',
       },
     };
   }
 
   const transcodeProbeStep = await checkVideoInfo(plan.outputPath);
   steps.push(transcodeProbeStep);
   if (!transcodeProbeStep.validFormat) {
     unlink(plan.outputPath);
     unlink(plan.thumbnailPath);
     return {
       steps,
       result: { success: false, reason: 'video_transcode_failed' },
     };
   }
 
   const dimensions = transcodeProbeStep.dimensions
     ? transcodeProbeStep.dimensions
     : input.dimensions;
   const loop = !!(
     mediaConfig[input.mime] &&
     mediaConfig[input.mime].videoConfig &&
     mediaConfig[input.mime].videoConfig.loop
   );
   return {
     steps,
     result: {
       success: true,
       uri: `file://${plan.outputPath}`,
       thumbnailURI: `file://${plan.thumbnailPath}`,
       mime: 'video/mp4',
       dimensions,
       loop,
     },
   };
 }
 
 async function generateThumbnail(
   path: string,
   thumbnailPath: string,
 ): Promise<VideoGenerateThumbnailMediaMissionStep> {
   const thumbnailStart = Date.now();
   const thumbnailReturnCode = await ffmpeg.generateThumbnail(
     path,
     thumbnailPath,
   );
   const thumbnailGenerationSuccessful = thumbnailReturnCode === 0;
   return {
     step: 'video_generate_thumbnail',
     success: thumbnailGenerationSuccessful,
     time: Date.now() - thumbnailStart,
     returnCode: thumbnailReturnCode,
     thumbnailURI: thumbnailPath,
   };
 }
 
 async function transcodeVideo(
   plan: ProcessPlan,
   duration: number,
-  onProgressCallback: number => void,
+  onProgressCallback?: number => void,
 ): Promise<TranscodeVideoMediaMissionStep> {
   const transcodeStart = Date.now();
   let returnCode,
     newPath,
     stats,
     success = false,
     exceptionMessage;
   try {
     const { rc, lastStats } = await ffmpeg.transcodeVideo(
       plan.ffmpegCommand,
       duration,
       onProgressCallback,
     );
     success = rc === 0;
     if (success) {
       returnCode = rc;
       newPath = plan.outputPath;
       stats = lastStats;
     }
   } catch (e) {
     exceptionMessage = getMessageForException(e);
   }
   return {
     step: 'video_ffmpeg_transcode',
     success,
     exceptionMessage,
     time: Date.now() - transcodeStart,
     returnCode,
     newPath,
     stats,
   };
 }
 
 async function checkVideoInfo(
   path: string,
 ): Promise<VideoProbeMediaMissionStep> {
   let codec,
     format,
     dimensions,
     duration,
     success = false,
     validFormat = false,
     exceptionMessage;
   const start = Date.now();
   try {
     ({ codec, format, dimensions, duration } = await ffmpeg.getVideoInfo(path));
     success = true;
     validFormat = codec === 'h264' && format.includes('mp4');
   } catch (e) {
     exceptionMessage = getMessageForException(e);
   }
   return {
     step: 'video_probe',
     success,
     exceptionMessage,
     time: Date.now() - start,
     path,
     validFormat,
     duration,
     codec,
     format,
     dimensions,
   };
 }
 
 async function unlink(path: string) {
   try {
     await filesystem.unlink(path);
   } catch {}
 }
 
 function formatDuration(seconds: number): string {
   const mm = Math.floor(seconds / 60);
   const ss = (seconds % 60).toFixed(0).padStart(2, '0');
   return `${mm}:${ss}`;
 }
 
 export { processVideo, formatDuration };
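
Below is a minimal caller sketch, not part of the patch, illustrating what this change enables: with onTranscodingProgress now optional end to end, processMedia can be invoked without a progress handler, in which case ffmpeg.transcodeVideo never registers the statistics callback. The function name and the hardcoded hasWiFi value are hypothetical; in real code the selection comes from the media picker and connectivity state comes from the app.

// Hypothetical caller (not part of the patch).
import type { NativeMediaSelection } from 'lib/types/media-types.js';

import { processMedia } from './media-utils.js';

async function processWithoutProgressUI(selection: NativeMediaSelection) {
  const { resultPromise, reportPromise } = processMedia(selection, {
    hasWiFi: true,
    // onTranscodingProgress omitted: transcodeVideo skips
    // RNFFmpegConfig.enableStatisticsCallback in this case.
  });
  const result = await resultPromise;
  const steps = await reportPromise;
  return { result, steps };
}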