diff --git a/native/media/ffmpeg.js b/native/media/ffmpeg.js index f39a3f407..27cb82539 100644 --- a/native/media/ffmpeg.js +++ b/native/media/ffmpeg.js @@ -1,142 +1,154 @@ // @flow import { RNFFmpeg, RNFFprobe, RNFFmpegConfig } from 'react-native-ffmpeg'; import { getHasMultipleFramesProbeCommand } from 'lib/media/video-utils'; import type { FFmpegStatistics } from 'lib/types/media-types'; const maxSimultaneousCalls = { process: 1, probe: 1, }; type CallCounter = typeof maxSimultaneousCalls; type QueuedCommandType = $Keys<typeof maxSimultaneousCalls>; type QueuedCommand = {| type: QueuedCommandType, runCommand: () => Promise<void>, |}; class FFmpeg { queue: QueuedCommand[] = []; currentCalls: CallCounter = { process: 0, probe: 0 }; queueCommand<R>( type: QueuedCommandType, wrappedCommand: () => Promise<R>, ): Promise<R> { return new Promise((resolve, reject) => { const runCommand = async () => { try { const result = await wrappedCommand(); this.currentCalls[type]--; this.possiblyRunCommands(); resolve(result); } catch (e) { reject(e); } }; this.queue.push({ type, runCommand }); this.possiblyRunCommands(); }); } possiblyRunCommands() { let openSlots = {}; for (let type in this.currentCalls) { const currentCalls = this.currentCalls[type]; const maxCalls = maxSimultaneousCalls[type]; const callsLeft = maxCalls - currentCalls; if (!callsLeft) { return; } else if (currentCalls) { openSlots = { [type]: callsLeft }; break; } else { openSlots[type] = callsLeft; } } const toDefer = [], toRun = []; for (let command of this.queue) { const type: string = command.type; if (openSlots[type]) { openSlots = { [type]: openSlots[type] - 1 }; this.currentCalls[type]++; toRun.push(command); } else { toDefer.push(command); } } this.queue = toDefer; toRun.forEach(({ runCommand }) => runCommand()); } - process( + transcodeVideo( ffmpegCommand: string, inputVideoDuration: number, onTranscodingProgress: (percent: number) => void, ) { const duration = inputVideoDuration > 0 ? 
inputVideoDuration : 0.001; const wrappedCommand = async () => { RNFFmpegConfig.resetStatistics(); let lastStats; RNFFmpegConfig.enableStatisticsCallback( (statisticsData: FFmpegStatistics) => { lastStats = statisticsData; const { time } = statisticsData; onTranscodingProgress(time / 1000 / duration); }, ); const ffmpegResult = await RNFFmpeg.execute(ffmpegCommand); return { ...ffmpegResult, lastStats }; }; return this.queueCommand('process', wrappedCommand); } + generateThumbnail(videoPath: string, outputPath: string) { + const wrappedCommand = () => + FFmpeg.innerGenerateThumbnail(videoPath, outputPath); + return this.queueCommand('process', wrappedCommand); + } + + static async innerGenerateThumbnail(videoPath: string, outputPath: string) { + const thumbnailCommand = `-i ${videoPath} -frames 1 -f singlejpeg ${outputPath}`; + const { rc } = await RNFFmpeg.execute(thumbnailCommand); + return rc; + } + getVideoInfo(path: string) { const wrappedCommand = () => FFmpeg.innerGetVideoInfo(path); return this.queueCommand('probe', wrappedCommand); } static async innerGetVideoInfo(path: string) { const info = await RNFFprobe.getMediaInformation(path); const videoStreamInfo = FFmpeg.getVideoStreamInfo(info); const codec = videoStreamInfo && videoStreamInfo.codec; const dimensions = videoStreamInfo && videoStreamInfo.dimensions; const format = info.format.split(','); const duration = info.duration / 1000; return { codec, format, dimensions, duration }; } static getVideoStreamInfo(info: Object) { if (!info.streams) { return null; } for (let stream of info.streams) { if (stream.type === 'video') { const { codec, width, height } = stream; return { codec, dimensions: { width, height } }; } } return null; } hasMultipleFrames(path: string) { const wrappedCommand = () => FFmpeg.innerHasMultipleFrames(path); return this.queueCommand('probe', wrappedCommand); } static async innerHasMultipleFrames(path: string) { await RNFFprobe.execute(getHasMultipleFramesProbeCommand(path)); const 
probeOutput = await RNFFmpegConfig.getLastCommandOutput(); const numFrames = parseInt(probeOutput.lastCommandOutput); return numFrames > 1; } } const ffmpeg = new FFmpeg(); export { ffmpeg }; diff --git a/native/media/video-utils.js b/native/media/video-utils.js index f6aa0cc6c..8d76100f3 100644 --- a/native/media/video-utils.js +++ b/native/media/video-utils.js @@ -1,224 +1,224 @@ // @flow import invariant from 'invariant'; import { Platform } from 'react-native'; import filesystem from 'react-native-fs'; import { mediaConfig, pathFromURI } from 'lib/media/file-utils'; import { getVideoProcessingPlan } from 'lib/media/video-utils'; import type { MediaMissionStep, MediaMissionFailure, VideoProbeMediaMissionStep, Dimensions, } from 'lib/types/media-types'; import { getMessageForException } from 'lib/utils/errors'; import { ffmpeg } from './ffmpeg'; // These are some numbers I sorta kinda made up // We should try to calculate them on a per-device basis const uploadSpeeds = Object.freeze({ wifi: 4096, // in KiB/s cellular: 512, // in KiB/s }); const clientTranscodeSpeed = 1.15; // in seconds of video transcoded per second type ProcessVideoInfo = {| uri: string, mime: string, filename: string, fileSize: number, dimensions: Dimensions, hasWiFi: boolean, |}; type VideoProcessConfig = {| +onTranscodingProgress: (percent: number) => void, |}; type ProcessVideoResponse = {| success: true, uri: string, mime: string, dimensions: Dimensions, loop: boolean, |}; async function processVideo( input: ProcessVideoInfo, config: VideoProcessConfig, ): Promise<{| steps: $ReadOnlyArray<MediaMissionStep>, result: MediaMissionFailure | ProcessVideoResponse, |}> { const steps = []; const path = pathFromURI(input.uri); invariant(path, `could not extract path from ${input.uri}`); const initialCheckStep = await checkVideoInfo(path); steps.push(initialCheckStep); if (!initialCheckStep.success || !initialCheckStep.duration) { return { steps, result: { success: false, reason: 'video_probe_failed' } }; } const {
validFormat, duration } = initialCheckStep; const plan = getVideoProcessingPlan({ inputPath: path, inputHasCorrectContainerAndCodec: validFormat, inputFileSize: input.fileSize, inputFilename: input.filename, inputDuration: duration, inputDimensions: input.dimensions, outputDirectory: Platform.select({ ios: filesystem.TemporaryDirectoryPath, default: `${filesystem.TemporaryDirectoryPath}/`, }), // We want ffmpeg to use hardware-accelerated encoders. On iOS we can do // this using VideoToolbox, but ffmpeg on Android is still missing // MediaCodec encoding support: https://trac.ffmpeg.org/ticket/6407 outputCodec: Platform.select({ ios: 'h264_videotoolbox', //android: 'h264_mediacodec', default: 'h264', }), clientConnectionInfo: { hasWiFi: input.hasWiFi, speed: input.hasWiFi ? uploadSpeeds.wifi : uploadSpeeds.cellular, }, clientTranscodeSpeed, }); if (plan.action === 'reject') { return { steps, result: plan.failure }; } if (plan.action === 'none') { return { steps, result: { success: true, uri: input.uri, mime: 'video/mp4', dimensions: input.dimensions, loop: false, }, }; } const { outputPath, ffmpegCommand } = plan; let returnCode, newPath, stats, success = false, exceptionMessage; const start = Date.now(); try { - const { rc, lastStats } = await ffmpeg.process( + const { rc, lastStats } = await ffmpeg.transcodeVideo( ffmpegCommand, duration, config.onTranscodingProgress, ); success = rc === 0; if (success) { returnCode = rc; newPath = outputPath; stats = lastStats; } } catch (e) { exceptionMessage = getMessageForException(e); } if (!success) { unlink(outputPath); } steps.push({ step: 'video_ffmpeg_transcode', success, exceptionMessage, time: Date.now() - start, returnCode, newPath, stats, }); if (!success) { return { steps, result: { success: false, reason: 'video_transcode_failed' }, }; } const transcodeProbeStep = await checkVideoInfo(outputPath); steps.push(transcodeProbeStep); if (!transcodeProbeStep.validFormat) { unlink(outputPath); return { steps, result: { 
success: false, reason: 'video_transcode_failed' }, }; } const dimensions = transcodeProbeStep.dimensions ? transcodeProbeStep.dimensions : input.dimensions; const loop = !!( mediaConfig[input.mime] && mediaConfig[input.mime].videoConfig && mediaConfig[input.mime].videoConfig.loop ); return { steps, result: { success: true, uri: `file://${outputPath}`, mime: 'video/mp4', dimensions, loop, }, }; } async function checkVideoInfo( path: string, ): Promise<VideoProbeMediaMissionStep> { let codec, format, dimensions, duration, success = false, validFormat = false, exceptionMessage; const start = Date.now(); try { ({ codec, format, dimensions, duration } = await ffmpeg.getVideoInfo(path)); success = true; validFormat = codec === 'h264' && format.includes('mp4'); } catch (e) { exceptionMessage = getMessageForException(e); } return { step: 'video_probe', success, exceptionMessage, time: Date.now() - start, path, validFormat, duration, codec, format, dimensions, }; } async function unlink(path: string) { try { await filesystem.unlink(path); } catch {} } function formatDuration(seconds: number) { const mm = Math.floor(seconds / 60); const ss = (seconds % 60).toFixed(0).padStart(2, '0'); return `${mm}:${ss}`; } export { processVideo, formatDuration };