diff --git a/native/expo-modules/comm-expo-package/ios/MediaModule.swift b/native/expo-modules/comm-expo-package/ios/MediaModule.swift
--- a/native/expo-modules/comm-expo-package/ios/MediaModule.swift
+++ b/native/expo-modules/comm-expo-package/ios/MediaModule.swift
@@ -36,6 +36,45 @@
   var format: String = "N/A"
 }
 
+// H.264 encoding profile selectable from JS for the video encoder.
+enum H264Profile: String, Enumerable {
+  case baseline
+  case main
+  case high
+}
+
+// Options for `transcodeVideo`; -1 means "not specified" for numeric fields.
+public struct TranscodeOptions: Record {
+  public init(){}
+  @Field
+  var width: Double = -1
+
+  @Field
+  var height: Double = -1
+
+  @Field
+  var bitrate: Int = -1
+
+  @Field
+  var profile: H264Profile = .high
+}
+
+// Result payload resolved by `transcodeVideo` on success.
+public struct TranscodeStats: Record {
+  public init(){}
+  @Field
+  var size: Int = 0
+
+  @Field
+  var duration: Int = 0
+
+  @Field
+  var speed: Double = 0
+}
+
+// Event emitted with `{ progress: 0...1 }` while transcoding runs.
+let TRANSCODE_PROGRESS_EVENT_NAME = "onTranscodeProgress"
+
 public class MediaModule: Module {
   public func definition() -> ModuleDefinition {
     Name("MediaModule")
@@ -43,6 +82,9 @@
     AsyncFunction("getVideoInfo", getVideoInfo)
     AsyncFunction("hasMultipleFrames", hasMultipleFrames)
     AsyncFunction("generateThumbnail", generateThumbnail)
+    AsyncFunction("transcodeVideo", transcodeVideo)
+
+    Events(TRANSCODE_PROGRESS_EVENT_NAME)
   }
 
 
@@ -84,7 +126,7 @@
 
     return count > 1
   }
-  
+
   private func generateThumbnail(inputPath: URL, outputPath: URL) throws {
     let asset = AVURLAsset(url: inputPath)
     let generator = AVAssetImageGenerator(asset: asset)
@@ -106,6 +148,215 @@
       throw ImageWriteFailedException(error.localizedDescription)
     }
   }
+
+  // Repeating main-run-loop timer that emits transcoding progress events.
+  var timer: Timer? = nil
+
+  // Transcodes `inputPath` into an H.264/AAC MP4 at `outputPath`.
+  // Emits TRANSCODE_PROGRESS_EVENT_NAME events while running and resolves
+  // the promise with `TranscodeStats`, or rejects with `TranscodingFailed`.
+  private func transcodeVideo(inputPath: URL, outputPath: URL, options: TranscodeOptions, promise: Promise) throws {
+    let asset = AVURLAsset(url: inputPath)
+
+    guard let assetReader = try? AVAssetReader(asset: asset) else {
+      promise.reject(TranscodingFailed("Failed to initialize asset or asset reader."))
+      return
+    }
+
+    let totalDuration = asset.duration
+
+    guard let videoTrack = asset.tracks(withMediaType: .video).first else {
+      promise.reject(TranscodingFailed("File has no video track"))
+      return
+    }
+    let audioTrack = asset.tracks(withMediaType: .audio).first
+
+    // Decode frames to a pixel format the H.264 encoder accepts.
+    let videoReaderSettings: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
+
+    let videoTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
+
+    // Fail fast instead of silently producing a video-less output.
+    guard assetReader.canAdd(videoTrackOutput) else {
+      promise.reject(TranscodingFailed("Cannot read video track"))
+      return
+    }
+    assetReader.add(videoTrackOutput)
+
+    var audioTrackOutput: AVAssetReaderTrackOutput? = nil
+
+    if let audioTrack = audioTrack {
+      // Audio is decoded to LPCM here and re-encoded to AAC by the writer.
+      let audioOutputSettingsDict: [String: Any] = [
+        AVFormatIDKey: kAudioFormatLinearPCM,
+        AVSampleRateKey: 44100
+      ]
+
+      let output = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioOutputSettingsDict)
+      // Only keep the output if the reader accepts it; otherwise the video
+      // is transcoded without audio rather than crashing later.
+      if assetReader.canAdd(output) {
+        assetReader.add(output)
+        audioTrackOutput = output
+      }
+    }
+
+    guard let assetWriter = try? AVAssetWriter(outputURL: outputPath, fileType: AVFileType.mp4) else {
+      promise.reject(TranscodingFailed("Error initializing AVAssetWriter"))
+      return
+    }
+    assetWriter.shouldOptimizeForNetworkUse = true
+
+    // Preserve source orientation: size the encoder for the transformed
+    // frame and re-apply the transform on the writer input.
+    let preferredTransform = videoTrack.preferredTransform
+    let naturalFrame = CGRectMake(0, 0, options.width, options.height)
+    let preferredFrame = CGRectApplyAffineTransform(naturalFrame, preferredTransform)
+
+    var videoCompressionSettings: [String: Any] = [
+      AVVideoCodecKey: AVVideoCodecType.h264,
+      AVVideoWidthKey: preferredFrame.width,
+      AVVideoHeightKey: preferredFrame.height,
+    ]
+
+    var videoCompressionProperties: [String: Any] = [:]
+    if(options.bitrate != -1) {
+      // JS passes kbps; AVFoundation expects bps.
+      videoCompressionProperties[AVVideoAverageBitRateKey] = options.bitrate*1000
+    }
+    videoCompressionProperties[AVVideoProfileLevelKey] = switch options.profile {
+    case .baseline:
+      AVVideoProfileLevelH264BaselineAutoLevel
+    case .main:
+      AVVideoProfileLevelH264MainAutoLevel
+    case .high:
+      AVVideoProfileLevelH264HighAutoLevel
+    }
+
+    videoCompressionSettings[AVVideoCompressionPropertiesKey] = videoCompressionProperties
+
+    let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoCompressionSettings)
+    videoWriterInput.transform = preferredTransform
+
+    guard assetWriter.canAdd(videoWriterInput) else {
+      promise.reject(TranscodingFailed("Cannot write video track"))
+      return
+    }
+    assetWriter.add(videoWriterInput)
+
+    var audioWriterInput: AVAssetWriterInput? = nil
+
+    if audioTrackOutput != nil {
+      let audioInputSettingsDict: [String: Any] = [
+        AVFormatIDKey: kAudioFormatMPEG4AAC,
+        AVNumberOfChannelsKey: 2,
+        AVSampleRateKey: 44100.0,
+        AVEncoderBitRateKey: 128000
+      ]
+
+      let input = AVAssetWriterInput(mediaType: .audio, outputSettings: audioInputSettingsDict)
+      if assetWriter.canAdd(input) {
+        assetWriter.add(input)
+        audioWriterInput = input
+      }
+    }
+
+    // Surface reader/writer startup failures instead of hanging the promise.
+    guard assetReader.startReading() else {
+      promise.reject(TranscodingFailed(String(describing: assetReader.error)))
+      return
+    }
+    guard assetWriter.startWriting() else {
+      promise.reject(TranscodingFailed(String(describing: assetWriter.error)))
+      return
+    }
+
+    assetWriter.startSession(atSourceTime: CMTime.zero)
+
+    // NOTE(review): `progress` is written on the video queue and read by the
+    // timer on the main thread; a stale read only delays a progress event.
+    var progress = 0.0
+    DispatchQueue.main.async {
+      self.timer = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: true) { [weak self] timer in
+        self?.sendEvent(TRANSCODE_PROGRESS_EVENT_NAME, [
+          "progress": progress
+        ])
+      }
+    }
+
+    let startTime = CFAbsoluteTimeGetCurrent()
+
+    let dispatchGroup = DispatchGroup()
+
+    dispatchGroup.enter()
+    videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoQueue")) {
+      while videoWriterInput.isReadyForMoreMediaData {
+        if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() {
+          let currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+          progress = CMTimeGetSeconds(currentSampleTime) / CMTimeGetSeconds(totalDuration)
+          videoWriterInput.append(sampleBuffer)
+        } else {
+          videoWriterInput.markAsFinished()
+          dispatchGroup.leave()
+          break
+        }
+      }
+    }
+
+    if let audioWriterInput = audioWriterInput, let audioTrackOutput = audioTrackOutput {
+      dispatchGroup.enter()
+      audioWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "audioQueue")) {
+        while audioWriterInput.isReadyForMoreMediaData {
+          if let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() {
+            audioWriterInput.append(sampleBuffer)
+          } else {
+            audioWriterInput.markAsFinished()
+            dispatchGroup.leave()
+            break
+          }
+        }
+      }
+    }
+
+    dispatchGroup.notify(queue: .main) {
+      assetWriter.finishWriting {
+        // Stop progress events on every terminal state, not just success;
+        // invalidate on the main thread where the timer was scheduled.
+        DispatchQueue.main.async {
+          self.timer?.invalidate()
+          self.timer = nil
+        }
+        switch assetWriter.status {
+        case .failed:
+          promise.reject(TranscodingFailed(String(describing: assetWriter.error)))
+        case .completed:
+          self.sendEvent(TRANSCODE_PROGRESS_EVENT_NAME, [
+            "progress": 1
+          ])
+          let endTime = CFAbsoluteTimeGetCurrent()
+          promise.resolve(self.getStats(outputPath, startTime, endTime))
+        default:
+          // e.g. .cancelled — don't leave the promise hanging.
+          promise.reject(TranscodingFailed("Transcoding did not complete"))
+        }
+        assetReader.cancelReading()
+      }
+    }
+  }
+
+  // Collects size, duration, and transcode-speed stats for the file at `url`.
+  private func getStats(_ url: URL, _ startTime: CFAbsoluteTime, _ endTime: CFAbsoluteTime) -> TranscodeStats {
+    let stats = TranscodeStats()
+
+    let resourceValues = try? url.resourceValues(forKeys: [.fileSizeKey])
+    stats.size = resourceValues?.fileSize ?? 0
+
+    let asset = AVAsset(url: url)
+    let duration = CMTimeGetSeconds(asset.duration)
+    stats.duration = Int(duration)
+    // Media seconds produced per wall-clock second of transcoding.
+    stats.speed = duration/(endTime-startTime)
+    return stats
+  }
 }
 
 // MARK: - Exception definitions
@@ -133,3 +384,9 @@
     "Writing image data to the file has failed: \(param)"
   }
 }
+
+private class TranscodingFailed: GenericException<String> {
+  override var reason: String {
+    "Transcoding failed: \(param)"
+  }
+}