Overlaying two videos with AVFoundation

Problem description:

I want to overlay two videos, with the foreground video being somewhat transparent. I have been following the Apple Docs as well as this tutorial on overlaying two videos with AVFoundation.

Whenever I run two of the same video through my code it doesn't crash; however, when I try feeding it two different videos, I receive this error:

VideoMaskingUtils.exportVideo Error: Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}) 
VideoMaskingUtils.exportVideo Description: <AVAssetExportSession: 0x1556be30, asset = <AVMutableComposition: 0x15567f10 tracks = (
"<AVMutableCompositionTrack: 0x15658030 trackID = 1, mediaType = vide, editCount = 1>", 
"<AVMutableCompositionTrack: 0x1556e250 trackID = 2, mediaType = vide, editCount = 1>" 
)>, presetName = AVAssetExportPresetHighestQuality, outputFileType = public.mpeg-4 
Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.} 

I understand that you can't save a video with an alpha channel on iOS; I want to flatten the two videos into a single opaque video.

When I try to overlay the two videos and apply a PiP style using CATransforms, it crashes; simply overlaying them (with no alpha or any other effects applied) works. Any help is appreciated.

Here is my code (both approaches are in it):

class func overlay(video firstAsset: AVURLAsset, withSecondVideo secondAsset: AVURLAsset, andAlpha alpha: Float) { 

    let mixComposition = AVMutableComposition() 

    let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid) 
    let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid) 


    guard let firstMediaTrack = firstAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return } 
    guard let secondMediaTrack = secondAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return } 
    do { 
     try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), ofTrack: firstMediaTrack, atTime: kCMTimeZero) 
     try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), ofTrack: secondMediaTrack, atTime: kCMTimeZero) 
    } catch (let error) { 
     print(error) 
    } 

    let width = max(firstMediaTrack.naturalSize.width, secondMediaTrack.naturalSize.width) 
    let height = max(firstMediaTrack.naturalSize.height, secondMediaTrack.naturalSize.height) 

    let videoComposition = AVMutableVideoComposition() 
    videoComposition.renderSize = CGSizeMake(width, height) 
    videoComposition.frameDuration = firstMediaTrack.minFrameDuration 


    // Toggle between the two approaches mentioned above.
    let firstApproach = false 
    if firstApproach { 
     let mainInstruction = AVMutableVideoCompositionInstruction() 
     mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration) 
     mainInstruction.backgroundColor = UIColor.redColor().CGColor 

     let firstlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack) 
     firstlayerInstruction.setTransform(firstAsset.preferredTransform, atTime: kCMTimeZero) 

     let secondInstruction = AVMutableVideoCompositionInstruction() 
     secondInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration) 
     let backgroundColor = UIColor(colorLiteralRed: 1.0, green: 1.0, blue: 1.0, alpha: alpha) 
     secondInstruction.backgroundColor = backgroundColor.CGColor 

     let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack) 
     secondlayerInstruction.setTransform(secondAsset.preferredTransform, atTime: kCMTimeZero) 

     secondInstruction.layerInstructions = [secondlayerInstruction] 

     mainInstruction.layerInstructions = [firstlayerInstruction]//, secondlayerInstruction] 

     videoComposition.instructions = [mainInstruction, secondInstruction] 

    } else { 
     let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstMediaTrack) 
     firstLayerInstruction.setTransform(firstMediaTrack.preferredTransform, atTime: kCMTimeZero) 
     firstLayerInstruction.setOpacity(1.0, atTime: kCMTimeZero) 

     let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondMediaTrack) 
     secondlayerInstruction.setTransform(secondMediaTrack.preferredTransform, atTime: kCMTimeZero) 
     secondlayerInstruction.setOpacity(alpha, atTime: kCMTimeZero) 


     let instruction = AVMutableVideoCompositionInstruction() 
     instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration)) 
     instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction] 

     videoComposition.instructions = [instruction] 
    } 



    let outputUrl = VideoMaskingUtils.getPathForTempFileNamed("output.mov") 

    VideoMaskingUtils.exportCompositedVideo(mixComposition, toURL: outputUrl, withVideoComposition: videoComposition) 

    VideoMaskingUtils.removeTempFileAtPath(outputUrl.absoluteString) 
} 
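
For context, the call site looks roughly like this (the asset names here are placeholders, not from my actual project):

import AVFoundation

// Hypothetical call site -- "background.mov" and "foreground.mov" are placeholder assets.
let backgroundURL = NSBundle.mainBundle().URLForResource("background", withExtension: "mov")!
let foregroundURL = NSBundle.mainBundle().URLForResource("foreground", withExtension: "mov")!

// Composite the foreground video over the background at 50% opacity.
VideoMaskingUtils.overlay(video: AVURLAsset(URL: backgroundURL),
                          withSecondVideo: AVURLAsset(URL: foregroundURL),
                          andAlpha: 0.5)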

Here is my exportCompositedVideo function:

private class func exportCompositedVideo(compiledVideo: AVMutableComposition, toURL outputUrl: NSURL, withVideoComposition videoComposition: AVMutableVideoComposition) { 
    guard let exporter = AVAssetExportSession(asset: compiledVideo, presetName: AVAssetExportPresetHighestQuality) else { return } 
    exporter.outputURL = outputUrl 
    exporter.videoComposition = videoComposition 
    exporter.outputFileType = AVFileTypeQuickTimeMovie 
    exporter.shouldOptimizeForNetworkUse = true 
    exporter.exportAsynchronouslyWithCompletionHandler({ 
     switch exporter.status { 
     case .Completed: 
      // we can be confident that there is a URL because 
      // we got this far. Otherwise it would've failed. 
      UISaveVideoAtPathToSavedPhotosAlbum(exporter.outputURL!.path!, nil, nil, nil) 
      print("VideoMaskingUtils.exportVideo SUCCESS!") 
      if exporter.error != nil { 
       print("VideoMaskingUtils.exportVideo Error: \(exporter.error)") 
       print("VideoMaskingUtils.exportVideo Description: \(exporter.description)") 
      } 

      NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error) 
      break 

     case .Exporting: 
      let progress = exporter.progress 
      print("VideoMaskingUtils.exportVideo \(progress)") 

      NSNotificationCenter.defaultCenter().postNotificationName("videoExportProgress", object: progress) 
      break 

     case .Failed: 
      print("VideoMaskingUtils.exportVideo Error: \(exporter.error)") 
      print("VideoMaskingUtils.exportVideo Description: \(exporter.description)") 

      NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error) 
      break 

     default: break 
     } 
    }) 
} 

Similar to this [unanswered question](http://stackoverflow.com/questions/17909906/avfoundation-to-overlay-an-alpha-channel-video-on-another-video). –


See the question linked above; the solution is to use an encoding method that supports an alpha channel, as described there. iOS cannot do this with its default H.264 encoding. – MoDJ

min should be max...

Replace this line:

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration)) 

with this line, and it will work:

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, max(firstAsset.duration, secondAsset.duration))
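
Why this works: AVFoundation requires a video composition's instructions to cover the entire timeline of the composition with no gaps. With min, whenever the two assets have different durations, the single instruction ends before the longer composition track does, and the export fails with -11841 (AVErrorInvalidVideoComposition). A minimal sketch of the corrected instruction setup, reusing the names from the question (mixComposition.duration would also work here, since both tracks start at kCMTimeZero):

// The single instruction must span the full composition; any gap in the
// timeline causes AVAssetExportSession to fail with error -11841.
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, max(firstAsset.duration, secondAsset.duration))
instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction]
videoComposition.instructions = [instruction]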