How can I create a dummy AVPlayerItem with a real duration?

Problem description:

I'm playing CAKeyframeAnimations on an AVSynchronizedLayer driven by an AVPlayer. To keep the player running for the length of the animations without actually playing an AVAsset, I set the AVPlayerItem's forwardPlaybackEndTime to the desired animation duration. Unfortunately, seekToTime: doesn't seem to work within that forwardPlaybackEndTime: the AVPlayer always snaps back to the beginning, presumably because it only seeks within the AVPlayerItem's real duration.

How can I create a dummy AVPlayerItem with a real duration, so that the AVPlayer is tricked into playing an empty AVPlayerItem and lets me use seekToTime:?
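
For reference, this is roughly the setup described above, as a minimal sketch; the asset URL and the ten-second animation duration are placeholder assumptions, not values from the question:

AVPlayerItem * item = [AVPlayerItem playerItemWithURL:assetURL]; // assetURL: any short placeholder asset
// Keep the player "playing" past the end of the asset for the length of the animation.
item.forwardPlaybackEndTime = CMTimeMakeWithSeconds(10.0, 600);
AVPlayer * player = [AVPlayer playerWithPlayerItem:item];
// Seeking into the forwardPlaybackEndTime region snaps back to the start,
// because the seek is clamped to the item's real duration.
[player seekToTime:CMTimeMakeWithSeconds(5.0, 600)];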

Unfortunately, seekToTime: will only seek within the AVPlayerItem's duration, so you need a dummy player item that actually reports a seekable duration. The quickest way is to generate a tiny placeholder AVAsset of the desired length and build the player item from that. Below is an example implementation that generates such an asset. It's long, but it's necessary. Good luck!

@interface FakeAsset() 

+ (CVPixelBufferRef)blackImagePixelBuffer; 

@end 

@implementation FakeAsset 

+ (void)assetWithDuration:(CMTime)duration
          completionBlock:(void (^)(AVAsset *))callBack
{
    NSError * error = nil;
    NSString * assetPath = nil;
    NSUInteger i = 0;
    // Find a temporary file name that is not already taken.
    do
    {
        assetPath =
        [NSTemporaryDirectory() stringByAppendingPathComponent:
         [NSString stringWithFormat:@"dummyAsset%lu.m4v", (unsigned long)i]];
        i++;
    }
    while ([[NSFileManager defaultManager] fileExistsAtPath:assetPath]);

    NSURL * fileURL = [NSURL fileURLWithPath:assetPath]; 

    NSParameterAssert(fileURL); 

    AVAssetWriter * videoWriter = 
    [[AVAssetWriter alloc] initWithURL:fileURL 
           fileType:AVFileTypeAppleM4V 
           error:&error]; 
    NSParameterAssert(videoWriter); 

    // Minimal compression settings: the placeholder video only needs to exist,
    // so keep the bitrate as low as possible.
    NSDictionary * compression =
    @{
    AVVideoAverageBitRateKey      : @10,
    AVVideoProfileLevelKey        : AVVideoProfileLevelH264Main31,
    AVVideoMaxKeyFrameIntervalKey : @300
    };

    NSDictionary * outputSettings = 
    @{ 
    AVVideoCodecKey     : AVVideoCodecH264, 
    AVVideoCompressionPropertiesKey : compression, 
    AVVideoWidthKey     : @120, 
    AVVideoHeightKey    : @80 
    }; 

    AVAssetWriterInput * videoWriterInput = 
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo 
             outputSettings:outputSettings]; 
    NSParameterAssert(videoWriterInput); 

    NSDictionary * parameters = 
    @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB), 
     (NSString *)kCVPixelBufferWidthKey   : @120, 
     (NSString *)kCVPixelBufferHeightKey   : @80 
     }; 

    AVAssetWriterInputPixelBufferAdaptor * adaptor = 
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput 
                    sourcePixelBufferAttributes:parameters]; 
    NSParameterAssert(adaptor); 
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]); 

    videoWriterInput.expectsMediaDataInRealTime = NO; 

    [videoWriter addInput:videoWriterInput]; 

    // Do not call -startWriting inside an assert: asserts are compiled out in release builds.
    BOOL didStartWriting = [videoWriter startWriting];
    NSParameterAssert(didStartWriting);

    [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

    dispatch_queue_t dispatchQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); 

    [videoWriterInput requestMediaDataWhenReadyOnQueue:dispatchQueue 
              usingBlock:^ 
    { 
     int frame = 0;
     while (videoWriterInput.isReadyForMoreMediaData)
     {
      if (frame < 2)
      {
       // Only two frames are written: one at time zero and one at `duration`,
       // which is enough to give the asset the requested length.
       CMTime frameTime = frame ? duration : kCMTimeZero;
       CVPixelBufferRef buffer = [self blackImagePixelBuffer]; 

       [adaptor appendPixelBuffer:buffer 
         withPresentationTime:frameTime]; 

       CVBufferRelease(buffer); 

       ++frame; 
      } 
      else 
      { 
       [videoWriterInput markAsFinished]; 
       [videoWriter endSessionAtSourceTime:duration]; 

       dispatch_async(dispatch_get_main_queue(),^
       { 
        [videoWriter finishWritingWithCompletionHandler:^() 
        { 
         NSLog(@"did finish writing the video!"); 
         AVURLAsset * asset = 
         [AVURLAsset assetWithURL:videoWriter.outputURL]; 
         callBack(asset); 
        }]; 
       }); 
       break; 
      } 
     } 
    }]; 
} 

// Returns a retained 120x80 ARGB pixel buffer filled with black; the caller releases it.
+ (CVPixelBufferRef)blackImagePixelBuffer
{
    NSDictionary * options = 
    @{ 
     (id)kCVPixelBufferCGImageCompatibilityKey   : @YES, 
     (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES 
     }; 

    CVPixelBufferRef pxbuffer = NULL; 
    CVReturn status = 
    CVPixelBufferCreate(kCFAllocatorDefault, 120, 80, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer); 

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0); 

    void * pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 
    NSParameterAssert(pxdata != NULL); 

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
    // kCGImageAlphaNoneSkipFirst: the buffer is opaque black, so no alpha is needed
    // (kCGImageAlphaPremultipliedFirst would also work).
    CGContextRef context = CGBitmapContextCreate(pxdata, 120, 80, 8, 4*120, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaNoneSkipFirst); 

    NSParameterAssert(context); 
    CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor); 
    CGContextFillRect(context,CGRectMake(0.f, 0.f, 120.f, 80.f)); 
    CGColorSpaceRelease(rgbColorSpace); 
    CGContextRelease(context); 

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

    return pxbuffer; 
} 
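
A quick usage sketch; the ten-second duration and the seek time here are assumptions for illustration, not part of the original answer:

[FakeAsset assetWithDuration:CMTimeMakeWithSeconds(10.0, 600)
             completionBlock:^(AVAsset * asset)
{
    AVPlayerItem * item = [AVPlayerItem playerItemWithAsset:asset];
    AVPlayer * player = [AVPlayer playerWithPlayerItem:item];
    // The item now reports a real ten-second duration, so seeking no longer snaps back to zero.
    [player seekToTime:CMTimeMakeWithSeconds(5.0, 600)];
}];

Note that the completion callback is not guaranteed to arrive on the main queue, so hop back to the main queue before touching the player or any layers.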

I think it would be easier if you just created an AVMutableComposition and, using insertEmptyTimeRange:, inserted an empty range of the desired duration into an AVMutableCompositionTrack.

Then use that composition to create your AVPlayerItem with playerItemWithAsset:, since it is a subclass of AVAsset.

This doesn't require generating and writing anything out and then reading it back, and it's far less code.
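
A sketch of that suggestion, assuming a ten-second duration (this illustrates the answer above and was not part of the original post):

AVMutableComposition * composition = [AVMutableComposition composition];
AVMutableCompositionTrack * videoTrack =
[composition addMutableTrackWithMediaType:AVMediaTypeVideo
                         preferredTrackID:kCMPersistentTrackID_Invalid];
// An empty range is enough to make the composition report a duration.
[videoTrack insertEmptyTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(10.0, 600))];
AVPlayerItem * item = [AVPlayerItem playerItemWithAsset:composition];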

Except that your solution doesn't work. I tried it a long time ago. An empty time range is... empty. –

I tried it just before I gave my answer. I'm working on an app that processes videos, and just as a test I changed my composition to insert an empty range instead of adding a real asset, and it worked. I even tried leaving only the audio from the asset, and that was fine too. I know it's an empty time range, but it's still a video track and it behaves as one. The only thing I noticed is that the player and the player item take longer to reach a playable status than with a real asset. – kacho