How to loop an AVPlayer from 4 seconds to 8 seconds in Swift 3?

Problem description:

I have an AVPlayer playing a video in Swift 3. The problem is that I want to loop a section from A seconds to B seconds (for example from 4 to 8 seconds). Here is my looping code, but it does not work:

NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: self.Player.currentItem, queue: nil, using: { (_) in
    DispatchQueue.main.async {
        self.Player.seek(to: kCMTimeZero)
        self.Player.play()
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 4.0) {
            // check if player is still playing
            if self.Player.rate != 0 {
                print("OK")
                print("Player reached 4.0 seconds")
                let timeScale = self.Player.currentItem?.asset.duration.timescale
                let time = CMTimeMakeWithSeconds(8.0, timeScale!)
                self.Player.seek(to: time, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
                self.Player.play()
            }
        }
    }
})

The problem is that nothing happens until the player has already finished the whole movie.

You can add a periodic time observer to watch the current time. Your loop does not work because it waits for AVPlayerItemDidPlayToEndTime, so the print("OK") never fires until the whole item has played:

let timeObserverToken = player.addPeriodicTimeObserver(forInterval: someInterval, queue: DispatchQueue.main) { [unowned self] time in
    let seconds = CMTimeGetSeconds(time)
    if seconds >= 8.0 {
        // jump back to 4 seconds
        // do stuff
    }
}

What should I pass for forInterval? I used init(), but I get the error libc++abi.dylib: terminating with uncaught exception of type NSException –
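Regarding the forInterval question, here is a minimal sketch (the SectionLooper class and its API are made up for illustration, not part of the answer) that wraps the periodic-observer idea. The key point is that forInterval must be a valid CMTime; passing an uninitialized CMTime() is a typical source of that NSException:

import AVFoundation

// A minimal sketch: loops an existing AVPlayer between two points
// using a periodic time observer. Class name and API are hypothetical.
final class SectionLooper {
    private let player: AVPlayer
    private var token: Any?

    init(player: AVPlayer, from start: Double, to end: Double) {
        self.player = player
        // forInterval must be a valid CMTime; an uninitialized CMTime()
        // is a likely cause of the NSException mentioned above.
        let interval = CMTime(seconds: 0.1, preferredTimescale: 600)
        token = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
            if CMTimeGetSeconds(time) >= end {
                // Jump back to the start of the section with zero tolerance.
                let startTime = CMTime(seconds: start, preferredTimescale: 600)
                self?.player.seek(to: startTime, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
            }
        }
    }

    deinit {
        if let token = token { player.removeTimeObserver(token) }
    }
}

// Usage with the bounds from the question; keep the looper alive while looping:
// let looper = SectionLooper(player: self.Player, from: 4.0, to: 8.0)

Seeking this way is not perfectly gapless, which is what the looper and composition approaches below are for.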

There are a couple of options:

If you want seamless playback, you can start with:

Pre-iOS 10: https://developer.apple.com/library/content/samplecode/avloopplayer/Introduction/Intro.html

iOS 10+: https://developer.apple.com/documentation/avfoundation/avplayerlooper
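For the iOS 10+ route, the time range can be handed straight to the looper's initializer. A minimal sketch, assuming videoURL points at the video and the player layer gets attached to a view elsewhere (the 4- and 8-second bounds are the ones from the question):

import AVFoundation

let asset = AVURLAsset(url: videoURL)          // videoURL is assumed to exist
let templateItem = AVPlayerItem(asset: asset)
let player = AVQueuePlayer()

// Restrict looping to the 4 s - 8 s window from the question.
let start = CMTime(seconds: 4.0, preferredTimescale: 600)
let end = CMTime(seconds: 8.0, preferredTimescale: 600)
let looper = AVPlayerLooper(player: player, templateItem: templateItem, timeRange: CMTimeRange(start: start, end: end))

player.play()
// Keep strong references to both player and looper; if the looper is deallocated, looping stops.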

The pre-iOS 10 "solution" from Apple does work, and it is the only way I have found to get gapless looping since I am targeting iOS 9.

If you use that solution, you also need to either hand it an AVPlayerItem of the right length, or add to the solution so that it trims the asset before handing it to the player. For that you can do something like the changes I made to Apple's code below (sorry if it is a bit sparse; I am just trying to show the main changes): basically pass in the track and the time range to use, and then build an AVMutableCompositionTrack from that. (I took out all the video-specific parts in my own version; you will want to keep those in there.)

import AVFoundation

class myClass: someClass {
    var loopPlayer: QueuePlayerLooper!

    var avAssetLength: Int64!
    var avAssetTimescale: CMTimeScale!
    var avAssetTimeRange: CMTimeRange!

    let composition = AVMutableComposition()
    var playerItem: AVPlayerItem!

    // Set this from avAsset.tracks(withMediaType: AVMediaTypeVideo).first before calling createCMTimeRange
    var avAssetTrack: AVAssetTrack!

    var compAudioTrack: AVMutableCompositionTrack!

    var uurl: URL!
    var avAsset: AVURLAsset!

    // Loop bounds in seconds (4.0 and 8.0 in the question's case)
    var a_playbackPosition: TimeInterval = 4.0
    var b_playbackPosition: TimeInterval = 8.0

    // Builds the CMTimeRange that the looper should restrict playback to
    func createCMTimeRange(start: TimeInterval, end: TimeInterval) -> CMTimeRange {
        avAssetTimescale = avAssetTrack.naturalTimeScale

        let a = CMTime(seconds: start, preferredTimescale: avAssetTimescale)
        let b = CMTime(seconds: end, preferredTimescale: avAssetTimescale)
        return CMTimeRange(start: a, end: b)
    }

    func startLoopingSection(in parentLayer: CALayer) {
        loopPlayer = QueuePlayerLooper(videoURL: uurl,
                                       loopCount: -1,
                                       timeRange: createCMTimeRange(start: a_playbackPosition, end: b_playbackPosition))
        loopPlayer.start(in: parentLayer)
    }
}

//--==--==--==--==--==--==--==--==-- 

/* 
Copyright (C) 2016 Apple Inc. All Rights Reserved. 
See LICENSE.txt for this sample’s licensing information 

Abstract: 
An object that uses AVQueuePlayer to loop a video. 
*/ 

// Marked changed code with ++ 

class QueuePlayerLooper : NSObject, Looper { 
    // MARK: Types 

    private struct ObserverContexts { 
     static var playerStatus = 0 

     static var playerStatusKey = "status" 

     static var currentItem = 0 

     static var currentItemKey = "currentItem" 

     static var currentItemStatus = 0 

     static var currentItemStatusKey = "currentItem.status" 

     static var urlAssetDurationKey = "duration" 

     static var urlAssetPlayableKey = "playable" 
    } 

    // MARK: Properties 

    private var player: AVQueuePlayer? 

    private var playerLayer: AVPlayerLayer? 

    private var isObserving = false 

    private var numberOfTimesPlayed = 0 

    private let numberOfTimesToPlay: Int 

    private let videoURL: URL 

    ++var assetTimeRange: CMTimeRange!
    ++let composition = AVMutableComposition()
    ++var currentTrack: AVAssetTrack!



    // MARK: Looper 

    required init(videoURL: URL, loopCount: Int, ++timeRange: CMTimeRange) {
        self.videoURL = videoURL
        self.numberOfTimesToPlay = loopCount
        ++self.assetTimeRange = timeRange
        super.init()
    }

    func start(in parentLayer: CALayer) {
        stop()

        player = AVQueuePlayer()
        playerLayer = AVPlayerLayer(player: player)

        guard let playerLayer = playerLayer else { fatalError("Error creating player layer") }
        playerLayer.frame = parentLayer.bounds
        parentLayer.addSublayer(playerLayer)

        let videoAsset = AVURLAsset(url: videoURL)

        // Trim the asset down to the requested time range by inserting it into the composition.
        ++let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        ++currentTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first
        ++try! compositionTrack.insertTimeRange(assetTimeRange, of: currentTrack, at: CMTimeMake(0, 1))

        videoAsset.loadValuesAsynchronously(forKeys: [ObserverContexts.urlAssetDurationKey, ObserverContexts.urlAssetPlayableKey]) {
            /*
                The asset invokes its completion handler on an arbitrary queue
                when loading is complete. Because we want to access our AVQueuePlayer
                in our ensuing set-up, we must dispatch our handler to the main
                queue.
            */
            DispatchQueue.main.async(execute: {
                var durationError: NSError?
                let durationStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetDurationKey, error: &durationError)
                guard durationStatus == .loaded else { fatalError("Failed to load duration property with error: \(durationError)") }

                var playableError: NSError?
                let playableStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetPlayableKey, error: &playableError)
                guard playableStatus == .loaded else { fatalError("Failed to read playable duration property with error: \(playableError)") }

                guard videoAsset.isPlayable else {
                    print("Can't loop since asset is not playable")
                    return
                }

                guard CMTimeCompare(videoAsset.duration, CMTime(value: 1, timescale: 100)) >= 0 else {
                    print("Can't loop since asset duration too short. Duration is \(CMTimeGetSeconds(videoAsset.duration)) seconds")
                    return
                }

                /*
                    Based on the duration of the asset, we decide the number of player
                    items to add to demonstrate gapless playback of the same asset.
                */
                let numberOfPlayerItems = (Int)(1.0 / CMTimeGetSeconds(videoAsset.duration)) + 2

                for _ in 1...numberOfPlayerItems {
                    let loopItem = AVPlayerItem(asset: ++self.composition)
                    self.player?.insert(loopItem, after: nil)
                }

                self.startObserving()
                self.numberOfTimesPlayed = 0
                self.player?.play()
            })
        }
    }

    // The remaining methods from Apple's sample (stop(), startObserving(), and so on) are unchanged and omitted here.
}

First, I have a video player, not an audio player. Second, I want the loop from, for example, 4.0 seconds to 8.0 seconds of the video to start when the user presses a button –


Everything I linked does video by default. When you create the AVPlayerLooper you can pick the time range right in the init. For pre-iOS 10 you need to use something like a composition to tell it which part to play – solenoid


But I cannot see where in your code the video gets looped between 4.0 and 8.0 seconds –
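For what it is worth, in the pre-iOS 10 code above the 4.0 and 8.0 values only appear at the call site, as the range handed to the looper. A hypothetical example using the names from the myClass sketch (view.layer stands in for whichever layer should host the video, and avAsset, avAssetTrack and uurl are assumed to be set up beforehand):

// The 4 s - 8 s window is the CMTimeRange passed to QueuePlayerLooper,
// which start(in:) then inserts into the composition the AVQueuePlayer plays.
loopPlayer = QueuePlayerLooper(videoURL: uurl,
                               loopCount: -1,
                               timeRange: createCMTimeRange(start: 4.0, end: 8.0))
loopPlayer.start(in: view.layer)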