
iOS development: streaming video with AVPlayer (how to play network video on iOS)

As with audio playback, iOS provides many APIs for video: MPMoviePlayerController in MediaPlayer.framework, AVPlayer in AVFoundation.framework, and AVPlayerViewController in AVKit. MPMoviePlayerController was deprecated in iOS 9.0, and AVPlayerViewController is its replacement.

So below I will mainly introduce AVPlayer.
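For completeness, the AVPlayerViewController route mentioned above only takes a few lines. This is a minimal sketch of my own (the method name presentVideoWithAVPlayerViewController: is hypothetical; it is not code from this series):

#import <AVKit/AVKit.h>

-(void)presentVideoWithAVPlayerViewController:(NSURL *)url {
    //AVPlayerViewController supplies its own playback UI, so no AVPlayerLayer is needed
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:url];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play];
    }];
}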

AVPlayer can play both audio and video. It cannot display video on its own: the player has to be attached to an AVPlayerLayer, which in turn is added to a layer that is actually on screen (for example the view's layer).

To use AVPlayer (or AVAudioPlayer) you need to link AVFoundation.framework and import its header: #import <AVFoundation/AVFoundation.h>
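The listing below references self.avPlayer and self.avPlayerItem; their declarations are not repeated here (they presumably live in the previous article's ViewController). As a sketch, the code assumes properties roughly like these:

#import <AVFoundation/AVFoundation.h>

@interface ViewController ()
@property (nonatomic, strong) AVPlayer *avPlayer;
@property (nonatomic, strong) AVPlayerItem *avPlayerItem;
@end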

The main code is as follows (continuing from the previous article).

/*
 iOS 9 blocks plain HTTP by default (App Transport Security) and expects HTTPS; loading an http:// URL without an exception fails with:
 App Transport Security has blocked a cleartext HTTP (http://) resource load since it is insecure. Temporary exceptions can be configured via your app's Info.plist file.
 Therefore Info.plist needs the following entry added:
 <key>NSAppTransportSecurity</key>
 <dict>
 <key>NSAllowsArbitraryLoads</key>
 <true/>
 </dict>
 */
-(void)addAVPlayerLayer {
    //NSString *path = [[NSBundle mainBundle] pathForResource:@"183" ofType:@"wav"];
    //NSURL *url = [NSURL fileURLWithPath:path];
    //NSURL *url = [NSURL URLWithString:@"http://v.jxvdy.com/sendfile/w5bgP3A8JgiQQo5l0hvoNGE2H16WbN09X-ONHPq3P3C1BISgf7C-qVs6_c8oaw3zKScO78I--b0BGFBRxlpw13sf2e54QA"];
    NSURL *url = [NSURL URLWithString:@"http://static.tripbe.com/videofiles/20121214/9533522808.f4v.mp4"];
    
    //'MPMoviePlayerController' is deprecated: first deprecated in iOS 9.0 - Use AVPlayerViewController in AVKit
    //[self.view addSubview:[MPMoviePlayerController new].view];
    self.avPlayerItem = [AVPlayerItem playerItemWithURL:url];
    self.avPlayer = [AVPlayer playerWithPlayerItem:self.avPlayerItem];
    AVPlayerLayer *avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
    CGRect screenRect = [UIScreen mainScreen].bounds;
    avPlayerLayer.frame = CGRectMake(0, screenRect.size.height/2, screenRect.size.width, screenRect.size.height/2);
    /*
    AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspect NS_AVAILABLE(10_7, 4_0);
    AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspectFill NS_AVAILABLE(10_7, 4_0);
    AVF_EXPORT NSString *const AVLayerVideoGravityResize NS_AVAILABLE(10_7, 4_0);
     */
    avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    //avPlayerLayer.repeatCount = 1; //repeatCount comes from CAMediaTiming and has no effect on video playback; to loop, seek back to zero in the AVPlayerItemDidPlayToEndTimeNotification handler instead
    //avPlayerLayer.backgroundColor = [UIColor blueColor].CGColor;
    [self.view.layer addSublayer:avPlayerLayer];
    
    //Get notified when the item finishes playing
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(avPlayerDidEnd) name:AVPlayerItemDidPlayToEndTimeNotification object:self.avPlayerItem];
    /*
     status has three possible values:
     (1)AVPlayerItemStatusUnknown,
     (2)AVPlayerItemStatusReadyToPlay,
     (3)AVPlayerItemStatusFailed
     */
    [self.avPlayerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    //loadedTimeRanges reflects how much of the item has been buffered; observing it lets the UI update a buffering progress bar
    [self.avPlayerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];
    NSLog(@"AVPlayerItemStatus enum = {%ld, %ld, %ld}", AVPlayerItemStatusUnknown, AVPlayerStatusReadyToPlay, AVPlayerItemStatusFailed);
}

-(void)playAVPlayer {
    NSLog(@"AVPlayer play");
    [self.avPlayerItem seekToTime:kCMTimeZero];
    [self.avPlayer play];
}

-(void)avPlayerDidEnd {
    NSLog(@"AVPlayer end");
}
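avPlayerDidEnd only logs here, so the video stops on its last frame (see the screenshots at the end). If you want the video to loop instead, the usual approach is to seek back to the start inside this same handler; a minimal sketch of my own (the method name is hypothetical):

-(void)avPlayerDidEndAndLoop {
    //seek the finished item back to time zero and start playing again
    [self.avPlayerItem seekToTime:kCMTimeZero];
    [self.avPlayer play];
}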

//Declared in NSKeyValueObserving.h
//Given that the receiver has been registered as an observer of the value at a key path relative to an object, be notified of a change to that value.
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    NSLog(@"observeValueForKeyPath");
    AVPlayerItem *playerItem = (AVPlayerItem *)object;
    if([keyPath isEqualToString:@"status"]) {
        switch (playerItem.status) {
            case AVPlayerItemStatusUnknown:
                NSLog(@"AVPlayerItemStatusUnknown");
                break;
            case AVPlayerItemStatusReadyToPlay:
            {
                NSLog(@"AVPlayerItemStatusReadyToPlay");
                CMTime duration = playerItem.duration;
                //duration is a CMTime: value/timescale = seconds (the cast avoids truncating integer division); CMTimeGetSeconds performs the same conversion
                CGFloat totalSecond = duration.value / (CGFloat)duration.timescale;
                CGFloat totalSecond2 = CMTimeGetSeconds(duration);
                NSLog(@"AVPlayer getSeconds(value/timescale) = %f", totalSecond);
                NSLog(@"AVPlayer getSeconds(CMTimeGetSeconds) = %f", totalSecond2);
                NSString *totalTimeStr = [self convertTime:totalSecond];
                NSLog(@"AVPlayer totalTimeStr = %@", totalTimeStr);
                //Start periodic monitoring of the playback position
                [self monitoringPlayback:playerItem];
            }
                break;
            case AVPlayerItemStatusFailed:
                NSLog(@"AVPlayerItemStatusFailed");
                break;
            default:
                break;
        }
    } else if([keyPath isEqualToString:@"loadedTimeRanges"]) {
        //Compute the buffering progress
        NSTimeInterval timeInterval = [self aVPlayerLoadedEndTime];
        NSLog(@"AVPlayer timeInterval = %f", timeInterval);
        CMTime duration = playerItem.duration;
        CGFloat totalDuration = CMTimeGetSeconds(duration);
        CGFloat progress = timeInterval / totalDuration;
        NSLog(@"AVPlayer loadedTimeRanges = %f", progress);
    }
}
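The KVO and notification observers registered in addAVPlayerLayer should be removed before the controller or the player item goes away, otherwise a later change can call into a deallocated observer. A minimal cleanup sketch (not part of the original code):

-(void)dealloc {
    [self.avPlayerItem removeObserver:self forKeyPath:@"status"];
    [self.avPlayerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}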

-(NSString *)convertTime:(CGFloat)second {
    //second is the elapsed media time in seconds; the 1970 epoch is reused here purely so NSDateFormatter can format it
    NSDate *date = [NSDate dateWithTimeIntervalSince1970:second];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    //format in GMT, otherwise the local time zone offset distorts the hour field
    formatter.timeZone = [NSTimeZone timeZoneForSecondsFromGMT:0];
    if(second / 3600 >= 1) {
        [formatter setDateFormat:@"HH:mm:ss"];
    } else {
        [formatter setDateFormat:@"mm:ss"];
    }
    NSString *timeStr = [formatter stringFromDate:date];
    return timeStr;
}
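The NSDate/NSDateFormatter trick works, but the same formatting can be done with plain integer arithmetic, which avoids any time-zone concerns. A hypothetical alternative helper:

-(NSString *)convertTimeWithoutDateFormatter:(CGFloat)second {
    NSInteger total   = (NSInteger)second;
    NSInteger hours   = total / 3600;
    NSInteger minutes = (total % 3600) / 60;
    NSInteger seconds = total % 60;
    if (hours > 0) {
        return [NSString stringWithFormat:@"%02ld:%02ld:%02ld", (long)hours, (long)minutes, (long)seconds];
    }
    return [NSString stringWithFormat:@"%02ld:%02ld", (long)minutes, (long)seconds];
}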

-(void)monitoringPlayback:(AVPlayerItem *)playerItem {
    __weak typeof(self) weakSelf = self;
    //CMTimeMake(a,b): a is the time value, b the timescale (units per second); the time in seconds is a/b
    //CMTimeMakeWithSeconds(a,b): a is the time in seconds, b the timescale; the stored value is a*b
    //"Periodic" means the block fires repeatedly; passing NULL for the queue runs it on the main queue
    [self.avPlayer addPeriodicTimeObserverForInterval:CMTimeMake(1, 1) queue:NULL usingBlock:^(CMTime time) {
        //current playback position in seconds
        //weakSelf is used because capturing 'self' strongly in this block would create a retain cycle
        CGFloat currentSecond = CMTimeGetSeconds(weakSelf.avPlayerItem.currentTime);
        NSString *currentTimeStr = [weakSelf convertTime:currentSecond];
        NSLog(@"AVPlayer monitoringPlayback currentSecond = %f (%@)", currentSecond, currentTimeStr);
    }];
}
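addPeriodicTimeObserverForInterval:queue:usingBlock: returns an opaque observer object, and AVFoundation expects it to be removed with removeTimeObserver: once it is no longer needed. Assuming a hypothetical timeObserver property that stores that return value, cleanup could look like this sketch:

-(void)stopMonitoringPlayback {
    if (self.timeObserver) {
        //must be balanced with the addPeriodicTimeObserverForInterval: call above
        [self.avPlayer removeTimeObserver:self.timeObserver];
        self.timeObserver = nil;
    }
}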

//Returns how far the item has buffered, as an end time in seconds from the start of the item
-(NSTimeInterval)aVPlayerLoadedEndTime {
    //NSValue is a simple container for a single C or Objective-C data item (scalars, pointers, structs, object ids)
    //Its purpose is to let such values be stored in Foundation collections such as NSArray
    NSArray<NSValue *> *loadedTimeRanges = self.avPlayer.currentItem.loadedTimeRanges;
    NSLog(@"AVPlayer timeRanges = %@", loadedTimeRanges);
    /*
    Each entry describes one buffered region:
    typedef struct
    {
        CMTime			start;		//@field start The start time of the time range.
        CMTime			duration;	// @field duration The duration of the time range.
    } CMTimeRange;
    */
    CMTimeRange timeRange = [loadedTimeRanges.firstObject CMTimeRangeValue];
    //Converts a CMTime to seconds.
    float startSeconds = CMTimeGetSeconds(timeRange.start);
    float durationSeconds = CMTimeGetSeconds(timeRange.duration);
    NSTimeInterval endSeconds = startSeconds + durationSeconds;
    return endSeconds;
}

Run output and screenshots

When the app launches, before the play button is tapped, the first frame of the video is displayed.

(Screenshot 1: the first frame shown before playback starts)

(Some console output omitted here ...)

As the output shows, the video plays back normally even when "Play" is tapped before buffering has finished, which shows that AVPlayer is a network player, i.e. a streaming player that can play while the media is still downloading.

The left screenshot shows tapping "Play" only after everything has buffered; the right one shows tapping "Play" before buffering is complete.

(Screenshot 2: output when "Play" is tapped after buffering completes)

(Screenshot 3: output when "Play" is tapped before buffering completes)

(Some console output omitted here ...)

The periodic time observer then logs the current playback position once per second.

(Screenshot 4: the periodic playback-position log)

After playback finishes, the video stays on the last frame.

