I've always felt that what I write is less technology than sentiment; each tutorial is a trace of the road I've traveled. Success built on professional skill is the most reproducible kind of success. I hope this road of mine spares you some detours, that I can wipe the dust off the knowledge and untangle its threads for you, and that the summit of technology will one day hold both you and me.
VideoPlay sample code
Official documentation link
1. Basic Concepts
1.1 AVPlayer
AVPlayer is the object that drives playback. It can play local files, progressively downloaded files, and streams delivered over HLS. It is an invisible component: on its own it is enough for audio such as an MP3 or the audio track of an MP4, but to put video on screen you need another class, AVPlayerLayer. AVPlayerLayer is a CALayer subclass that acts as the rendering surface for video frames and presents them to the user. When you create an AVPlayerLayer you pass it a reference to an AVPlayer, which ties the two together.
1.2 AVPlayerItem
Put simply, AVPlayerItem is a carrier: it wraps an AVAsset so that an AVPlayer can play it. To play a resource, you model its dynamic, playback-time state through AVPlayerItem and its AVPlayerItemTrack objects.
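To make the relationships concrete, here is a minimal sketch of the whole object graph ("movie.mp4" is just a placeholder file name, not from the sample project):
import AVFoundation
// Asset -> item -> player -> layer: the basic playback object graph.
let url = Bundle.main.url(forResource: "movie", withExtension: "mp4")!
let asset = AVURLAsset(url: url)           // static description of the media
let item = AVPlayerItem(asset: asset)      // dynamic playback state for that asset
let player = AVPlayer(playerItem: item)    // invisible playback controller
let layer = AVPlayerLayer(player: player)  // CALayer that renders the video frames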
2. Basic Usage
Loading a local MP4 video
Note: the video starts playing as soon as it loads, because setting player.rate below starts playback.
let filePath = Bundle.main.path(forResource: "SuchAs", ofType: "mp4")
let videoURL = URL(fileURLWithPath: filePath!)
playerItem = AVPlayerItem(url: videoURL)
player = AVPlayer(playerItem: playerItem)
player.rate = 1.0 // playback speed; set before playing (a nonzero rate starts playback)
// Create the layer that renders the video
let playerLayer = AVPlayerLayer(player: player)
playerLayer.videoGravity = .resizeAspect
playerLayer.frame = CGRect(x: 0, y: 0, width: 150, height: 300)
playView.layer.addSublayer(playerLayer)
player.pause() // pause playback
Loading a network video
let videoURL = URL(string: "http://n1cdn.miaopai.com/stream/1UKfVpOmazRYEb4fVejwhgpX~3uIxmHBV~8VCQ___0_1506471211.mp4?ssig=e9a0601e2c3261e5c6b6c91a1111ced3&time_stamp=1652542020536")
playerItem = AVPlayerItem(url: videoURL!)
player = AVPlayer(playerItem: playerItem)
player.rate = 1.0 // playback speed; set before playing (a nonzero rate starts playback)
// Create the layer that renders the video
let playerLayer = AVPlayerLayer(player: player)
playerLayer.videoGravity = .resizeAspect
playerLayer.frame = CGRect(x: 0, y: 0, width: 150, height: 300)
playView.layer.addSublayer(playerLayer)
player.pause() // pause playback
Observing playback completion
NotificationCenter.default.addObserver(self, selector: #selector(playToEndTime), name: .AVPlayerItemDidPlayToEndTime, object: nil)

// Called when playback reaches the end
@objc func playToEndTime() {
    print("Playback finished")
}

deinit {
    // Stop observing when this object is deallocated
    NotificationCenter.default.removeObserver(self)
}
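One detail worth noting: with object: nil the selector fires when any AVPlayerItem in the process finishes playing. Passing the specific item scopes the observation:
// object: playerItem means only this item's completion triggers playToEndTime
NotificationCenter.default.addObserver(self, selector: #selector(playToEndTime), name: .AVPlayerItemDidPlayToEndTime, object: playerItem)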
Restarting playback after it finishes (looping)
@objc func playToEndTime() {
    // Seek back to the start and play again
    player?.seek(to: CMTime(value: 0, timescale: 1))
    player?.play()
}
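If gapless looping is the goal, AVFoundation also ships AVPlayerLooper (iOS 10+), which works with AVQueuePlayer instead of a plain AVPlayer. A minimal sketch, assuming a freshly created playerItem (an AVPlayerItem can only be attached to one player at a time):
// AVPlayerLooper keeps the queue player topped up with copies of the template
// item, so the loop restarts without the visible seek of the approach above.
let queuePlayer = AVQueuePlayer()
let looper = AVPlayerLooper(player: queuePlayer, templateItem: playerItem)
queuePlayer.play()
// Hold a strong reference to `looper`; looping stops once it deallocates.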
Observing buffering status
// Observe the item's status (unknown / readyToPlay / failed)
self.playerItem.addObserver(self, forKeyPath: "status", options: .new, context: nil)
// Buffered time ranges; use this to see how much has been cached
self.playerItem.addObserver(self, forKeyPath: "loadedTimeRanges", options: .new, context: nil)
// The buffer ran dry; playback pauses automatically
self.playerItem.addObserver(self, forKeyPath: "playbackBufferEmpty", options: .new, context: nil)
// The buffer has recovered; resume playback manually
self.playerItem.addObserver(self, forKeyPath: "playbackLikelyToKeepUp", options: .new, context: nil)
// KVO callback
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    if keyPath == "status" {
        switch self.playerItem.status {
        case .readyToPlay:
            //player.play()
            print("ready to play")
        case .failed:
            print("failed")
        case .unknown:
            print("unknown")
        @unknown default:
            break
        }
    } else if keyPath == "loadedTimeRanges" {
        let loadTimeArray = self.playerItem.loadedTimeRanges
        // The most recently buffered range (loadedTimeRanges is an [NSValue])
        guard let newTimeRange = loadTimeArray.first?.timeRangeValue else { return }
        let startSeconds = CMTimeGetSeconds(newTimeRange.start)
        let durationSeconds = CMTimeGetSeconds(newTimeRange.duration)
        let totalBuffer = startSeconds + durationSeconds // total buffered length
        print("Buffered up to \(totalBuffer) seconds")
    } else if keyPath == "playbackBufferEmpty" {
        print("Buffering, please wait")
    } else if keyPath == "playbackLikelyToKeepUp" {
        print("Buffer recovered, safe to resume playback")
        //self.player.play()
    }
}
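String-based KVO is easy to get wrong: a typo in the key path compiles fine and silently observes nothing. Swift's block-based observe(_:options:changeHandler:) is a safer alternative; a sketch for the status key, assuming the same playerItem:
// Keep the returned observation alive; observing stops when it deallocates
var statusObservation: NSKeyValueObservation?

statusObservation = playerItem.observe(\.status, options: [.new]) { item, _ in
    switch item.status {
    case .readyToPlay: print("ready to play")
    case .failed:      print("failed: \(String(describing: item.error))")
    case .unknown:     print("unknown")
    @unknown default:  break
    }
}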
Tracking playback progress
// Periodic observer that fires once per second on the main queue, used to drive the slider
self.player.addPeriodicTimeObserver(forInterval: CMTimeMake(value: 1, timescale: 1), queue: DispatchQueue.main) { [weak self] time in
    // duration is indefinite until the item is ready, hence the isNumeric guard
    guard let self = self, let item = self.player.currentItem, item.duration.isNumeric else { return }
    // Current playback position
    let loadTime = CMTimeGetSeconds(time)
    // Total duration of the video
    let totalTime = CMTimeGetSeconds(item.duration)
    // Slider progress
    self.slider.value = Float(loadTime / totalTime)
    self.loadTimeLabel.text = self.changeTimeFormat(timeInterval: loadTime)
    self.totalTimeLabel.text = self.changeTimeFormat(timeInterval: totalTime)
}
// Convert seconds into an HH:MM:SS string
func changeTimeFormat(timeInterval: TimeInterval) -> String {
    return String(format: "%02d:%02d:%02d", Int(timeInterval) / 3600, (Int(timeInterval) % 3600) / 60, Int(timeInterval) % 60)
}
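The natural counterpart of the progress observer is seeking when the user drags the slider. A sketch, where sliderChanged is a hypothetical action wired to the slider's valueChanged event:
// Hypothetical slider action: map slider progress (0...1) back onto the timeline
@objc func sliderChanged(_ sender: UISlider) {
    guard let duration = player.currentItem?.duration, duration.isNumeric else { return }
    let target = CMTimeMultiplyByFloat64(duration, multiplier: Float64(sender.value))
    player.seek(to: target)
}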
3. Common Tasks
3.1 Grabbing the first frame of a video as an image
3.1.1 Local video
import UIKit
import AVFoundation

class ViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
        let thumb = UIImageView(frame: CGRect(x: 50, y: 200, width: 300, height: 200))
        self.view.addSubview(thumb)
        let filePath = Bundle.main.path(forResource: "SuchAs", ofType: "mp4")
        let videoURL = URL(fileURLWithPath: filePath!)
        let asset = AVURLAsset(url: videoURL, options: nil)
        let gen = AVAssetImageGenerator(asset: asset)
        // Apply the track's preferred transform so the frame is oriented correctly
        gen.appliesPreferredTrackTransform = true
        let time = CMTimeMakeWithSeconds(0.0, preferredTimescale: 1)
        var actualTime = CMTime.zero // receives the time the frame was actually taken from
        do {
            let image = try gen.copyCGImage(at: time, actualTime: &actualTime)
            thumb.image = UIImage(cgImage: image)
        } catch {
            print("Failed to grab the first frame: \(error)")
        }
    }
}
3.1.2 Network video
import UIKit
import AVFoundation

class ViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
        let thumb = UIImageView(frame: CGRect(x: 50, y: 200, width: 300, height: 200))
        self.view.addSubview(thumb)
        let asset = AVURLAsset(url: URL(string: "http://gslb.miaopai.com/stream/1UKfVpOmazRYEb4fVejwhgpX~3uIxmHBV~8VCQ__.mp4")!, options: nil)
        let gen = AVAssetImageGenerator(asset: asset)
        gen.appliesPreferredTrackTransform = true
        let time = CMTimeMakeWithSeconds(0.0, preferredTimescale: 1)
        var actualTime = CMTime.zero
        do {
            // copyCGImage is synchronous: it blocks here while the remote data loads
            let image = try gen.copyCGImage(at: time, actualTime: &actualTime)
            thumb.image = UIImage(cgImage: image)
        } catch {
            print("Failed to grab the first frame: \(error)")
        }
    }
}
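Because copyCGImage(at:actualTime:) is synchronous, for a network asset it blocks the calling thread while the media downloads. The asynchronous variant avoids that; a sketch reusing gen and thumb from viewDidLoad above:
// Request the first frame asynchronously; the handler runs on a background queue
gen.generateCGImagesAsynchronously(forTimes: [NSValue(time: .zero)]) { _, cgImage, _, result, error in
    guard result == .succeeded, let cgImage = cgImage else {
        print("Thumbnail failed: \(String(describing: error))")
        return
    }
    DispatchQueue.main.async {
        thumb.image = UIImage(cgImage: cgImage) // UI work back on the main thread
    }
}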