Initial commit: SimVision tvOS streaming app
Features: - VOD library with movie grouping and version detection - TV show library with season/episode organization - TMDB integration for trending shows and recently aired episodes - Recent releases section with TMDB release date sorting - Watch history tracking with continue watching - Playlist caching (12-hour TTL) for offline support - M3U playlist parsing with XStream API support - Authentication with credential storage Technical: - SwiftUI for tvOS - Actor-based services for thread safety - Persistent caching for playlists, TMDB data, and watch history - KSPlayer integration for video playback Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
592
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSAVPlayer.swift
Normal file
592
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSAVPlayer.swift
Normal file
@@ -0,0 +1,592 @@
|
||||
import AVFoundation
|
||||
import AVKit
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
|
||||
public typealias UIImage = NSImage
|
||||
#endif
|
||||
import Combine
|
||||
import CoreGraphics
|
||||
|
||||
/// Host view for an `AVQueuePlayer`, backed by an `AVPlayerLayer`.
/// On UIKit platforms the layer class is overridden; on AppKit the layer is swapped in at init.
public final class KSAVPlayerView: UIView {
    public let player = AVQueuePlayer()

    override public init(frame: CGRect) {
        super.init(frame: frame)
        #if !canImport(UIKit)
        layer = AVPlayerLayer()
        #endif
        playerLayer.player = player
        // Favor immediate playback start over stall avoidance.
        player.automaticallyWaitsToMinimizeStalling = false
    }

    @available(*, unavailable)
    public required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Bridges `UIViewContentMode` to the player layer's `videoGravity`.
    override public var contentMode: UIViewContentMode {
        get {
            switch playerLayer.videoGravity {
            case .resize:
                return .scaleToFill
            case .resizeAspectFill:
                return .scaleAspectFill
            default:
                // .resizeAspect and any future gravity values map to aspect-fit.
                return .scaleAspectFit
            }
        }
        set {
            switch newValue {
            case .scaleToFill:
                playerLayer.videoGravity = .resize
            case .scaleAspectFill:
                playerLayer.videoGravity = .resizeAspectFill
            case .scaleAspectFit, .center:
                // .center has no gravity equivalent; fall back to aspect-fit, as before.
                playerLayer.videoGravity = .resizeAspect
            default:
                break
            }
        }
    }

    #if canImport(UIKit)
    override public class var layerClass: AnyClass { AVPlayerLayer.self }
    #endif

    fileprivate var playerLayer: AVPlayerLayer {
        // swiftlint:disable force_cast
        layer as! AVPlayerLayer
        // swiftlint:enable force_cast
    }
}
|
||||
|
||||
/// AVFoundation-backed media player. Wraps an `AVQueuePlayer` (via `KSAVPlayerView`)
/// and reports buffering, load and playback state changes to a `MediaPlayerDelegate`.
@MainActor
public class KSAVPlayer {
    // Subscription to options.$preferredForwardBufferDuration; replaced when options change.
    private var cancellable: AnyCancellable?
    /// Player options. Replacing them re-applies the forward-buffer preference to the
    /// current item and re-subscribes to future changes of that option.
    private var options: KSOptions {
        didSet {
            player.currentItem?.preferredForwardBufferDuration = options.preferredForwardBufferDuration
            cancellable = options.$preferredForwardBufferDuration.sink { [weak self] newValue in
                self?.player.currentItem?.preferredForwardBufferDuration = newValue
            }
        }
    }

    private let playerView = KSAVPlayerView()
    private var urlAsset: AVURLAsset
    // Pending seek target; non-zero while a seek is in flight (reported as currentPlaybackTime).
    private var shouldSeekTo = TimeInterval(0)
    private var playerLooper: AVPlayerLooper?
    // KVO tokens for the current AVPlayerItem; invalidated/recreated in observer(playerItem:).
    private var statusObservation: NSKeyValueObservation?
    private var loadedTimeRangesObservation: NSKeyValueObservation?
    private var bufferEmptyObservation: NSKeyValueObservation?
    private var likelyToKeepUpObservation: NSKeyValueObservation?
    private var bufferFullObservation: NSKeyValueObservation?
    private var itemObservation: NSKeyValueObservation?
    private var loopCountObservation: NSKeyValueObservation?
    private var loopStatusObservation: NSKeyValueObservation?
    private var mediaPlayerTracks = [AVMediaPlayerTrack]()
    // Setting a non-nil error immediately reports a failed finish to the delegate.
    private var error: Error? {
        didSet {
            if let error {
                delegate?.finish(player: self, error: error)
            }
        }
    }

    // Stored as Any? because KSPictureInPictureController requires tvOS 14+.
    private lazy var _pipController: Any? = {
        if #available(tvOS 14.0, *) {
            let pip = KSPictureInPictureController(playerLayer: playerView.playerLayer)
            return pip
        } else {
            return nil
        }
    }()

    @available(tvOS 14.0, *)
    public var pipController: KSPictureInPictureController? {
        _pipController as? KSPictureInPictureController
    }

    // Size of the first playable video track; .zero until ready (set in updateStatus).
    public var naturalSize: CGSize = .zero
    public let dynamicInfo: DynamicInfo? = nil
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, *)
    public var playbackCoordinator: AVPlaybackCoordinator {
        playerView.player.playbackCoordinator
    }

    /// Buffering progress as a percentage (0–100); changes are forwarded to the delegate.
    public private(set) var bufferingProgress = 0 {
        didSet {
            delegate?.changeBuffering(player: self, progress: bufferingProgress)
        }
    }

    public weak var delegate: MediaPlayerDelegate?
    public private(set) var duration: TimeInterval = 0
    // Estimated size in bytes: sum of track data rates (bits/s) × duration ÷ 8.
    public private(set) var fileSize: Double = 0
    // End of the loaded time range containing the playhead, in seconds.
    public private(set) var playableTime: TimeInterval = 0
    public let chapters: [Chapter] = []
    /// Desired playback rate; applied to the AVPlayer only while actually playing.
    public var playbackRate: Float = 1 {
        didSet {
            if playbackState == .playing {
                player.rate = playbackRate
            }
        }
    }

    public var playbackVolume: Float = 1.0 {
        didSet {
            if player.volume != playbackVolume {
                player.volume = playbackVolume
            }
        }
    }

    /// Buffering state. Transitions re-evaluate play/pause and reset progress when
    /// entering .loading or .idle.
    public private(set) var loadState = MediaLoadState.idle {
        didSet {
            if loadState != oldValue {
                playOrPause()
                if loadState == .loading || loadState == .idle {
                    bufferingProgress = 0
                }
            }
        }
    }

    /// Requested playback state. Transitions re-evaluate play/pause; .finished
    /// reports a successful finish (error == nil) to the delegate.
    public private(set) var playbackState = MediaPlaybackState.idle {
        didSet {
            if playbackState != oldValue {
                playOrPause()
                if playbackState == .finished {
                    delegate?.finish(player: self, error: nil)
                }
            }
        }
    }

    /// Becomes true once the item reaches .readyToPlay; stamps readyTime and notifies the delegate.
    public private(set) var isReadyToPlay = false {
        didSet {
            if isReadyToPlay != oldValue {
                if isReadyToPlay {
                    options.readyTime = CACurrentMediaTime()
                    delegate?.readyToPlay(player: self)
                }
            }
        }
    }

    // External (AirPlay) playback is unavailable on xrOS; stub the API there.
    #if os(xrOS)
    public var allowsExternalPlayback = false
    public var usesExternalPlaybackWhileExternalScreenIsActive = false
    public let isExternalPlaybackActive = false
    #else
    public var allowsExternalPlayback: Bool {
        get {
            player.allowsExternalPlayback
        }
        set {
            player.allowsExternalPlayback = newValue
        }
    }

    #if os(macOS)
    public var usesExternalPlaybackWhileExternalScreenIsActive = false
    #else
    public var usesExternalPlaybackWhileExternalScreenIsActive: Bool {
        get {
            player.usesExternalPlaybackWhileExternalScreenIsActive
        }
        set {
            player.usesExternalPlaybackWhileExternalScreenIsActive = newValue
        }
    }
    #endif

    public var isExternalPlaybackActive: Bool {
        player.isExternalPlaybackActive
    }
    #endif

    /// Creates a player for `url`. Observation of the queue player's current item drives
    /// all per-item KVO setup (see observer(playerItem:)).
    public required init(url: URL, options: KSOptions) {
        KSOptions.setAudioSession()
        urlAsset = AVURLAsset(url: url, options: options.avOptions)
        self.options = options
        itemObservation = player.observe(\.currentItem) { [weak self] player, _ in
            guard let self else { return }
            self.observer(playerItem: player.currentItem)
        }
    }
}
|
||||
|
||||
// MARK: - Item lifecycle, KVO and buffering plumbing

extension KSAVPlayer {
    public var player: AVQueuePlayer { playerView.player }
    public var playerLayer: AVPlayerLayer { playerView.playerLayer }

    /// AVPlayerItemDidPlayToEndTime handler; loop playback suppresses the .finished transition.
    @objc private func moviePlayDidEnd(notification _: Notification) {
        if !options.isLoopPlay {
            playbackState = .finished
        }
    }

    /// AVPlayerItemFailedToPlayToEndTime handler. Extracts the most specific error
    /// available from userInfo ("error" object, the AVFoundation error key, or a bare
    /// error code) and reports the failure to the delegate.
    @objc private func playerItemFailedToPlayToEndTime(notification: Notification) {
        var playError: Error?
        if let userInfo = notification.userInfo {
            if let error = userInfo["error"] as? Error {
                playError = error
            } else if let error = userInfo[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError {
                playError = error
            } else if let errorCode = (userInfo["error"] as? NSNumber)?.intValue {
                playError = NSError(domain: "AVMoviePlayer", code: errorCode, userInfo: nil)
            }
        }
        delegate?.finish(player: self, error: playError)
    }

    /// Reacts to item status changes: on .readyToPlay captures tracks, natural size,
    /// duration and estimated file size; on .failed propagates the item's error.
    private func updateStatus(item: AVPlayerItem) {
        if item.status == .readyToPlay {
            options.findTime = CACurrentMediaTime()
            mediaPlayerTracks = item.tracks.map {
                AVMediaPlayerTrack(track: $0)
            }
            let playableVideo = mediaPlayerTracks.first {
                $0.mediaType == .video && $0.isPlayable
            }
            if let playableVideo {
                naturalSize = playableVideo.naturalSize
            } else {
                // No playable video track; surface as an error rather than playing audio-only.
                error = NSError(errorCode: .videoTracksUnplayable)
                return
            }
            // Enable only the first audio track by default.
            item.tracks.filter { $0.assetTrack?.mediaType.rawValue == AVMediaType.audio.rawValue }.dropFirst().forEach { $0.isEnabled = false }
            duration = item.duration.seconds
            let estimatedDataRates = item.tracks.compactMap { $0.assetTrack?.estimatedDataRate }
            // Data rates are bits/second, hence the ÷ 8 for bytes.
            fileSize = Double(estimatedDataRates.reduce(0, +)) * duration / 8
            isReadyToPlay = true
        } else if item.status == .failed {
            error = item.error
        }
    }

    /// Recomputes playableTime/bufferingProgress from the loaded range containing the
    /// playhead; flips loadState to .playable once the forward buffer target is met.
    private func updatePlayableDuration(item: AVPlayerItem) {
        let first = item.loadedTimeRanges.first { CMTimeRangeContainsTime($0.timeRangeValue, time: item.currentTime()) }
        if let first {
            playableTime = first.timeRangeValue.end.seconds
            guard playableTime > 0 else { return }
            let loadedTime = playableTime - currentPlaybackTime
            guard loadedTime > 0 else { return }
            // Progress relative to the preferred forward buffer, capped at 100.
            bufferingProgress = Int(min(loadedTime * 100 / item.preferredForwardBufferDuration, 100))
            if bufferingProgress >= 100 {
                loadState = .playable
            }
        }
    }

    /// Applies the combined playback/load state to the AVPlayer: play (at the chosen
    /// rate) only when both playing and playable; pause otherwise. Notifies the delegate.
    private func playOrPause() {
        if playbackState == .playing {
            if loadState == .playable {
                player.play()
                player.rate = playbackRate
            }
        } else {
            player.pause()
        }
        delegate?.changeLoadState(player: self)
    }

    /// Swaps the queue player's current item. In loop mode an AVPlayerLooper is
    /// (re)built around the item and its loopCount/status are observed; otherwise the
    /// item is replaced directly. Passing nil tears down looping/playback.
    private func replaceCurrentItem(playerItem: AVPlayerItem?) {
        player.currentItem?.cancelPendingSeeks()
        if options.isLoopPlay {
            loopCountObservation?.invalidate()
            loopStatusObservation?.invalidate()
            playerLooper?.disableLooping()
            guard let playerItem else {
                playerLooper = nil
                return
            }
            playerLooper = AVPlayerLooper(player: player, templateItem: playerItem)
            loopCountObservation = playerLooper?.observe(\.loopCount) { [weak self] playerLooper, _ in
                guard let self else { return }
                self.delegate?.playBack(player: self, loopCount: playerLooper.loopCount)
            }
            loopStatusObservation = playerLooper?.observe(\.status) { [weak self] playerLooper, _ in
                guard let self else { return }
                if playerLooper.status == .failed {
                    self.error = playerLooper.error
                }
            }
        } else {
            player.replaceCurrentItem(with: playerItem)
        }
    }

    /// Rewires all notifications and KVO from the old item to `playerItem`.
    /// Called from the currentItem observation set up in init.
    private func observer(playerItem: AVPlayerItem?) {
        NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
        NotificationCenter.default.removeObserver(self, name: .AVPlayerItemFailedToPlayToEndTime, object: playerItem)
        statusObservation?.invalidate()
        loadedTimeRangesObservation?.invalidate()
        bufferEmptyObservation?.invalidate()
        likelyToKeepUpObservation?.invalidate()
        bufferFullObservation?.invalidate()
        guard let playerItem else { return }
        NotificationCenter.default.addObserver(self, selector: #selector(moviePlayDidEnd), name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
        NotificationCenter.default.addObserver(self, selector: #selector(playerItemFailedToPlayToEndTime), name: .AVPlayerItemFailedToPlayToEndTime, object: playerItem)
        statusObservation = playerItem.observe(\.status) { [weak self] item, _ in
            guard let self else { return }
            self.updateStatus(item: item)
        }
        loadedTimeRangesObservation = playerItem.observe(\.loadedTimeRanges) { [weak self] item, _ in
            guard let self else { return }
            // Recompute buffering progress.
            self.updatePlayableDuration(item: item)
        }

        // Shared handler for the three buffer-state flags; maps them onto loadState.
        let changeHandler: (AVPlayerItem, NSKeyValueObservedChange<Bool>) -> Void = { [weak self] _, _ in
            guard let self else { return }
            // Update load state on the main thread.
            if playerItem.isPlaybackBufferEmpty {
                self.loadState = .loading
            } else if playerItem.isPlaybackLikelyToKeepUp || playerItem.isPlaybackBufferFull {
                self.loadState = .playable
            }
        }
        bufferEmptyObservation = playerItem.observe(\.isPlaybackBufferEmpty, changeHandler: changeHandler)
        likelyToKeepUpObservation = playerItem.observe(\.isPlaybackLikelyToKeepUp, changeHandler: changeHandler)
        bufferFullObservation = playerItem.observe(\.isPlaybackBufferFull, changeHandler: changeHandler)
    }
}
|
||||
|
||||
// MARK: - MediaPlayerProtocol

extension KSAVPlayer: MediaPlayerProtocol {
    // AVPlayer items carry their own subtitle handling; no external data source.
    public var subtitleDataSouce: SubtitleDataSouce? { nil }
    public var isPlaying: Bool { player.rate > 0 ? true : playbackState == .playing }
    public var view: UIView? { playerView }
    /// Current playhead position. While a seek is pending this reports the seek
    /// target so UIs don't jump backwards mid-seek.
    public var currentPlaybackTime: TimeInterval {
        get {
            if shouldSeekTo > 0 {
                return TimeInterval(shouldSeekTo)
            } else {
                // Guard against stalls: report 0 until the item is ready to play.
                return isReadyToPlay ? player.currentTime().seconds : 0
            }
        }
        set {
            seek(time: newValue) { _ in
            }
        }
    }

    /// Bytes transferred so far, read from the item's first access-log event (0 if unavailable).
    public var numberOfBytesTransferred: Int64 {
        guard let playerItem = player.currentItem, let accesslog = playerItem.accessLog(), let event = accesslog.events.first else {
            return 0
        }
        return event.numberOfBytesTransferred
    }

    /// Captures the frame at the current playhead, or nil if no item is ready.
    public func thumbnailImageAtCurrentTime() async -> CGImage? {
        guard let playerItem = player.currentItem, isReadyToPlay else {
            return nil
        }
        return await withCheckedContinuation { continuation in
            urlAsset.thumbnailImage(currentTime: playerItem.currentTime()) { result in
                continuation.resume(returning: result)
            }
        }
    }

    /// Seeks to `time` (clamped to >= 0). Tolerance is zero when accurate seek is
    /// enabled, unbounded otherwise. The pending target is cleared on completion.
    public func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void)) {
        let time = max(time, 0)
        shouldSeekTo = time
        playbackState = .seeking
        runOnMainThread { [weak self] in
            self?.bufferingProgress = 0
        }
        let tolerance: CMTime = options.isAccurateSeek ? .zero : .positiveInfinity
        player.seek(to: CMTime(seconds: time), toleranceBefore: tolerance, toleranceAfter: tolerance) {
            [weak self] finished in
            guard let self else { return }
            self.shouldSeekTo = 0
            completion(finished)
        }
    }

    /// Builds a fresh AVPlayerItem from the asset and installs it (on the main thread).
    /// Timing marks (prepareTime/openTime) are recorded for startup diagnostics.
    public func prepareToPlay() {
        KSLog("prepareToPlay \(self)")
        options.prepareTime = CACurrentMediaTime()
        runOnMainThread { [weak self] in
            guard let self else { return }
            self.bufferingProgress = 0
            let playerItem = AVPlayerItem(asset: self.urlAsset)
            self.options.openTime = CACurrentMediaTime()
            self.replaceCurrentItem(playerItem: playerItem)
            // Ending is driven by the DidPlayToEndTime notification, not AVPlayer auto-advance.
            self.player.actionAtItemEnd = .pause
            self.player.volume = self.playbackVolume
        }
    }

    public func play() {
        KSLog("play \(self)")
        playbackState = .playing
    }

    public func pause() {
        KSLog("pause \(self)")
        playbackState = .paused
    }

    /// Stops playback and tears down the current item; the player can be reused via replace(url:options:).
    public func shutdown() {
        KSLog("shutdown \(self)")
        isReadyToPlay = false
        playbackState = .stopped
        loadState = .idle
        urlAsset.cancelLoading()
        replaceCurrentItem(playerItem: nil)
    }

    /// Points the player at a new URL: shuts down the old item, then rebuilds the asset.
    /// Note: assigning options also re-applies the buffer preference (didSet on options).
    public func replace(url: URL, options: KSOptions) {
        KSLog("replaceUrl \(self)")
        shutdown()
        urlAsset = AVURLAsset(url: url, options: options.avOptions)
        self.options = options
    }

    public var contentMode: UIViewContentMode {
        get {
            playerView.contentMode
        }
        set {
            playerView.contentMode = newValue
        }
    }

    /// Detaches the player from its layer so video decoding can stop in the background.
    public func enterBackground() {
        playerView.playerLayer.player = nil
    }

    public func enterForeground() {
        playerView.playerLayer.player = playerView.player
    }

    // Seekable when the current item reports at least one seekable time range.
    public var seekable: Bool {
        !(player.currentItem?.seekableTimeRanges.isEmpty ?? true)
    }

    public var isMuted: Bool {
        get {
            player.isMuted
        }
        set {
            player.isMuted = newValue
        }
    }

    /// Wraps the current item's tracks of `mediaType` (empty when no item is loaded).
    public func tracks(mediaType: AVFoundation.AVMediaType) -> [MediaPlayerTrack] {
        player.currentItem?.tracks.filter { $0.assetTrack?.mediaType == mediaType }.map { AVMediaPlayerTrack(track: $0) } ?? []
    }

    /// Exclusively enables `track`: disables every sibling of the same media type first.
    public func select(track: some MediaPlayerTrack) {
        player.currentItem?.tracks.filter { $0.assetTrack?.mediaType == track.mediaType }.forEach { $0.isEnabled = false }
        track.isEnabled = true
    }
}
|
||||
|
||||
extension AVFoundation.AVMediaType {
    /// The AVMediaCharacteristic used for selecting tracks of this media type.
    /// Unmapped types fall back to `.easyToRead`, matching the original behavior.
    var mediaCharacteristic: AVMediaCharacteristic {
        if self == .video {
            return .visual
        }
        if self == .audio {
            return .audible
        }
        if self == .subtitle {
            return .legible
        }
        return .easyToRead
    }
}
|
||||
|
||||
extension AVAssetTrack {
    // Placeholder with an empty body; no conversion is implemented here.
    // NOTE(review): appears vestigial — confirm whether any caller relies on it before removing.
    func toMediaPlayerTrack() {}
}
|
||||
|
||||
/// MediaPlayerTrack adapter over an `AVPlayerItemTrack`, snapshotting the
/// underlying asset track's metadata at construction time.
class AVMediaPlayerTrack: MediaPlayerTrack {
    // First format description of the asset track, if any (nil when absent or of an unexpected type).
    let formatDescription: CMFormatDescription?
    // Four-character media subtype string (e.g. codec tag) derived from the format description.
    let description: String
    private let track: AVPlayerItemTrack
    var nominalFrameRate: Float
    let trackID: Int32
    let rotation: Int16 = 0
    let bitDepth: Int32
    let bitRate: Int64
    // NOTE(review): name is populated from languageCode, not a display title — confirm intended.
    let name: String
    let languageCode: String?
    let mediaType: AVFoundation.AVMediaType
    let isImageSubtitle = false
    var dovi: DOVIDecoderConfigurationRecord?
    let fieldOrder: FFmpegFieldOrder = .unknown
    var isPlayable: Bool
    @MainActor
    var isEnabled: Bool {
        get {
            track.isEnabled
        }
        set {
            track.isEnabled = newValue
        }
    }

    /// Snapshots metadata from `track.assetTrack`, using safe fallbacks when the
    /// asset track is unavailable.
    init(track: AVPlayerItemTrack) {
        self.track = track
        trackID = track.assetTrack?.trackID ?? 0
        mediaType = track.assetTrack?.mediaType ?? .video
        name = track.assetTrack?.languageCode ?? ""
        languageCode = track.assetTrack?.languageCode
        nominalFrameRate = track.assetTrack?.nominalFrameRate ?? 24.0
        bitRate = Int64(track.assetTrack?.estimatedDataRate ?? 0)
        #if os(xrOS)
        // isPlayable must be loaded asynchronously on xrOS; start pessimistic.
        isPlayable = false
        #else
        isPlayable = track.assetTrack?.isPlayable ?? false
        #endif
        // Conditional cast instead of the previous force cast: an element of an
        // unexpected type now yields nil rather than crashing.
        formatDescription = track.assetTrack?.formatDescriptions.first as? CMFormatDescription
        bitDepth = formatDescription?.bitDepth ?? 0
        description = (formatDescription?.mediaSubType ?? .boxed).rawValue.string
        #if os(xrOS)
        Task {
            isPlayable = await (try? track.assetTrack?.load(.isPlayable)) ?? false
        }
        #endif
    }

    func load() {}
}
|
||||
|
||||
public extension AVAsset {
    /// Creates an image generator configured for frame-exact extraction
    /// (zero tolerance before and after the requested time).
    func createImageGenerator() -> AVAssetImageGenerator {
        let imageGenerator = AVAssetImageGenerator(asset: self)
        imageGenerator.requestedTimeToleranceBefore = .zero
        imageGenerator.requestedTimeToleranceAfter = .zero
        return imageGenerator
    }

    /// Asynchronously extracts the frame at `currentTime` and calls `handler`
    /// with the resulting image, or nil on failure.
    func thumbnailImage(currentTime: CMTime, handler: @escaping (CGImage?) -> Void) {
        // createImageGenerator() already applies zero tolerances; the previous
        // duplicate assignments here were redundant and have been removed.
        let imageGenerator = createImageGenerator()
        imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: currentTime)]) { _, cgImage, _, _, _ in
            // cgImage is already optional; forward it directly (nil on failure).
            handler(cgImage)
        }
    }
}
|
||||
679
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSOptions.swift
Normal file
679
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSOptions.swift
Normal file
@@ -0,0 +1,679 @@
|
||||
//
|
||||
// KSOptions.swift
|
||||
// KSPlayer-tvOS
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
#if os(tvOS) || os(xrOS)
|
||||
import DisplayCriteria
|
||||
#endif
|
||||
import OSLog
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#endif
|
||||
/// Per-playback configuration: buffering policy, ffmpeg format/decoder options,
/// track-selection hooks and startup-timing diagnostics.
open class KSOptions {
    /// Minimum buffered video duration before playback is considered playable.
    @Published
    public var preferredForwardBufferDuration = KSOptions.preferredForwardBufferDuration
    /// Maximum buffered video duration.
    public var maxBufferDuration = KSOptions.maxBufferDuration
    /// Whether fast-open (start playing within roughly a second) is enabled.
    public var isSecondOpen = KSOptions.isSecondOpen
    /// Whether frame-accurate seeking is enabled (zero tolerance).
    public var isAccurateSeek = KSOptions.isAccurateSeek
    /// Applies to short videos only
    public var isLoopPlay = KSOptions.isLoopPlay
    /// Whether playback resumes automatically after a seek completes.
    public var isSeekedAutoPlay = KSOptions.isSeekedAutoPlay
    /*
     AVSEEK_FLAG_BACKWARD: 1
     AVSEEK_FLAG_BYTE: 2
     AVSEEK_FLAG_ANY: 4
     AVSEEK_FLAG_FRAME: 8
     */
    public var seekFlags = Int32(1)
    // ffmpeg only cache http
    // This switch is unusable: it fails with "ff_tempfile: Cannot open temporary file".
    public var cache = false
    // record stream
    public var outputURL: URL?
    public var avOptions = [String: Any]()
    public var formatContextOptions = [String: Any]()
    public var decoderOptions = [String: Any]()
    public var probesize: Int64?
    public var maxAnalyzeDuration: Int64?
    public var lowres = UInt8(0)
    public var nobuffer = false
    public var codecLowDelay = false
    public var startPlayTime: TimeInterval = 0
    public var startPlayRate: Float = 1.0
    public var registerRemoteControll: Bool = true // Responds to system Control Center commands by default.
    /// HTTP Referer; mirrored into ffmpeg's "referer" format option on change.
    public var referer: String? {
        didSet {
            if let referer {
                formatContextOptions["referer"] = "Referer: \(referer)"
            } else {
                formatContextOptions["referer"] = nil
            }
        }
    }

    /// HTTP User-Agent; mirrored into ffmpeg's "user_agent" format option on change.
    public var userAgent: String? = "KSPlayer" {
        didSet {
            formatContextOptions["user_agent"] = userAgent
        }
    }

    // audio
    public var audioFilters = [String]()
    public var syncDecodeAudio = false
    // subtitle
    public var autoSelectEmbedSubtitle = true
    public var isSeekImageSubtitle = false
    // video
    public var display = DisplayEnum.plane
    public var videoDelay = 0.0 // s
    public var autoDeInterlace = false
    public var autoRotate = true
    public var destinationDynamicRange: DynamicRange?
    public var videoAdaptable = true
    public var videoFilters = [String]()
    public var syncDecodeVideo = false
    public var hardwareDecode = KSOptions.hardwareDecode
    public var asynchronousDecompression = KSOptions.asynchronousDecompression
    public var videoDisable = false
    public var canStartPictureInPictureAutomaticallyFromInline = KSOptions.canStartPictureInPictureAutomaticallyFromInline
    public var automaticWindowResize = true
    // Detected interlacing type, published once the idet filter reaches a verdict (see filter(log:)).
    @Published
    public var videoInterlacingType: VideoInterlacingType?
    private var videoClockDelayCount = 0

    // Startup/diagnostic timestamps (CACurrentMediaTime), written by the player pipeline.
    public internal(set) var formatName = ""
    public internal(set) var prepareTime = 0.0
    public internal(set) var dnsStartTime = 0.0
    public internal(set) var tcpStartTime = 0.0
    public internal(set) var tcpConnectedTime = 0.0
    public internal(set) var openTime = 0.0
    public internal(set) var findTime = 0.0
    public internal(set) var readyTime = 0.0
    public internal(set) var readAudioTime = 0.0
    public internal(set) var readVideoTime = 0.0
    public internal(set) var decodeAudioTime = 0.0
    public internal(set) var decodeVideoTime = 0.0
|
||||
/// Seeds the default ffmpeg format/decoder options.
public init() {
    formatContextOptions["user_agent"] = userAgent
    // Option names follow ffmpeg's protocols.texi and http.c.
    // Required: without it some streams mis-detect FieldOrder.
    formatContextOptions["scan_all_pmts"] = 1
    // Needed for live TS streams to keep playing; otherwise playback ends after a short while.
    formatContextOptions["reconnect"] = 1
    formatContextOptions["reconnect_streamed"] = 1
    // Would enable HTTP connection reuse (keep-alive). VLC enables it by default,
    // but with it on, Baidu Netdisk video links fail to play — so it stays off.
    // formatContextOptions["multiple_requests"] = 1
    // The options below help fast startup; enable as needed. They are off by default
    // because some TS live streams stutter frequently with them enabled.
    // formatContextOptions["auto_convert"] = 0
    // formatContextOptions["fps_probe_size"] = 3
    // formatContextOptions["rw_timeout"] = 10_000_000
    // formatContextOptions["max_analyze_duration"] = 300 * 1000
    // All protocols are allowed by default; a whitelist is only needed for nested
    // protocols, e.g. http inside m3u8.
    // formatContextOptions["protocol_whitelist"] = "file,http,https,tcp,tls,crypto,async,cache,data,httpproxy"
    // With reconnect_at_eof on, pure IPv6 addresses fail to play, and some finished
    // videos keep trying to reconnect — so it is left unset.
    // formatContextOptions["reconnect_at_eof"] = 1
    // reconnect_on_network_error causes endless retries after
    // "tcp Failed to resolve hostname".
    // formatContextOptions["reconnect_on_network_error"] = 1
    // There is total different meaning for 'listen_timeout' option in rtmp
    // set 'listen_timeout' = -1 for rtmp、rtsp
    // formatContextOptions["listen_timeout"] = 3
    decoderOptions["threads"] = "auto"
    decoderOptions["refcounted_frames"] = "1"
}
|
||||
|
||||
/**
|
||||
you can add http-header or other options which mentions in https://developer.apple.com/reference/avfoundation/avurlasset/initialization_options
|
||||
|
||||
to add http-header init options like this
|
||||
```
|
||||
options.appendHeader(["Referer":"https:www.xxx.com"])
|
||||
```
|
||||
*/
|
||||
public func appendHeader(_ header: [String: String]) {
    // Merge into the AVURLAsset header dictionary; incoming values win on conflict.
    var merged = avOptions["AVURLAssetHTTPHeaderFieldsKey"] as? [String: String] ?? [:]
    for (field, value) in header {
        merged[field] = value
    }
    avOptions["AVURLAssetHTTPHeaderFieldsKey"] = merged
    // Mirror the headers into ffmpeg's "headers" option as CRLF-terminated lines.
    let existing = formatContextOptions["headers"] as? String ?? ""
    let appended = header.map { "\($0.key):\($0.value)\r\n" }.joined()
    formatContextOptions["headers"] = existing + appended
}
|
||||
|
||||
/// Installs `cookies` on the AVURLAsset and mirrors them into a single
/// "Cookie" header for the ffmpeg path.
public func setCookie(_ cookies: [HTTPCookie]) {
    avOptions[AVURLAssetHTTPCookiesKey] = cookies
    let pairs = cookies.map { "\($0.name)=\($0.value)" }
    appendHeader(["Cookie": pairs.joined(separator: "; ")])
}
|
||||
|
||||
// Buffering policy: decides from the demux/decode queue capacities whether playback can proceed.
/// Computes the current `LoadingState` from per-track capacities.
///
/// A stream is playable when every track either has reached EOF, has decoded
/// frames plus enough buffered time, or qualifies for an early start (first
/// open / post-seek fast paths). `isFirst`/`isSeek` relax the threshold.
open func playable(capacitys: [CapacityProtocol], isFirst: Bool, isSeek: Bool) -> LoadingState {
    let packetCount = capacitys.map(\.packetCount).min() ?? 0
    let frameCount = capacitys.map(\.frameCount).min() ?? 0
    let isEndOfFile = capacitys.allSatisfy(\.isEndOfFile)
    let loadedTime = capacitys.map(\.loadedTime).min() ?? 0
    // Progress relative to the forward-buffer target (100 when no buffering is required).
    let progress = preferredForwardBufferDuration == 0 ? 100 : loadedTime * 100.0 / preferredForwardBufferDuration
    let isPlayable = capacitys.allSatisfy { capacity in
        // A drained track at EOF can never buffer more; don't block on it.
        if capacity.isEndOfFile && capacity.packetCount == 0 {
            return true
        }
        // Need at least a couple of decoded frames before playing.
        guard capacity.frameCount >= 2 else {
            return false
        }
        if capacity.isEndOfFile {
            return true
        }
        // Synchronous decoding fills frames on demand; no buffer threshold applies.
        if (syncDecodeVideo && capacity.mediaType == .video) || (syncDecodeAudio && capacity.mediaType == .audio) {
            return true
        }
        if isFirst || isSeek {
            // Let audio-only content open faster.
            if capacity.mediaType == .audio || isSecondOpen {
                if isFirst {
                    return true
                } else {
                    // After a seek, accept half the usual forward buffer.
                    return capacity.loadedTime >= self.preferredForwardBufferDuration / 2
                }
            }
        }
        return capacity.loadedTime >= self.preferredForwardBufferDuration
    }
    return LoadingState(loadedTime: loadedTime, progress: progress, packetCount: packetCount,
                        frameCount: frameCount, isEndOfFile: isEndOfFile, isPlayable: isPlayable,
                        isFirst: isFirst, isSeek: isSeek)
}
|
||||
|
||||
/// Adaptive-bitrate decision: returns `(currentBitRate, nextBitRate)` when a
/// switch to the adjacent rung is warranted, or nil to keep the current rate.
open func adaptable(state: VideoAdaptationState?) -> (Int64, Int64)? {
    guard let state, let current = state.bitRateStates.last else {
        return nil
    }
    // Debounce: wait at least half of maxBufferDuration since the last switch.
    guard CACurrentMediaTime() - current.time > maxBufferDuration / 2 else {
        return nil
    }
    guard let position = state.bitRates.firstIndex(of: current.bitRate) else {
        return nil
    }
    // A deep buffer suggests headroom to step up; a shallow one, pressure to step down.
    let wantsHigher = state.loadedCount > Int(Double(state.fps) * maxBufferDuration / 2)
    // Only act when the buffer trend agrees with the playability signal.
    guard wantsHigher == state.isPlayable else {
        return nil
    }
    let target = wantsHigher ? position + 1 : position - 1
    guard state.bitRates.indices.contains(target) else {
        return nil
    }
    return (current.bitRate, state.bitRates[target])
}
|
||||
|
||||
/// Wanted video stream index, or nil for automatic selection.
/// - Parameter tracks: The available video tracks.
/// - Returns: The index of the track to select, or nil.
open func wantedVideo(tracks _: [MediaPlayerTrack]) -> Int? {
    // Default: defer to automatic selection; subclasses may override.
    nil
}
|
||||
|
||||
/// Wanted audio stream index, or nil for automatic selection.
/// - Parameter tracks: The available audio tracks.
/// - Returns: The index of the track to select, or nil.
open func wantedAudio(tracks _: [MediaPlayerTrack]) -> Int? {
    // Default: defer to automatic selection; subclasses may override.
    nil
}
|
||||
|
||||
/// Maximum decoded-video-frame queue depth.
/// Live streams keep a shallow queue (4) to minimize latency; VOD buffers deeper (16).
open func videoFrameMaxCount(fps _: Float, naturalSize _: CGSize, isLive: Bool) -> UInt8 {
    if isLive {
        return 4
    }
    return 16
}
|
||||
|
||||
/// Maximum decoded-audio-frame queue depth: a quarter of fps × channels,
/// clamped to UInt8's range.
open func audioFrameMaxCount(fps: Float, channelCount: Int) -> UInt8 {
    let estimate = (Int(fps) * channelCount) >> 2
    return estimate >= Int(UInt8.max) ? .max : UInt8(estimate)
}
|
||||
|
||||
/// Customize the display aspect ratio.
/// - Parameters:
///   - sar: SAR (Sample Aspect Ratio)
///   - par: PAR (Pixel Aspect Ratio)
/// - Returns: DAR (Display Aspect Ratio), or nil to use the stream's own ratio.
open func customizeDar(sar _: CGSize, par _: CGSize) -> CGSize? {
    // Default: no override; subclasses may return a custom DAR.
    nil
}
|
||||
|
||||
// Although only iOS supports PiP, AVSampleBufferDisplayLayer also supports HDR10+,
// so using the display layer is still the recommended default.
open func isUseDisplayLayer() -> Bool {
    display == .plane
}
|
||||
|
||||
/// Parses ffmpeg URL-I/O log lines to timestamp connection milestones
/// (DNS start, TCP attempt, TCP connected). Only the first occurrence of
/// each milestone is recorded.
open func urlIO(log: String) {
    if dnsStartTime == 0, log.starts(with: "Original list of addresses") {
        dnsStartTime = CACurrentMediaTime()
    } else if tcpStartTime == 0, log.starts(with: "Starting connection attempt to") {
        tcpStartTime = CACurrentMediaTime()
    } else if tcpConnectedTime == 0, log.starts(with: "Successfully connected to") {
        tcpConnectedTime = CACurrentMediaTime()
    }
}
|
||||
|
||||
// Histogram of per-type verdict counts reported by ffmpeg's idet filter.
// Int (not UInt): the threshold tests below subtract counts from each other,
// and with UInt an expression like `bff - progressive` traps on underflow
// whenever the subtrahend is larger — which is common while counts are low.
private var idetTypeMap = [VideoInterlacingType: Int]()
/// Parses ffmpeg `idet` filter log lines to auto-detect interlacing.
///
/// Accumulates "Multi frame" verdicts; once one verdict leads the competing
/// ones by more than 100 frames, `videoInterlacingType` is fixed and further
/// auto de-interlace detection is switched off.
open func filter(log: String) {
    if log.starts(with: "Repeated Field:"), autoDeInterlace {
        for str in log.split(separator: ",") {
            let map = str.split(separator: ":")
            if map.count >= 2 {
                if String(map[0].trimmingCharacters(in: .whitespaces)) == "Multi frame" {
                    if let type = VideoInterlacingType(rawValue: map[1].trimmingCharacters(in: .whitespacesAndNewlines)) {
                        idetTypeMap[type] = (idetTypeMap[type] ?? 0) + 1
                        let tff = idetTypeMap[.tff] ?? 0
                        let bff = idetTypeMap[.bff] ?? 0
                        let progressive = idetTypeMap[.progressive] ?? 0
                        let undetermined = idetTypeMap[.undetermined] ?? 0
                        // Signed arithmetic: these differences may legitimately be negative.
                        if progressive - tff - bff > 100 {
                            videoInterlacingType = .progressive
                            autoDeInterlace = false
                        } else if bff - progressive > 100 {
                            videoInterlacingType = .bff
                            autoDeInterlace = false
                        } else if tff - progressive > 100 {
                            videoInterlacingType = .tff
                            autoDeInterlace = false
                        } else if undetermined - progressive - tff - bff > 100 {
                            videoInterlacingType = .undetermined
                            autoDeInterlace = false
                        }
                    }
                }
            }
        }
    }
}
|
||||
|
||||
/// Called for each SEI (supplemental enhancement information) payload; the default
/// implementation only logs it.
open func sei(string: String) {
    KSLog("sei \(string)")
}
|
||||
|
||||
/**
 Hook invoked before the decoder is created, allowing KSOptions and the asset track
 to be adjusted. For example, when fieldOrder is tt or bb, de-interlacing video
 filters are added automatically.
 */
open func process(assetTrack: some MediaPlayerTrack) {
    if assetTrack.mediaType == .video {
        if [FFmpegFieldOrder.bb, .bt, .tt, .tb].contains(assetTrack.fieldOrder) {
            // TODO: don't use yadif_videotoolbox for now — it crashes; revisit later.
            hardwareDecode = false
            asynchronousDecompression = false
            // hardwareDecode was just forced to false above, so this currently always
            // selects the software "yadif" filter.
            let yadif = hardwareDecode ? "yadif_videotoolbox" : "yadif"
            var yadifMode = KSOptions.yadifMode
            // (kept) disabled frame-rate-based downgrade of the yadif mode:
            // if let assetTrack = assetTrack as? FFmpegAssetTrack {
            //     if assetTrack.realFrameRate.num == 2 * assetTrack.avgFrameRate.num, assetTrack.realFrameRate.den == assetTrack.avgFrameRate.den {
            //         if yadifMode == 1 {
            //             yadifMode = 0
            //         } else if yadifMode == 3 {
            //             yadifMode = 2
            //         }
            //     }
            // }
            if KSOptions.deInterlaceAddIdet {
                videoFilters.append("idet")
            }
            videoFilters.append("\(yadif)=mode=\(yadifMode):parity=-1:deint=1")
            // Modes 1 and 3 emit one frame per field, doubling the output frame rate.
            if yadifMode == 1 || yadifMode == 3 {
                assetTrack.nominalFrameRate = assetTrack.nominalFrameRate * 2
            }
        }
    }
}
|
||||
|
||||
/// Updates the tvOS/visionOS display criteria (refresh rate + dynamic range) to match
/// the content, when display-criteria matching is enabled.
@MainActor
open func updateVideo(refreshRate: Float, isDovi: Bool, formatDescription: CMFormatDescription?) {
    #if os(tvOS) || os(xrOS)
    /**
     Changing preferredDisplayCriteria in quick succession flips
     isDisplayModeSwitchInProgress to true — e.g. when leaving a video and re-entering
     within 3s — so isDisplayModeSwitchInProgress is deliberately not checked here.
     */
    guard let displayManager = UIApplication.shared.windows.first?.avDisplayManager,
          displayManager.isDisplayCriteriaMatchingEnabled
    else {
        return
    }
    // Dolby Vision content forces .dolbyVision; otherwise derive from the format description.
    if let dynamicRange = isDovi ? .dolbyVision : formatDescription?.dynamicRange {
        displayManager.preferredDisplayCriteria = AVDisplayCriteria(refreshRate: refreshRate, videoDynamicRange: dynamicRange.rawValue)
    }
    #endif
}
|
||||
|
||||
/// Decides what to do with the next decoded video frame relative to the master clock.
/// Returns the clock difference and the action: keep the frame for later (.remain),
/// present it (.next), or — when badly behind — drop frames, flush, or seek.
open func videoClockSync(main: KSClock, nextVideoTime: TimeInterval, fps: Double, frameCount: Int) -> (Double, ClockProcessType) {
    // Desired presentation time on the master clock, adjusted by the user video delay.
    let desire = main.getTime() - videoDelay
    let diff = nextVideoTime - desire
    // print("[video] video diff \(diff) nextVideoTime \(nextVideoTime) main \(main.time.seconds)")
    if diff >= 1 / fps / 2 {
        // Frame is early by at least half a frame period: hold it.
        videoClockDelayCount = 0
        return (diff, .remain)
    } else {
        if diff < -4 / fps {
            // Late by more than four frame periods: escalate recovery.
            videoClockDelayCount += 1
            let log = "[video] video delay=\(diff), clock=\(desire), delay count=\(videoClockDelayCount), frameCount=\(frameCount)"
            if frameCount == 1 {
                // Only one frame buffered: dropping a GOP or single frame is the only lever.
                if diff < -1, videoClockDelayCount % 10 == 0 {
                    KSLog("\(log) drop gop Packet")
                    return (diff, .dropGOPPacket)
                } else if videoClockDelayCount % 5 == 0 {
                    KSLog("\(log) drop next frame")
                    return (diff, .dropNextFrame)
                } else {
                    return (diff, .next)
                }
            } else {
                // Several frames buffered: progressively stronger recovery, rate-limited
                // by the delay counter so each action is not taken on every tick.
                if diff < -8, videoClockDelayCount % 100 == 0 {
                    KSLog("\(log) seek video track")
                    return (diff, .seek)
                }
                if diff < -1, videoClockDelayCount % 10 == 0 {
                    KSLog("\(log) flush video track")
                    return (diff, .flush)
                }
                if videoClockDelayCount % 2 == 0 {
                    KSLog("\(log) drop next frame")
                    return (diff, .dropNextFrame)
                } else {
                    return (diff, .next)
                }
            }
        } else {
            // Slightly late, within tolerance: present immediately.
            videoClockDelayCount = 0
            return (diff, .next)
        }
    }
}
|
||||
|
||||
/// Resolves the dynamic range to use, reconciling the preferred destination range,
/// the content's range, and the HDR modes the device actually supports.
open func availableDynamicRange(_ contentRange: DynamicRange?) -> DynamicRange? {
    #if canImport(UIKit)
    let availableHDRModes = AVPlayer.availableHDRModes
    if let preferedDynamicRange = destinationDynamicRange {
        // value of 0 indicates that no HDR modes are supported.
        if availableHDRModes == AVPlayer.HDRMode(rawValue: 0) {
            return .sdr
        } else if availableHDRModes.contains(preferedDynamicRange.hdrMode) {
            return preferedDynamicRange
        } else if let contentRange,
                  availableHDRModes.contains(contentRange.hdrMode)
        {
            return contentRange
        } else if preferedDynamicRange != .sdr { // trying update to HDR mode
            return availableHDRModes.dynamicRange
        }
    }
    // No explicit preference (or no better match): fall back to the content's range.
    return contentRange
    #else
    return destinationDynamicRange ?? contentRange
    #endif
}
|
||||
|
||||
/// Teardown hook: on tvOS/visionOS, clears the preferred display criteria so the
/// display returns to its default mode.
open func playerLayerDeinit() {
    #if os(tvOS) || os(xrOS)
    runOnMainThread {
        UIApplication.shared.windows.first?.avDisplayManager.preferredDisplayCriteria = nil
    }
    #endif
}
|
||||
|
||||
/// Optional live-stream rate adaptation: return a playback rate derived from the
/// buffer level, or nil (the default) to leave the rate unchanged.
open func liveAdaptivePlaybackRate(loadingState _: LoadingState) -> Float? {
    nil
    // (kept) example buffer-based adaptation:
    // if loadingState.isFirst {
    //     return nil
    // }
    // if loadingState.loadedTime > preferredForwardBufferDuration + 5 {
    //     return 1.2
    // } else if loadingState.loadedTime < preferredForwardBufferDuration / 2 {
    //     return 0.8
    // } else {
    //     return 1
    // }
}
|
||||
|
||||
/// Hook for supplying a custom AVIO context for a URL; the default provides none.
open func process(url _: URL) -> AbstractAVIOContext? {
    return nil
}
|
||||
}
|
||||
|
||||
/// Interlacing classification as reported by the idet filter's log output
/// (raw values match the strings emitted in the "Multi frame:" field).
public enum VideoInterlacingType: String {
    case tff, bff, progressive, undetermined
}
|
||||
|
||||
public extension KSOptions {
    // Player implementation tried first, and the fallback used on failure.
    static var firstPlayerType: MediaPlayerProtocol.Type = KSAVPlayer.self
    static var secondPlayerType: MediaPlayerProtocol.Type? = KSMEPlayer.self
    /// Minimum buffered video duration (seconds) before playback proceeds.
    static var preferredForwardBufferDuration = 3.0
    /// Maximum buffered video duration (seconds).
    static var maxBufferDuration = 30.0
    /// Enable "instant open" fast startup.
    static var isSecondOpen = false
    /// Enable frame-accurate seeking.
    static var isAccurateSeek = false
    /// Applies to short videos only
    static var isLoopPlay = false
    /// Auto-play on load. Defaults to true.
    static var isAutoPlay = true
    /// Resume playback automatically after a seek completes.
    static var isSeekedAutoPlay = true
    static var hardwareDecode = true
    // Off by default: some videos carry out-of-order AVPacket pts where only the
    // decoded AVFrame pts is correct, so the custom async hardware path is unreliable.
    static var asynchronousDecompression = false
    static var isPipPopViewController = false
    static var canStartPictureInPictureAutomaticallyFromInline = true
    static var preferredFrame = true
    static var useSystemHTTPProxy = true
    /// Global log level.
    static var logLevel = LogLevel.warning
    static var logger: LogHandler = OSLog(lable: "KSPlayer")
    /// Number of CPU cores reported by the kernel via sysctl "hw.ncpu".
    internal static func deviceCpuCount() -> Int {
        var ncpu = UInt(0)
        var len: size_t = MemoryLayout.size(ofValue: ncpu)
        sysctlbyname("hw.ncpu", &ncpu, &len, nil, 0)
        return Int(ncpu)
    }

    /// Configures and activates the shared AVAudioSession for movie playback
    /// (no-op on macOS). Preserves .playAndRecord if already set.
    static func setAudioSession() {
        #if os(macOS)
        // try? AVAudioSession.sharedInstance().setRouteSharingPolicy(.longFormAudio)
        #else
        var category = AVAudioSession.sharedInstance().category
        if category != .playAndRecord {
            category = .playback
        }
        #if os(tvOS)
        try? AVAudioSession.sharedInstance().setCategory(category, mode: .moviePlayback, policy: .longFormAudio)
        #else
        try? AVAudioSession.sharedInstance().setCategory(category, mode: .moviePlayback, policy: .longFormVideo)
        #endif
        try? AVAudioSession.sharedInstance().setActive(true)
        #endif
    }

    #if !os(macOS)
    /// True when the current audio route reports spatial audio; also updates the
    /// session's multichannel-content support to match.
    static func isSpatialAudioEnabled(channelCount _: AVAudioChannelCount) -> Bool {
        if #available(tvOS 15.0, iOS 15.0, *) {
            let isSpatialAudioEnabled = AVAudioSession.sharedInstance().currentRoute.outputs.contains { $0.isSpatialAudioEnabled }
            try? AVAudioSession.sharedInstance().setSupportsMultichannelContent(isSpatialAudioEnabled)
            return isSpatialAudioEnabled
        } else {
            return false
        }
    }

    /// Chooses how many output channels to use for content with `channelCount`
    /// channels, taking the session's capabilities and spatial audio into account.
    static func outputNumberOfChannels(channelCount: AVAudioChannelCount) -> AVAudioChannelCount {
        let maximumOutputNumberOfChannels = AVAudioChannelCount(AVAudioSession.sharedInstance().maximumOutputNumberOfChannels)
        let preferredOutputNumberOfChannels = AVAudioChannelCount(AVAudioSession.sharedInstance().preferredOutputNumberOfChannels)
        let isSpatialAudioEnabled = isSpatialAudioEnabled(channelCount: channelCount)
        let isUseAudioRenderer = KSOptions.audioPlayerType == AudioRendererPlayer.self
        KSLog("[audio] maximumOutputNumberOfChannels: \(maximumOutputNumberOfChannels), preferredOutputNumberOfChannels: \(preferredOutputNumberOfChannels), isSpatialAudioEnabled: \(isSpatialAudioEnabled), isUseAudioRenderer: \(isUseAudioRenderer) ")
        let maxRouteChannelsCount = AVAudioSession.sharedInstance().currentRoute.outputs.compactMap {
            $0.channels?.count
        }.max() ?? 2
        KSLog("[audio] currentRoute max channels: \(maxRouteChannelsCount)")
        var channelCount = channelCount
        if channelCount > 2 {
            let minChannels = min(maximumOutputNumberOfChannels, channelCount)
            #if os(tvOS) || targetEnvironment(simulator)
            if !(isUseAudioRenderer && isSpatialAudioEnabled) {
                // Don't rely on maxRouteChannelsCount — it can be inaccurate and report 2
                // for multichannel devices (easily triggered after starting 2-channel
                // content); outputNumberOfChannels can also wrongly report 2.
                // channelCount = AVAudioChannelCount(min(AVAudioSession.sharedInstance().outputNumberOfChannels, maxRouteChannelsCount))
                channelCount = minChannels
            }
            #else
            // iOS speakers get spatial audio automatically, but Bluetooth headphones may
            // lack it or have it disabled, so this case still needs handling.
            if !isSpatialAudioEnabled {
                channelCount = minChannels
            }
            #endif
        } else {
            channelCount = 2
        }
        // Don't call setPreferredOutputNumberOfChannels here: this method is also
        // invoked while fetching audio-track info.
        KSLog("[audio] outputNumberOfChannels: \(AVAudioSession.sharedInstance().outputNumberOfChannels) output channelCount: \(channelCount)")
        return channelCount
    }
    #endif
}
|
||||
|
||||
/// Log severity levels; raw values mirror libavutil's AV_LOG_* constants
/// (lower = more severe).
public enum LogLevel: Int32, CustomStringConvertible {
    case panic = 0
    case fatal = 8
    case error = 16
    case warning = 24
    case info = 32
    case verbose = 40
    case debug = 48
    case trace = 56

    public var description: String {
        // NOTE(review): `.fatal` renders as "fault" — presumably to match os_log's
        // fault terminology; confirm this string is intentional.
        switch self {
        case .panic: return "panic"
        case .fatal: return "fault"
        case .error: return "error"
        case .warning: return "warning"
        case .info: return "info"
        case .verbose: return "verbose"
        case .debug: return "debug"
        case .trace: return "trace"
        }
    }
}
|
||||
|
||||
public extension LogLevel {
    /// Maps a KSPlayer log level onto an os_log message type.
    var logType: OSLogType {
        // NOTE(review): `.warning` maps to .debug and `.trace` to .default — unusual
        // but preserved as existing behavior; confirm intent.
        switch self {
        case .panic, .fatal: return .fault
        case .error: return .error
        case .warning: return .debug
        case .info, .verbose, .debug: return .info
        case .trace: return .default
        }
    }
}
|
||||
|
||||
/// Destination for KSPlayer log output; implement to route logs to a custom sink.
public protocol LogHandler {
    // NOTE(review): @inlinable on a protocol requirement does not inline conforming
    // implementations — confirm the attribute is doing what was intended here.
    @inlinable
    func log(level: LogLevel, message: CustomStringConvertible, file: String, function: String, line: UInt)
}
|
||||
|
||||
/// LogHandler that forwards messages to the system unified log via os_log.
/// NOTE(review): the type name shadows os.OSLog — confirm this is deliberate.
public class OSLog: LogHandler {
    public let label: String
    /// Original (misspelled) initializer kept for source compatibility with
    /// existing callers such as `KSOptions.logger`.
    public init(lable: String) {
        label = lable
    }

    /// Correctly-spelled convenience initializer; prefer this in new code.
    public convenience init(label: String) {
        self.init(lable: label)
    }

    /// Formats "<level> <label>: <file>:<line> <function> | <message>" and emits it
    /// at the os_log type derived from `level`.
    @inlinable
    public func log(level: LogLevel, message: CustomStringConvertible, file: String, function: String, line: UInt) {
        os_log(level.logType, "%@ %@: %@:%d %@ | %@", level.description, label, file, line, function, message.description)
    }
}
|
||||
|
||||
/// LogHandler that appends timestamped log lines to a FileHandle.
public class FileLog: LogHandler {
    public let fileHandle: FileHandle
    // NOTE(review): DateFormatter is not thread-safe — confirm all logging happens
    // from a single queue before sharing this handler across threads.
    public let formatter = DateFormatter()
    public init(fileHandle: FileHandle) {
        self.fileHandle = fileHandle
        formatter.dateFormat = "MM-dd HH:mm:ss.SSSSSS"
    }

    /// Writes "<timestamp> <level> <file>:<line> <function> | <message>\n" as UTF-8.
    @inlinable
    public func log(level: LogLevel, message: CustomStringConvertible, file: String, function: String, line: UInt) {
        let string = String(format: "%@ %@ %@:%d %@ | %@\n", formatter.string(from: Date()), level.description, file, line, function, message.description)
        if let data = string.data(using: .utf8) {
            // NOTE(review): legacy write(_:) raises an ObjC exception on failure;
            // write(contentsOf:) would surface a Swift error — confirm deployment target.
            fileHandle.write(data)
        }
    }
}
|
||||
|
||||
/// Logs an Error at `.error` level using its localizedDescription.
@inlinable
public func KSLog(_ error: @autoclosure () -> Error, file: String = #file, function: String = #function, line: UInt = #line) {
    KSLog(level: .error, error().localizedDescription, file: file, function: function, line: line)
}
|
||||
|
||||
/// Routes a message to the configured `KSOptions.logger` when `level` passes the
/// global `KSOptions.logLevel` threshold. The message autoclosure is only
/// evaluated when the level check succeeds.
@inlinable
public func KSLog(level: LogLevel = .warning, _ message: @autoclosure () -> CustomStringConvertible, file: String = #file, function: String = #function, line: UInt = #line) {
    guard level.rawValue <= KSOptions.logLevel.rawValue else { return }
    let fileName = (file as NSString).lastPathComponent
    KSOptions.logger.log(level: level, message: message(), file: fileName, function: function, line: line)
}
|
||||
|
||||
/// Logs a StaticString format directly via os_log, gated by the global log level.
@inlinable
public func KSLog(level: LogLevel = .warning, dso: UnsafeRawPointer = #dsohandle, _ message: StaticString, _ args: CVarArg...) {
    if level.rawValue <= KSOptions.logLevel.rawValue {
        // NOTE(review): `args` (an array) is forwarded as a single CVarArg rather than
        // spliced into os_log's variadic list — format specifiers beyond the first may
        // not fill as expected; confirm with a multi-argument call site.
        os_log(level.logType, dso: dso, message, args)
    }
}
|
||||
|
||||
public extension Array {
    /// Builds a dictionary keyed by `selectKey`; when two elements share a key,
    /// the later element wins (matching the original assignment-in-a-loop behavior).
    func toDictionary<Key: Hashable>(with selectKey: (Element) -> Key) -> [Key: Element] {
        reduce(into: [Key: Element]()) { result, element in
            result[selectKey(element)] = element
        }
    }
}
|
||||
|
||||
/// Playback clock: pairs the last media time with the host time at which it was set,
/// so the current time can be extrapolated between updates.
public struct KSClock {
    // Host time (CACurrentMediaTime) captured whenever `time` was last updated.
    public private(set) var lastMediaTime = CACurrentMediaTime()
    public internal(set) var position = Int64(0)
    public internal(set) var time = CMTime.zero {
        didSet {
            lastMediaTime = CACurrentMediaTime()
        }
    }

    /// Current playback time: the last set media time plus host time elapsed since.
    func getTime() -> TimeInterval {
        time.seconds + CACurrentMediaTime() - lastMediaTime
    }
}
|
||||
@@ -0,0 +1,110 @@
|
||||
//
|
||||
// KSPictureInPictureController.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/1/28.
|
||||
//
|
||||
|
||||
import AVKit
|
||||
|
||||
/// AVPictureInPictureController subclass that, when `KSOptions.isPipPopViewController`
/// is enabled, also pops/dismisses the hosting view controller on PiP start and
/// restores it on PiP stop.
@available(tvOS 14.0, *)
public class KSPictureInPictureController: AVPictureInPictureController {
    // The single active PiP session (only one exists at a time).
    private static var pipController: KSPictureInPictureController?
    // Strong reference so the popped/dismissed controller survives while PiP runs.
    private var originalViewController: UIViewController?
    private var view: KSPlayerLayer?
    private weak var viewController: UIViewController?
    private weak var presentingViewController: UIViewController?
    #if canImport(UIKit)
    private weak var navigationController: UINavigationController?
    #endif

    /// Stops PiP; when `restoreUserInterface` is true and pop-on-PiP is enabled,
    /// re-pushes or re-presents the original view controller and resumes playback.
    func stop(restoreUserInterface: Bool) {
        stopPictureInPicture()
        delegate = nil
        guard KSOptions.isPipPopViewController else {
            return
        }
        KSPictureInPictureController.pipController = nil
        if restoreUserInterface {
            #if canImport(UIKit)
            runOnMainThread { [weak self] in
                guard let self, let viewController, let originalViewController else { return }
                // If the host nav controller lost (or replaced) its root, restore it.
                if let nav = viewController as? UINavigationController,
                   nav.viewControllers.isEmpty || (nav.viewControllers.count == 1 && nav.viewControllers[0] != originalViewController)
                {
                    nav.viewControllers = [originalViewController]
                }
                if let navigationController {
                    var viewControllers = navigationController.viewControllers
                    // Swap back our controller if a same-typed one sits on top.
                    if viewControllers.count > 1, let last = viewControllers.last, type(of: last) == type(of: viewController) {
                        viewControllers[viewControllers.count - 1] = viewController
                        navigationController.viewControllers = viewControllers
                    }
                    if viewControllers.firstIndex(of: viewController) == nil {
                        // After a new SwiftUI push the view becomes an empty view, leaving
                        // the page blank — so push the original controller back explicitly.
                        navigationController.pushViewController(viewController, animated: true)
                    }
                } else {
                    presentingViewController?.present(originalViewController, animated: true)
                }
            }
            #endif
            view?.player.isMuted = false
            view?.play()
        }

        originalViewController = nil
        view = nil
    }

    /// Starts PiP for `view`; with pop-on-PiP enabled, pops or dismisses the hosting
    /// view controller (keeping a strong reference so it can be restored later).
    func start(view: KSPlayerLayer) {
        startPictureInPicture()
        delegate = view
        guard KSOptions.isPipPopViewController else {
            #if canImport(UIKit)
            // Send the app straight to the background instead.
            runOnMainThread {
                UIControl().sendAction(#selector(URLSessionTask.suspend), to: UIApplication.shared, for: nil)
            }
            #endif
            return
        }
        self.view = view
        #if canImport(UIKit)
        runOnMainThread { [weak self] in
            guard let self, let viewController = view.player.view?.viewController else { return }

            originalViewController = viewController
            // Track the nav controller itself when our controller is its only child.
            if let navigationController = viewController.navigationController, navigationController.viewControllers.count == 1 {
                self.viewController = navigationController
            } else {
                self.viewController = viewController
            }
            navigationController = self.viewController?.navigationController
            if let pre = KSPictureInPictureController.pipController {
                // Another PiP session is active: mute this player and deactivate the old one.
                view.player.isMuted = true
                pre.view?.isPipActive = false
            } else {
                if let navigationController {
                    navigationController.popViewController(animated: true)
                    #if os(iOS)
                    if navigationController.tabBarController != nil, navigationController.viewControllers.count == 1 {
                        DispatchQueue.main.async { [weak self] in
                            self?.navigationController?.setToolbarHidden(false, animated: true)
                        }
                    }
                    #endif
                } else {
                    presentingViewController = originalViewController?.presentingViewController
                    originalViewController?.dismiss(animated: true)
                }
            }
        }
        #endif
        KSPictureInPictureController.pipController = self
    }

    /// Mutes the currently active PiP session's player, if any.
    static func mute() {
        pipController?.view?.player.isMuted = true
    }
}
|
||||
707
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSPlayerLayer.swift
Normal file
707
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSPlayerLayer.swift
Normal file
@@ -0,0 +1,707 @@
|
||||
//
|
||||
// KSPlayerLayerView.swift
|
||||
// Pods
|
||||
//
|
||||
// Created by kintan on 16/4/28.
|
||||
//
|
||||
//
|
||||
import AVFoundation
|
||||
import AVKit
|
||||
import MediaPlayer
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
|
||||
/**
 Player status enum

 - initialized / preparing: URL set, player being prepared
 - readyToPlay: player ready to play
 - buffering: player buffering
 - bufferFinished: buffer finished
 - paused: playback paused
 - playedToTheEnd: played to the End
 - error: error with playing
 */
public enum KSPlayerState: CustomStringConvertible {
    case initialized
    case preparing
    case readyToPlay
    case buffering
    case bufferFinished
    case paused
    case playedToTheEnd
    case error

    /// Human-readable name mirroring the case identifier.
    public var description: String {
        switch self {
        case .initialized: return "initialized"
        case .preparing: return "preparing"
        case .readyToPlay: return "readyToPlay"
        case .buffering: return "buffering"
        case .bufferFinished: return "bufferFinished"
        case .paused: return "paused"
        case .playedToTheEnd: return "playedToTheEnd"
        case .error: return "error"
        }
    }

    /// True while playback is actively running (buffering or buffer-finished).
    public var isPlaying: Bool { self == .buffering || self == .bufferFinished }
}
|
||||
|
||||
/// Main-actor callbacks from KSPlayerLayer: state transitions, progress ticks,
/// finish/error, and rebuffer statistics.
@MainActor
public protocol KSPlayerLayerDelegate: AnyObject {
    /// Called whenever the playback state machine transitions.
    func player(layer: KSPlayerLayer, state: KSPlayerState)
    /// Periodic playback-progress tick.
    func player(layer: KSPlayerLayer, currentTime: TimeInterval, totalTime: TimeInterval)
    /// Playback finished; `error` is non-nil on failure.
    func player(layer: KSPlayerLayer, finish error: Error?)
    /// Reports the rebuffer count and how long the last buffering took.
    func player(layer: KSPlayerLayer, bufferedCount: Int, consumeTime: TimeInterval)
}
|
||||
|
||||
/// Owns the active MediaPlayerProtocol instance, drives the playback state machine,
/// and bridges player events to the delegate, now-playing info, and PiP.
open class KSPlayerLayer: NSObject {
    public weak var delegate: KSPlayerLayerDelegate?
    // Buffering progress reported by the player core.
    @Published
    public var bufferingProgress: Int = 0
    // Number of completed loops (loop playback only).
    @Published
    public var loopCount: Int = 0
    // Toggling this starts/stops Picture in Picture.
    @Published
    public var isPipActive = false {
        didSet {
            if #available(tvOS 14.0, *) {
                guard let pipController = player.pipController else {
                    return
                }

                if isPipActive {
                    // Must dispatch async, otherwise playback pauses right after PiP starts.
                    DispatchQueue.main.async { [weak self] in
                        guard let self else { return }
                        pipController.start(view: self)
                    }
                } else {
                    pipController.stop(restoreUserInterface: true)
                }
            }
        }
    }

    public private(set) var options: KSOptions

    // Swapping the player implementation re-parents its view in place, carries over
    // rate/volume, rebinds the delegate, and re-prepares when auto-play is on.
    public var player: MediaPlayerProtocol {
        didSet {
            KSLog("player is \(player)")
            state = .initialized
            runOnMainThread { [weak self] in
                guard let self else { return }
                if let oldView = oldValue.view, let superview = oldView.superview, let view = player.view {
                    #if canImport(UIKit)
                    superview.insertSubview(view, belowSubview: oldView)
                    #else
                    superview.addSubview(view, positioned: .below, relativeTo: oldView)
                    #endif
                    view.translatesAutoresizingMaskIntoConstraints = false
                    NSLayoutConstraint.activate([
                        view.topAnchor.constraint(equalTo: superview.topAnchor),
                        view.leadingAnchor.constraint(equalTo: superview.leadingAnchor),
                        view.bottomAnchor.constraint(equalTo: superview.bottomAnchor),
                        view.trailingAnchor.constraint(equalTo: superview.trailingAnchor),
                    ])
                }
                oldValue.view?.removeFromSuperview()
            }
            player.playbackRate = oldValue.playbackRate
            player.playbackVolume = oldValue.playbackVolume
            player.delegate = self
            player.contentMode = .scaleAspectFit
            if isAutoPlay {
                prepareToPlay()
            }
        }
    }

    // Setting a new URL picks the appropriate player type (AirPlay → KSAVPlayer,
    // non-plane display → KSMEPlayer) and reuses or replaces the current player.
    public private(set) var url: URL {
        didSet {
            let firstPlayerType: MediaPlayerProtocol.Type
            if isWirelessRouteActive {
                // While AirPlay is active, default to KSAVPlayer.
                firstPlayerType = KSAVPlayer.self
            } else if options.display != .plane {
                // AR display modes require KSMEPlayer.
                // swiftlint:disable force_cast
                firstPlayerType = NSClassFromString("KSPlayer.KSMEPlayer") as! MediaPlayerProtocol.Type
                // swiftlint:enable force_cast
            } else {
                firstPlayerType = KSOptions.firstPlayerType
            }
            if type(of: player) == firstPlayerType {
                if url == oldValue {
                    if isAutoPlay {
                        play()
                    }
                } else {
                    stop()
                    player.replace(url: url, options: options)
                    if isAutoPlay {
                        prepareToPlay()
                    }
                }
            } else {
                stop()
                player = firstPlayerType.init(url: url, options: options)
            }
        }
    }

    /// The player's state machine; transitions are forwarded to the delegate on the main thread.

    public private(set) var state = KSPlayerState.initialized {
        willSet {
            if state != newValue {
                runOnMainThread { [weak self] in
                    guard let self else { return }
                    KSLog("playerStateDidChange - \(newValue)")
                    self.delegate?.player(layer: self, state: newValue)
                }
            }
        }
    }

    // 0.1s progress timer: reports time to the delegate and keeps now-playing info fresh.
    private lazy var timer: Timer = .scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
        guard let self, self.player.isReadyToPlay else {
            return
        }
        self.delegate?.player(layer: self, currentTime: self.player.currentPlaybackTime, totalTime: self.player.duration)
        if self.player.playbackState == .playing, self.player.loadState == .playable, self.state == .buffering {
            // Safety net; normally this branch should be unreachable.
            self.state = .bufferFinished
        }
        if self.player.isPlaying {
            MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPNowPlayingInfoPropertyElapsedPlaybackTime] = self.player.currentPlaybackTime
        }
    }

    private var urls = [URL]()
    private var isAutoPlay: Bool
    private var isWirelessRouteActive = false
    // Rebuffer events since the current URL started.
    private var bufferedCount = 0
    // Pending seek target applied once the player becomes ready.
    private var shouldSeekTo: TimeInterval = 0
    // Host time when the last buffering began (0 when not buffering).
    private var startTime: TimeInterval = 0
    public init(url: URL, isAutoPlay: Bool = KSOptions.isAutoPlay, options: KSOptions, delegate: KSPlayerLayerDelegate? = nil) {
        self.url = url
        self.options = options
        self.delegate = delegate
        let firstPlayerType: MediaPlayerProtocol.Type
        if options.display != .plane {
            // AR display modes require KSMEPlayer.
            // swiftlint:disable force_cast
            firstPlayerType = NSClassFromString("KSPlayer.KSMEPlayer") as! MediaPlayerProtocol.Type
            // swiftlint:enable force_cast
        } else {
            firstPlayerType = KSOptions.firstPlayerType
        }
        player = firstPlayerType.init(url: url, options: options)
        self.isAutoPlay = isAutoPlay
        super.init()
        player.playbackRate = options.startPlayRate
        if options.registerRemoteControll {
            registerRemoteControllEvent()
        }
        player.delegate = self
        player.contentMode = .scaleAspectFit
        if isAutoPlay {
            prepareToPlay()
        }
        #if canImport(UIKit)
        runOnMainThread { [weak self] in
            guard let self else { return }
            NotificationCenter.default.addObserver(self, selector: #selector(enterBackground), name: UIApplication.didEnterBackgroundNotification, object: nil)
            NotificationCenter.default.addObserver(self, selector: #selector(enterForeground), name: UIApplication.willEnterForegroundNotification, object: nil)
        }
        #if !os(xrOS)
        NotificationCenter.default.addObserver(self, selector: #selector(wirelessRouteActiveDidChange(notification:)), name: .MPVolumeViewWirelessRouteActiveDidChange, object: nil)
        #endif
        #endif
        #if !os(macOS)
        NotificationCenter.default.addObserver(self, selector: #selector(audioInterrupted), name: AVAudioSession.interruptionNotification, object: nil)
        #endif
    }

    @available(*, unavailable)
    public required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        if #available(iOS 15.0, tvOS 15.0, macOS 12.0, *) {
            player.pipController?.contentSource = nil
        }
        NotificationCenter.default.removeObserver(self)
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
        // Detach every remote-command handler.
        MPRemoteCommandCenter.shared().playCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().pauseCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().togglePlayPauseCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().stopCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().nextTrackCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().previousTrackCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().changeRepeatModeCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().changePlaybackRateCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().skipForwardCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().skipBackwardCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().changePlaybackPositionCommand.removeTarget(nil)
        MPRemoteCommandCenter.shared().enableLanguageOptionCommand.removeTarget(nil)
        options.playerLayerDeinit()
    }

    /// Replaces the current media item.
    public func set(url: URL, options: KSOptions) {
        self.options = options
        runOnMainThread {
            self.url = url
        }
    }

    /// Replaces the playlist and starts with its first item.
    public func set(urls: [URL], options: KSOptions) {
        self.options = options
        self.urls.removeAll()
        self.urls.append(contentsOf: urls)
        if let first = urls.first {
            runOnMainThread {
                self.url = first
            }
        }
    }

    /// Starts (or resumes) playback, restarting from 0 after played-to-end.
    open func play() {
        runOnMainThread {
            UIApplication.shared.isIdleTimerDisabled = true
        }
        isAutoPlay = true
        if state == .error || state == .initialized {
            prepareToPlay()
        }
        if player.isReadyToPlay {
            if state == .playedToTheEnd {
                player.seek(time: 0) { [weak self] finished in
                    guard let self else { return }
                    if finished {
                        self.player.play()
                    }
                }
            } else {
                player.play()
            }
            timer.fireDate = Date.distantPast
        }
        state = player.loadState == .playable ? .bufferFinished : .buffering
        MPNowPlayingInfoCenter.default().playbackState = .playing
        if #available(tvOS 14.0, *) {
            KSPictureInPictureController.mute()
        }
    }

    /// Pauses playback and the progress timer.
    open func pause() {
        isAutoPlay = false
        player.pause()
        timer.fireDate = Date.distantFuture
        state = .paused
        MPNowPlayingInfoCenter.default().playbackState = .paused
        runOnMainThread {
            UIApplication.shared.isIdleTimerDisabled = false
        }
    }

    /// Shuts the player down and resets bookkeeping to the initialized state.
    public func stop() {
        KSLog("stop Player")
        state = .initialized
        player.shutdown()
        bufferedCount = 0
        shouldSeekTo = 0
        player.playbackRate = 1
        player.playbackVolume = 1
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
        runOnMainThread {
            UIApplication.shared.isIdleTimerDisabled = false
        }
    }

    /// Seeks to `time`; if the player isn't ready yet, stores it as a pending seek.
    open func seek(time: TimeInterval, autoPlay: Bool, completion: @escaping ((Bool) -> Void)) {
        if time.isInfinite || time.isNaN {
            completion(false)
            // Fix: previously execution fell through here, which could both invoke
            // `completion` twice and ask the player to seek to a non-finite time.
            return
        }
        if player.isReadyToPlay, player.seekable {
            player.seek(time: time) { [weak self] finished in
                guard let self else { return }
                if finished, autoPlay {
                    self.play()
                }
                completion(finished)
            }
        } else {
            isAutoPlay = autoPlay
            shouldSeekTo = time
            completion(false)
        }
    }
}
|
||||
|
||||
// MARK: - MediaPlayerDelegate
|
||||
|
||||
extension KSPlayerLayer: MediaPlayerDelegate {
|
||||
/// Player became ready: update window sizing (macOS), enable auto-PiP (iOS),
/// refresh now-playing info, and apply any pending seek or auto-play.
public func readyToPlay(player: some MediaPlayerProtocol) {
    state = .readyToPlay
    #if os(macOS)
    runOnMainThread { [weak self] in
        guard let self else { return }
        if let window = player.view?.window {
            window.isMovableByWindowBackground = true
            if options.automaticWindowResize {
                // Lock the window's aspect ratio to the video's natural size.
                let naturalSize = player.naturalSize
                if naturalSize.width > 0, naturalSize.height > 0 {
                    window.aspectRatio = naturalSize
                    var frame = window.frame
                    frame.size.height = frame.width * naturalSize.height / naturalSize.width
                    window.setFrame(frame, display: true)
                }
            }
        }
    }
    #endif
    #if !os(macOS) && !os(tvOS)
    if #available(iOS 14.2, *) {
        if options.canStartPictureInPictureAutomaticallyFromInline {
            player.pipController?.canStartPictureInPictureAutomaticallyFromInline = true
        }
    }
    #endif
    updateNowPlayingInfo()
    if isAutoPlay {
        if shouldSeekTo > 0 {
            // Apply the seek that was requested before the player became ready.
            seek(time: shouldSeekTo, autoPlay: true) { [weak self] _ in
                guard let self else { return }
                self.shouldSeekTo = 0
            }

        } else {
            play()
        }
    }
}
|
||||
|
||||
public func changeLoadState(player: some MediaPlayerProtocol) {
|
||||
guard player.playbackState != .seeking else { return }
|
||||
if player.loadState == .playable, startTime > 0 {
|
||||
let diff = CACurrentMediaTime() - startTime
|
||||
runOnMainThread { [weak self] in
|
||||
guard let self else { return }
|
||||
delegate?.player(layer: self, bufferedCount: bufferedCount, consumeTime: diff)
|
||||
}
|
||||
if bufferedCount == 0 {
|
||||
var dic = ["firstTime": diff]
|
||||
if options.tcpConnectedTime > 0 {
|
||||
dic["initTime"] = options.dnsStartTime - startTime
|
||||
dic["dnsTime"] = options.tcpStartTime - options.dnsStartTime
|
||||
dic["tcpTime"] = options.tcpConnectedTime - options.tcpStartTime
|
||||
dic["openTime"] = options.openTime - options.tcpConnectedTime
|
||||
dic["findTime"] = options.findTime - options.openTime
|
||||
} else {
|
||||
dic["openTime"] = options.openTime - startTime
|
||||
}
|
||||
dic["findTime"] = options.findTime - options.openTime
|
||||
dic["readyTime"] = options.readyTime - options.findTime
|
||||
dic["readVideoTime"] = options.readVideoTime - options.readyTime
|
||||
dic["readAudioTime"] = options.readAudioTime - options.readyTime
|
||||
dic["decodeVideoTime"] = options.decodeVideoTime - options.readVideoTime
|
||||
dic["decodeAudioTime"] = options.decodeAudioTime - options.readAudioTime
|
||||
KSLog(dic)
|
||||
}
|
||||
bufferedCount += 1
|
||||
startTime = 0
|
||||
}
|
||||
guard state.isPlaying else { return }
|
||||
if player.loadState == .playable {
|
||||
state = .bufferFinished
|
||||
} else {
|
||||
if state == .bufferFinished {
|
||||
startTime = CACurrentMediaTime()
|
||||
}
|
||||
state = .buffering
|
||||
}
|
||||
}
|
||||
|
||||
public func changeBuffering(player _: some MediaPlayerProtocol, progress: Int) {
|
||||
bufferingProgress = progress
|
||||
}
|
||||
|
||||
public func playBack(player _: some MediaPlayerProtocol, loopCount: Int) {
|
||||
self.loopCount = loopCount
|
||||
}
|
||||
|
||||
public func finish(player: some MediaPlayerProtocol, error: Error?) {
|
||||
if let error {
|
||||
if type(of: player) != KSOptions.secondPlayerType, let secondPlayerType = KSOptions.secondPlayerType {
|
||||
self.player = secondPlayerType.init(url: url, options: options)
|
||||
return
|
||||
}
|
||||
state = .error
|
||||
KSLog(error as CustomStringConvertible)
|
||||
} else {
|
||||
let duration = player.duration
|
||||
runOnMainThread { [weak self] in
|
||||
guard let self else { return }
|
||||
delegate?.player(layer: self, currentTime: duration, totalTime: duration)
|
||||
}
|
||||
state = .playedToTheEnd
|
||||
}
|
||||
timer.fireDate = Date.distantFuture
|
||||
bufferedCount = 1
|
||||
runOnMainThread { [weak self] in
|
||||
guard let self else { return }
|
||||
delegate?.player(layer: self, finish: error)
|
||||
}
|
||||
if error == nil {
|
||||
nextPlayer()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - AVPictureInPictureControllerDelegate
|
||||
|
||||
@available(tvOS 14.0, *)
extension KSPlayerLayer: AVPictureInPictureControllerDelegate {
    /// PiP has stopped; tear the controller down without restoring inline UI.
    public func pictureInPictureControllerDidStopPictureInPicture(_: AVPictureInPictureController) {
        player.pipController?.stop(restoreUserInterface: false)
    }

    /// PiP wants the inline UI restored; only the flag is flipped so the
    /// owning view can react.
    /// NOTE(review): the completion handler is never invoked here — confirm
    /// the system tolerates that on all supported OS versions.
    public func pictureInPictureController(_: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler _: @escaping (Bool) -> Void) {
        isPipActive = false
    }
}
|
||||
|
||||
// MARK: - private functions
|
||||
|
||||
extension KSPlayerLayer {
    /// Resets startup statistics and asks the current player core to load.
    open func prepareToPlay() {
        state = .preparing
        startTime = CACurrentMediaTime()
        bufferedCount = 0
        player.prepareToPlay()
    }

    /// Publishes duration, title/artist metadata and the available audio
    /// language options to the system Now Playing info center.
    private func updateNowPlayingInfo() {
        if MPNowPlayingInfoCenter.default().nowPlayingInfo == nil {
            MPNowPlayingInfoCenter.default().nowPlayingInfo = [MPMediaItemPropertyPlaybackDuration: player.duration]
        } else {
            MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyPlaybackDuration] = player.duration
        }
        // Only fill title/artist when the host app hasn't set them already.
        if MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyTitle] == nil, let title = player.dynamicInfo?.metadata["title"] {
            MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyTitle] = title
        }
        if MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyArtist] == nil, let artist = player.dynamicInfo?.metadata["artist"] {
            MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyArtist] = artist
        }
        // One single-option group per audio track that carries a language tag;
        // enabled tracks are reported as the current selection.
        var current: [MPNowPlayingInfoLanguageOption] = []
        var langs: [MPNowPlayingInfoLanguageOptionGroup] = []
        for track in player.tracks(mediaType: .audio) {
            if let lang = track.language {
                let audioLang = MPNowPlayingInfoLanguageOption(type: .audible, languageTag: lang, characteristics: nil, displayName: track.name, identifier: track.name)
                let audioGroup = MPNowPlayingInfoLanguageOptionGroup(languageOptions: [audioLang], defaultLanguageOption: nil, allowEmptySelection: false)
                langs.append(audioGroup)
                if track.isEnabled {
                    current.append(audioLang)
                }
            }
        }
        if !langs.isEmpty {
            MPRemoteCommandCenter.shared().enableLanguageOptionCommand.isEnabled = true
        }
        MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPNowPlayingInfoPropertyAvailableLanguageOptions] = langs
        MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPNowPlayingInfoPropertyCurrentLanguageOptions] = current
    }

    /// Advances to the next URL in the queue, if any, and auto-plays it.
    private func nextPlayer() {
        if urls.count > 1, let index = urls.firstIndex(of: url), index < urls.count - 1 {
            isAutoPlay = true
            url = urls[index + 1]
        }
    }

    /// Steps back to the previous URL in the queue, if any, and auto-plays it.
    private func previousPlayer() {
        if urls.count > 1, let index = urls.firstIndex(of: url), index > 0 {
            isAutoPlay = true
            url = urls[index - 1]
        }
    }

    /// Convenience seek; resume behavior follows `options.isSeekedAutoPlay`.
    func seek(time: TimeInterval) {
        seek(time: time, autoPlay: options.isSeekedAutoPlay) { _ in
        }
    }

    /// Wires the system remote-command center (lock screen / Control Center /
    /// remote) to this layer: transport, track switching, rate, skip, seek and
    /// audio-language commands. (Name keeps the original public spelling.)
    public func registerRemoteControllEvent() {
        let remoteCommand = MPRemoteCommandCenter.shared()
        remoteCommand.playCommand.addTarget { [weak self] _ in
            guard let self else {
                return .commandFailed
            }
            self.play()
            return .success
        }
        remoteCommand.pauseCommand.addTarget { [weak self] _ in
            guard let self else {
                return .commandFailed
            }
            self.pause()
            return .success
        }
        remoteCommand.togglePlayPauseCommand.addTarget { [weak self] _ in
            guard let self else {
                return .commandFailed
            }
            if self.state.isPlaying {
                self.pause()
            } else {
                self.play()
            }
            return .success
        }
        remoteCommand.stopCommand.addTarget { [weak self] _ in
            guard let self else {
                return .commandFailed
            }
            self.player.shutdown()
            return .success
        }
        remoteCommand.nextTrackCommand.addTarget { [weak self] _ in
            guard let self else {
                return .commandFailed
            }
            self.nextPlayer()
            return .success
        }
        remoteCommand.previousTrackCommand.addTarget { [weak self] _ in
            guard let self else {
                return .commandFailed
            }
            self.previousPlayer()
            return .success
        }
        remoteCommand.changeRepeatModeCommand.addTarget { [weak self] event in
            guard let self, let event = event as? MPChangeRepeatModeCommandEvent else {
                return .commandFailed
            }
            // Any non-off repeat mode maps onto simple loop playback.
            self.options.isLoopPlay = event.repeatType != .off
            return .success
        }
        remoteCommand.changeShuffleModeCommand.isEnabled = false
        // remoteCommand.changeShuffleModeCommand.addTarget {})
        remoteCommand.changePlaybackRateCommand.supportedPlaybackRates = [0.5, 1, 1.5, 2]
        remoteCommand.changePlaybackRateCommand.addTarget { [weak self] event in
            guard let self, let event = event as? MPChangePlaybackRateCommandEvent else {
                return .commandFailed
            }
            self.player.playbackRate = event.playbackRate
            return .success
        }
        remoteCommand.skipForwardCommand.preferredIntervals = [15]
        remoteCommand.skipForwardCommand.addTarget { [weak self] event in
            guard let self, let event = event as? MPSkipIntervalCommandEvent else {
                return .commandFailed
            }
            self.seek(time: self.player.currentPlaybackTime + event.interval)
            return .success
        }
        remoteCommand.skipBackwardCommand.preferredIntervals = [15]
        remoteCommand.skipBackwardCommand.addTarget { [weak self] event in
            guard let self, let event = event as? MPSkipIntervalCommandEvent else {
                return .commandFailed
            }
            self.seek(time: self.player.currentPlaybackTime - event.interval)
            return .success
        }
        remoteCommand.changePlaybackPositionCommand.addTarget { [weak self] event in
            guard let self, let event = event as? MPChangePlaybackPositionCommandEvent else {
                return .commandFailed
            }
            self.seek(time: event.positionTime)
            return .success
        }
        remoteCommand.enableLanguageOptionCommand.addTarget { [weak self] event in
            guard let self, let event = event as? MPChangeLanguageOptionCommandEvent else {
                return .commandFailed
            }
            let selectLang = event.languageOption
            // Tracks are matched by display name because updateNowPlayingInfo()
            // uses track.name as both displayName and identifier.
            if selectLang.languageOptionType == .audible,
               let trackToSelect = self.player.tracks(mediaType: .audio).first(where: { $0.name == selectLang.displayName })
            {
                self.player.select(track: trackToSelect)
            }
            return .success
        }
    }

    /// App moved to background: keep playing when external playback, PiP or
    /// background play applies; otherwise pause.
    @objc private func enterBackground() {
        guard state.isPlaying, !player.isExternalPlaybackActive else {
            return
        }
        if #available(tvOS 14.0, *), player.pipController?.isPictureInPictureActive == true {
            return
        }

        if KSOptions.canBackgroundPlay {
            player.enterBackground()
            return
        }
        pause()
    }

    /// App returned to foreground; hand control back to the player core when
    /// background play is enabled.
    @objc private func enterForeground() {
        if KSOptions.canBackgroundPlay {
            player.enterForeground()
        }
    }

    #if canImport(UIKit) && !os(xrOS)
    /// Tracks AirPlay/wireless route changes reported via `MPVolumeView`.
    @MainActor
    @objc private func wirelessRouteActiveDidChange(notification: Notification) {
        guard let volumeView = notification.object as? MPVolumeView, isWirelessRouteActive != volumeView.isWirelessRouteActive else { return }
        if volumeView.isWirelessRouteActive {
            if !player.allowsExternalPlayback {
                isWirelessRouteActive = true
            }
            player.usesExternalPlaybackWhileExternalScreenIsActive = true
        }
        isWirelessRouteActive = volumeView.isWirelessRouteActive
    }
    #endif
    #if !os(macOS)
    /// Pauses on audio-session interruption and resumes only when the system
    /// indicates resumption is appropriate.
    @objc private func audioInterrupted(notification: Notification) {
        guard let userInfo = notification.userInfo,
              let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue)
        else {
            return
        }
        switch type {
        case .began:
            pause()

        case .ended:
            // An interruption ended. Resume playback, if appropriate.

            guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
            if options.contains(.shouldResume) {
                play()
            }

        default:
            break
        }
    }
    #endif
}
|
||||
336
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSVideoPlayer.swift
Normal file
336
KSPlayer-main/Sources/KSPlayer/AVPlayer/KSVideoPlayer.swift
Normal file
@@ -0,0 +1,336 @@
|
||||
//
|
||||
// KSVideoPlayer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/2/11.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import SwiftUI
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
|
||||
public typealias UIViewRepresentable = NSViewRepresentable
|
||||
#endif
|
||||
|
||||
/// SwiftUI wrapper around KSPlayer: presents a platform player view that
/// plays `url` with the given `options`, driven by a shared `Coordinator`.
public struct KSVideoPlayer {
    // The coordinator owns the KSPlayerLayer and receives its callbacks;
    // it is injected so the caller can keep it alive across view updates.
    public private(set) var coordinator: Coordinator
    public let url: URL
    public let options: KSOptions
    public init(coordinator: Coordinator, url: URL, options: KSOptions) {
        self.coordinator = coordinator
        self.url = url
        self.options = options
    }
}
|
||||
|
||||
extension KSVideoPlayer: UIViewRepresentable {
    public func makeCoordinator() -> Coordinator {
        coordinator
    }

    #if canImport(UIKit)
    public typealias UIViewType = UIView
    public func makeUIView(context: Context) -> UIViewType {
        context.coordinator.makeView(url: url, options: options)
    }

    public func updateUIView(_ view: UIViewType, context: Context) {
        updateView(view, context: context)
    }

    // On iOS/tvOS devices onDisappear runs before dismantleUIView, but the
    // simulator does it the other way round.
    public static func dismantleUIView(_: UIViewType, coordinator: Coordinator) {
        coordinator.resetPlayer()
    }
    #else
    public typealias NSViewType = UIView
    public func makeNSView(context: Context) -> NSViewType {
        context.coordinator.makeView(url: url, options: options)
    }

    public func updateNSView(_ view: NSViewType, context: Context) {
        updateView(view, context: context)
    }

    // macOS calls onDisappear before dismantleNSView.
    public static func dismantleNSView(_ view: NSViewType, coordinator: Coordinator) {
        coordinator.resetPlayer()
        // Restore a 16:9 window aspect ratio once the player view is gone.
        view.window?.aspectRatio = CGSize(width: 16, height: 9)
    }
    #endif

    /// Re-targets the existing layer when SwiftUI updates the view with a new URL.
    @MainActor
    private func updateView(_: UIView, context: Context) {
        if context.coordinator.playerLayer?.url != url {
            _ = context.coordinator.makeView(url: url, options: options)
        }
    }

    /// Bridges the imperative `KSPlayerLayer` world into SwiftUI: owns the
    /// layer, republishes playback properties, and exposes user callbacks.
    @MainActor
    public final class Coordinator: ObservableObject {
        /// Current layer state, or `.initialized` before a layer exists.
        public var state: KSPlayerState {
            playerLayer?.state ?? .initialized
        }

        @Published
        public var isMuted: Bool = false {
            didSet {
                playerLayer?.player.isMuted = isMuted
            }
        }

        @Published
        public var playbackVolume: Float = 1.0 {
            didSet {
                playerLayer?.player.playbackVolume = playbackVolume
            }
        }

        @Published
        public var isScaleAspectFill = false {
            didSet {
                playerLayer?.player.contentMode = isScaleAspectFill ? .scaleAspectFill : .scaleAspectFit
            }
        }

        @Published
        public var playbackRate: Float = 1.0 {
            didSet {
                playerLayer?.player.playbackRate = playbackRate
            }
        }

        // Visibility of the control overlay ("mask").
        @Published
        @MainActor
        public var isMaskShow = true {
            didSet {
                if isMaskShow != oldValue {
                    mask(show: isMaskShow)
                }
            }
        }

        public var subtitleModel = SubtitleModel()
        public var timemodel = ControllerTimeModel()
        // In SplitView, re-entry calls makeUIView first and only then the
        // previous dismantleUIView; entering the same view would therefore
        // clear playerLayer. The reliable place to clear it is onDisappear.
        public var playerLayer: KSPlayerLayer? {
            didSet {
                oldValue?.delegate = nil
                oldValue?.pause()
            }
        }

        // Pending work item that auto-hides the mask after a delay.
        private var delayHide: DispatchWorkItem?
        public var onPlay: ((TimeInterval, TimeInterval) -> Void)?
        public var onFinish: ((KSPlayerLayer, Error?) -> Void)?
        public var onStateChanged: ((KSPlayerLayer, KSPlayerState) -> Void)?
        public var onBufferChanged: ((Int, TimeInterval) -> Void)?
        #if canImport(UIKit)
        fileprivate var onSwipe: ((UISwipeGestureRecognizer.Direction) -> Void)?
        @objc fileprivate func swipeGestureAction(_ recognizer: UISwipeGestureRecognizer) {
            onSwipe?(recognizer.direction)
        }
        #endif

        public init() {}

        /// Creates (or re-targets) the player layer for `url` and returns its view.
        public func makeView(url: URL, options: KSOptions) -> UIView {
            defer {
                // Subtitle lookup is kicked off asynchronously after the view exists.
                DispatchQueue.main.async { [weak self] in
                    self?.subtitleModel.url = url
                }
            }
            if let playerLayer {
                if playerLayer.url == url {
                    return playerLayer.player.view ?? UIView()
                }
                // Detach the delegate while swapping URLs to avoid stale callbacks.
                playerLayer.delegate = nil
                playerLayer.set(url: url, options: options)
                playerLayer.delegate = self
                return playerLayer.player.view ?? UIView()
            } else {
                let playerLayer = KSPlayerLayer(url: url, options: options, delegate: self)
                self.playerLayer = playerLayer
                return playerLayer.player.view ?? UIView()
            }
        }

        /// Drops the layer, all callbacks and pending work; called on teardown.
        public func resetPlayer() {
            onStateChanged = nil
            onPlay = nil
            onFinish = nil
            onBufferChanged = nil
            #if canImport(UIKit)
            onSwipe = nil
            #endif
            playerLayer = nil
            delayHide?.cancel()
            delayHide = nil
            subtitleModel.selectedSubtitleInfo?.isEnabled = false
        }

        /// Seeks relative to the current position by `interval` seconds.
        public func skip(interval: Int) {
            if let playerLayer {
                seek(time: playerLayer.player.currentPlaybackTime + TimeInterval(interval))
            }
        }

        public func seek(time: TimeInterval) {
            playerLayer?.seek(time: TimeInterval(time))
        }

        /// Shows/hides the control overlay; when shown during playback it can
        /// auto-hide after `KSOptions.animateDelayTimeInterval`.
        @MainActor
        public func mask(show: Bool, autoHide: Bool = true) {
            isMaskShow = show
            if show {
                delayHide?.cancel()
                // Auto-hide only while actually playing.
                guard state == .bufferFinished else { return }
                if autoHide {
                    delayHide = DispatchWorkItem { [weak self] in
                        guard let self else { return }
                        if self.state == .bufferFinished {
                            self.isMaskShow = false
                        }
                    }
                    DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + KSOptions.animateDelayTimeInterval,
                                                  execute: delayHide!)
                }
            }
            #if os(macOS)
            // Mirror mask visibility onto the cursor and window chrome.
            show ? NSCursor.unhide() : NSCursor.setHiddenUntilMouseMoves(true)
            if let window = playerLayer?.player.view?.window {
                if !window.styleMask.contains(.fullScreen) {
                    window.standardWindowButton(.closeButton)?.superview?.superview?.isHidden = !show
                    // window.standardWindowButton(.zoomButton)?.isHidden = !show
                    // window.standardWindowButton(.closeButton)?.isHidden = !show
                    // window.standardWindowButton(.miniaturizeButton)?.isHidden = !show
                    // window.titleVisibility = show ? .visible : .hidden
                }
            }
            #endif
        }
    }
}
|
||||
|
||||
extension KSVideoPlayer.Coordinator: KSPlayerLayerDelegate {
    /// Mirrors layer state into the coordinator: forwards the callback, syncs
    /// playback rate and subtitles on readyToPlay, and toggles the mask.
    public func player(layer: KSPlayerLayer, state: KSPlayerState) {
        onStateChanged?(layer, state)
        if state == .readyToPlay {
            playbackRate = layer.player.playbackRate
            if let subtitleDataSouce = layer.player.subtitleDataSouce {
                // Embedded subtitles must be added late: some live inside the
                // video stream and arrive after the readyToPlay callback.
                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 1) { [weak self] in
                    guard let self else { return }
                    self.subtitleModel.addSubtitle(dataSouce: subtitleDataSouce)
                    if self.subtitleModel.selectedSubtitleInfo == nil, layer.options.autoSelectEmbedSubtitle {
                        self.subtitleModel.selectedSubtitleInfo = subtitleDataSouce.infos.first { $0.isEnabled }
                    }
                }
            }
        } else if state == .bufferFinished {
            isMaskShow = false
        } else {
            isMaskShow = true
            #if canImport(UIKit)
            if state == .preparing, let view = layer.player.view {
                // Install one swipe recognizer per direction; all four funnel
                // into swipeGestureAction and out through `onSwipe`.
                let swipeDown = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
                swipeDown.direction = .down
                view.addGestureRecognizer(swipeDown)
                let swipeLeft = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
                swipeLeft.direction = .left
                view.addGestureRecognizer(swipeLeft)
                let swipeRight = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
                swipeRight.direction = .right
                view.addGestureRecognizer(swipeRight)
                let swipeUp = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
                swipeUp.direction = .up
                view.addGestureRecognizer(swipeUp)
            }
            #endif
        }
    }

    /// Forwards progress and updates the coarse (whole-second) time model,
    /// skipping values that cannot be represented as Int.
    public func player(layer _: KSPlayerLayer, currentTime: TimeInterval, totalTime: TimeInterval) {
        onPlay?(currentTime, totalTime)
        if currentTime >= Double(Int.max) || currentTime <= Double(Int.min) || totalTime >= Double(Int.max) || totalTime <= Double(Int.min) {
            return
        }
        let current = Int(currentTime)
        let total = Int(max(0, totalTime))
        // Only publish on whole-second changes to limit SwiftUI invalidation.
        if timemodel.currentTime != current {
            timemodel.currentTime = current
        }
        if timemodel.totalTime != total {
            timemodel.totalTime = total
        }
        _ = subtitleModel.subtitle(currentTime: currentTime)
    }

    public func player(layer: KSPlayerLayer, finish error: Error?) {
        onFinish?(layer, error)
    }

    public func player(layer _: KSPlayerLayer, bufferedCount: Int, consumeTime: TimeInterval) {
        onBufferChanged?(bufferedCount, consumeTime)
    }
}
|
||||
|
||||
extension KSVideoPlayer: Equatable {
    /// Two players are considered equal when they present the same URL;
    /// coordinator and options are deliberately ignored.
    public static func == (lhs: KSVideoPlayer, rhs: KSVideoPlayer) -> Bool {
        return lhs.url == rhs.url
    }
}
|
||||
|
||||
@MainActor
public extension KSVideoPlayer {
    /// Sets a callback fired when buffering count / wait time changes.
    func onBufferChanged(_ handler: @escaping (Int, TimeInterval) -> Void) -> Self {
        coordinator.onBufferChanged = handler
        return self
    }

    /// Playing to the end.
    func onFinish(_ handler: @escaping (KSPlayerLayer, Error?) -> Void) -> Self {
        coordinator.onFinish = handler
        return self
    }

    /// Periodic progress callback with (currentTime, totalTime) in seconds.
    func onPlay(_ handler: @escaping (TimeInterval, TimeInterval) -> Void) -> Self {
        coordinator.onPlay = handler
        return self
    }

    /// Playback status changes, such as from play to pause.
    func onStateChanged(_ handler: @escaping (KSPlayerLayer, KSPlayerState) -> Void) -> Self {
        coordinator.onStateChanged = handler
        return self
    }

    #if canImport(UIKit)
    /// Swipe-gesture callback (recognizers are installed while preparing).
    func onSwipe(_ handler: @escaping (UISwipeGestureRecognizer.Direction) -> Void) -> Self {
        coordinator.onSwipe = handler
        return self
    }
    #endif
}
|
||||
|
||||
extension View {
    /// Returns a copy of `self` after applying `body`'s in-place mutations —
    /// a small builder-style helper for configuring value-type views.
    func then(_ body: (inout Self) -> Void) -> Self {
        var copy = self
        body(&copy)
        return copy
    }
}
|
||||
|
||||
/// A very frequently changing model; views should observe it sparingly.
public class ControllerTimeModel: ObservableObject {
    // Stored as Int so @Published only fires on whole-second changes.
    @Published
    public var currentTime = 0
    @Published
    public var totalTime = 1
}
|
||||
@@ -0,0 +1,335 @@
|
||||
//
|
||||
// MediaPlayerProtocol.swift
|
||||
// KSPlayer-tvOS
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Foundation
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
|
||||
/// Minimal playback surface shared by all player cores: timing, media
/// geometry, lifecycle and seeking.
public protocol MediaPlayback: AnyObject {
    /// Total duration in seconds.
    var duration: TimeInterval { get }
    /// File size in bytes (reported as Double by the cores).
    var fileSize: Double { get }
    /// Video dimensions as reported by the stream.
    var naturalSize: CGSize { get }
    var chapters: [Chapter] { get }
    /// Current position in seconds.
    var currentPlaybackTime: TimeInterval { get }
    func prepareToPlay()
    func shutdown()
    /// Seeks to `time`; `completion` receives whether the seek finished.
    func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void))
}
|
||||
|
||||
/// Live statistics published by a player core. Scalar readouts are pulled
/// lazily through injected closures; frame-level counters are plain stored
/// properties updated by the core.
public class DynamicInfo: ObservableObject {
    private let metadataProvider: () -> [String: String]
    private let bytesReadProvider: () -> Int64
    private let audioBitrateProvider: () -> Int
    private let videoBitrateProvider: () -> Int

    /// Stream metadata (e.g. "title", "artist") from the core.
    public var metadata: [String: String] { metadataProvider() }

    /// Total bytes read from the input so far.
    public var bytesRead: Int64 { bytesReadProvider() }

    /// Current audio bitrate.
    public var audioBitrate: Int { audioBitrateProvider() }

    /// Current video bitrate.
    public var videoBitrate: Int { videoBitrateProvider() }

    /// Measured display frame rate; published so overlays can observe it.
    @Published
    public var displayFPS = 0.0
    public var audioVideoSyncDiff = 0.0
    public var droppedVideoFrameCount = UInt32(0)
    public var droppedVideoPacketCount = UInt32(0)

    init(metadata: @escaping () -> [String: String], bytesRead: @escaping () -> Int64, audioBitrate: @escaping () -> Int, videoBitrate: @escaping () -> Int) {
        metadataProvider = metadata
        bytesReadProvider = bytesRead
        audioBitrateProvider = audioBitrate
        videoBitrateProvider = videoBitrate
    }
}
|
||||
|
||||
/// A chapter marker: [start, end] in seconds plus its display title.
public struct Chapter {
    public let start: TimeInterval
    public let end: TimeInterval
    public let title: String
}
|
||||
|
||||
/// Full contract a player core must implement on top of `MediaPlayback`:
/// state reporting, audio/video routing, rate/volume, PiP and track selection.
public protocol MediaPlayerProtocol: MediaPlayback {
    var delegate: MediaPlayerDelegate? { get set }
    /// Platform view hosting the rendered video, if the core provides one.
    var view: UIView? { get }
    /// How far ahead of the playhead data is buffered, in seconds.
    var playableTime: TimeInterval { get }
    var isReadyToPlay: Bool { get }
    var playbackState: MediaPlaybackState { get }
    var loadState: MediaLoadState { get }
    var isPlaying: Bool { get }
    var seekable: Bool { get }
    // var numberOfBytesTransferred: Int64 { get }
    var isMuted: Bool { get set }
    var allowsExternalPlayback: Bool { get set }
    var usesExternalPlaybackWhileExternalScreenIsActive: Bool { get set }
    var isExternalPlaybackActive: Bool { get }
    var playbackRate: Float { get set }
    var playbackVolume: Float { get set }
    var contentMode: UIViewContentMode { get set }
    var subtitleDataSouce: SubtitleDataSouce? { get }
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, *)
    var playbackCoordinator: AVPlaybackCoordinator { get }
    @available(tvOS 14.0, *)
    var pipController: KSPictureInPictureController? { get }
    var dynamicInfo: DynamicInfo? { get }
    init(url: URL, options: KSOptions)
    /// Re-targets the existing core to a new URL without recreating it.
    func replace(url: URL, options: KSOptions)
    func play()
    func pause()
    func enterBackground()
    func enterForeground()
    func thumbnailImageAtCurrentTime() async -> CGImage?
    func tracks(mediaType: AVFoundation.AVMediaType) -> [MediaPlayerTrack]
    func select(track: some MediaPlayerTrack)
}
|
||||
|
||||
public extension MediaPlayerProtocol {
    /// Frame rate of the currently enabled video track, or 0 when no video
    /// track is enabled.
    var nominalFrameRate: Float {
        guard let enabledTrack = tracks(mediaType: .video).first(where: \.isEnabled) else {
            return 0
        }
        return enabledTrack.nominalFrameRate
    }
}
|
||||
|
||||
/// Callbacks a player core sends to its owner (e.g. `KSPlayerLayer`).
/// Main-actor isolated: cores must hop to the main actor before calling.
@MainActor
public protocol MediaPlayerDelegate: AnyObject {
    func readyToPlay(player: some MediaPlayerProtocol)
    func changeLoadState(player: some MediaPlayerProtocol)
    // Buffer loading progress, 0-100.
    func changeBuffering(player: some MediaPlayerProtocol, progress: Int)
    /// Loop-playback iteration completed `loopCount` times.
    func playBack(player: some MediaPlayerProtocol, loopCount: Int)
    /// Playback ended; `error` is nil on natural end-of-media.
    func finish(player: some MediaPlayerProtocol, error: Error?)
}
|
||||
|
||||
/// A single audio/video/subtitle track exposed by a player core.
public protocol MediaPlayerTrack: AnyObject, CustomStringConvertible {
    var trackID: Int32 { get }
    var name: String { get }
    /// BCP-47 / ISO language code, if the container provides one.
    var languageCode: String? { get }
    var mediaType: AVFoundation.AVMediaType { get }
    var nominalFrameRate: Float { get set }
    var bitRate: Int64 { get }
    var bitDepth: Int32 { get }
    var isEnabled: Bool { get set }
    /// True for bitmap subtitle formats (e.g. PGS) vs. text subtitles.
    var isImageSubtitle: Bool { get }
    /// Rotation in degrees from stream metadata.
    var rotation: Int16 { get }
    /// Dolby Vision configuration, when present.
    var dovi: DOVIDecoderConfigurationRecord? { get }
    var fieldOrder: FFmpegFieldOrder { get }
    var formatDescription: CMFormatDescription? { get }
}
|
||||
|
||||
// public extension MediaPlayerTrack: Identifiable {
|
||||
// var id: Int32 { trackID }
|
||||
// }
|
||||
|
||||
/// Coarse transport state of a player core.
public enum MediaPlaybackState: Int {
    case idle
    case playing
    case paused
    case seeking
    case finished
    case stopped
}
|
||||
|
||||
/// Whether enough data is buffered to play.
public enum MediaLoadState: Int {
    case idle
    case loading
    case playable
}
|
||||
|
||||
// swiftlint:disable identifier_name
|
||||
/// Dolby Vision decoder configuration record; field names mirror the
/// DOVIDecoderConfigurationRecord layout in FFmpeg (hence snake_case).
public struct DOVIDecoderConfigurationRecord {
    public let dv_version_major: UInt8
    public let dv_version_minor: UInt8
    public let dv_profile: UInt8
    public let dv_level: UInt8
    public let rpu_present_flag: UInt8
    public let el_present_flag: UInt8
    public let bl_present_flag: UInt8
    public let dv_bl_signal_compatibility_id: UInt8
}
|
||||
|
||||
/// Interlaced field order; raw values match FFmpeg's AVFieldOrder.
public enum FFmpegFieldOrder: UInt8 {
    case unknown = 0
    case progressive
    case tt // < Top coded_first, top displayed first
    case bb // < Bottom coded first, bottom displayed first
    case tb // < Top coded first, bottom displayed first
    case bt // < Bottom coded first, top displayed first
}
|
||||
|
||||
extension FFmpegFieldOrder: CustomStringConvertible {
    /// Human-readable field-order label; `.unknown` is reported as
    /// progressive, matching the original behavior.
    public var description: String {
        let label: String
        switch self {
        case .unknown, .progressive:
            label = "progressive"
        case .tt:
            label = "top first"
        case .bb:
            label = "bottom first"
        case .tb:
            label = "top coded first (swapped)"
        case .bt:
            label = "bottom coded first (swapped)"
        }
        return label
    }
}
|
||||
|
||||
// swiftlint:enable identifier_name
|
||||
public extension MediaPlayerTrack {
    /// Localized display name for `languageCode`, if any.
    var language: String? {
        languageCode.flatMap {
            Locale.current.localizedString(forLanguageCode: $0)
        }
    }

    var codecType: FourCharCode {
        mediaSubType.rawValue
    }

    /// Dynamic range of the track; a Dolby Vision record takes precedence
    /// over whatever the format description would report.
    var dynamicRange: DynamicRange? {
        if dovi != nil {
            return .dolbyVision
        } else {
            return formatDescription?.dynamicRange
        }
    }

    var colorSpace: CGColorSpace? {
        KSOptions.colorSpace(ycbcrMatrix: yCbCrMatrix as CFString?, transferFunction: transferFunction as CFString?)
    }

    // Falls back to `.boxed` when the core provided no format description.
    var mediaSubType: CMFormatDescription.MediaSubType {
        formatDescription?.mediaSubType ?? .boxed
    }

    var audioStreamBasicDescription: AudioStreamBasicDescription? {
        formatDescription?.audioStreamBasicDescription
    }

    var naturalSize: CGSize {
        formatDescription?.naturalSize ?? .zero
    }

    var colorPrimaries: String? {
        formatDescription?.colorPrimaries
    }

    var transferFunction: String? {
        formatDescription?.transferFunction
    }

    var yCbCrMatrix: String? {
        formatDescription?.yCbCrMatrix
    }
}
|
||||
|
||||
public extension CMFormatDescription {
    /// Classifies the content's dynamic range from codec type, bit depth and
    /// transfer function.
    /// NOTE(review): any 10-bit stream is classified as HDR10 here even
    /// without a PQ transfer function — confirm that heuristic is intended.
    var dynamicRange: DynamicRange {
        let contentRange: DynamicRange
        if codecType.string == "dvhe" || codecType == kCMVideoCodecType_DolbyVisionHEVC {
            contentRange = .dolbyVision
        } else if bitDepth == 10 || transferFunction == kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ as String { /// HDR
            contentRange = .hdr10
        } else if transferFunction == kCVImageBufferTransferFunction_ITU_R_2100_HLG as String { /// HLG
            contentRange = .hlg
        } else {
            contentRange = .sdr
        }
        return contentRange
    }

    var bitDepth: Int32 {
        codecType.bitDepth
    }

    var codecType: FourCharCode {
        mediaSubType.rawValue
    }

    /// Color primaries from the format description extensions, if present.
    var colorPrimaries: String? {
        if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
            return dictionary[kCVImageBufferColorPrimariesKey] as? String
        } else {
            return nil
        }
    }

    var transferFunction: String? {
        if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
            return dictionary[kCVImageBufferTransferFunctionKey] as? String
        } else {
            return nil
        }
    }

    var yCbCrMatrix: String? {
        if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
            return dictionary[kCVImageBufferYCbCrMatrixKey] as? String
        } else {
            return nil
        }
    }

    /// Display size: coded width with the height scaled by the pixel aspect
    /// ratio (anamorphic content).
    var naturalSize: CGSize {
        let aspectRatio = aspectRatio
        return CGSize(width: Int(dimensions.width), height: Int(CGFloat(dimensions.height) * aspectRatio.height / aspectRatio.width))
    }

    /// Pixel aspect ratio from the extensions; 1:1 when absent or invalid.
    var aspectRatio: CGSize {
        if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
            if let ratio = dictionary[kCVImageBufferPixelAspectRatioKey] as? NSDictionary,
               let horizontal = (ratio[kCVImageBufferPixelAspectRatioHorizontalSpacingKey] as? NSNumber)?.intValue,
               let vertical = (ratio[kCVImageBufferPixelAspectRatioVerticalSpacingKey] as? NSNumber)?.intValue,
               horizontal > 0, vertical > 0
            {
                return CGSize(width: horizontal, height: vertical)
            }
        }
        return CGSize(width: 1, height: 1)
    }

    /// Bits per pixel from the extensions; defaults to 24.
    var depth: Int32 {
        if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
            return dictionary[kCMFormatDescriptionExtension_Depth] as? Int32 ?? 24
        } else {
            return 24
        }
    }

    /// Whether the video uses full-range (vs. video-range) luma; defaults false.
    var fullRangeVideo: Bool {
        if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
            return dictionary[kCMFormatDescriptionExtension_FullRangeVideo] as? Bool ?? false
        } else {
            return false
        }
    }
}
|
||||
|
||||
func setHttpProxy() {
|
||||
guard KSOptions.useSystemHTTPProxy else {
|
||||
return
|
||||
}
|
||||
guard let proxySettings = CFNetworkCopySystemProxySettings()?.takeUnretainedValue() as? NSDictionary else {
|
||||
unsetenv("http_proxy")
|
||||
return
|
||||
}
|
||||
guard let proxyHost = proxySettings[kCFNetworkProxiesHTTPProxy] as? String, let proxyPort = proxySettings[kCFNetworkProxiesHTTPPort] as? Int else {
|
||||
unsetenv("http_proxy")
|
||||
return
|
||||
}
|
||||
let httpProxy = "http://\(proxyHost):\(proxyPort)"
|
||||
setenv("http_proxy", httpProxy, 0)
|
||||
}
|
||||
378
KSPlayer-main/Sources/KSPlayer/AVPlayer/PlayerDefines.swift
Normal file
378
KSPlayer-main/Sources/KSPlayer/AVPlayer/PlayerDefines.swift
Normal file
@@ -0,0 +1,378 @@
|
||||
//
|
||||
// PlayerDefines.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CoreMedia
|
||||
import CoreServices
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
|
||||
public extension KSOptions {
|
||||
@MainActor
|
||||
static var windowScene: UIWindowScene? {
|
||||
UIApplication.shared.connectedScenes.first as? UIWindowScene
|
||||
}
|
||||
|
||||
@MainActor
|
||||
static var sceneSize: CGSize {
|
||||
let window = windowScene?.windows.first
|
||||
return window?.bounds.size ?? .zero
|
||||
}
|
||||
}
|
||||
#else
|
||||
import AppKit
|
||||
import SwiftUI
|
||||
|
||||
public typealias UIView = NSView
|
||||
public typealias UIPasteboard = NSPasteboard
|
||||
public extension KSOptions {
|
||||
static var sceneSize: CGSize {
|
||||
NSScreen.main?.frame.size ?? .zero
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
// extension MediaPlayerTrack {
|
||||
// static func == (lhs: Self, rhs: Self) -> Bool {
|
||||
// lhs.trackID == rhs.trackID
|
||||
// }
|
||||
// }
|
||||
|
||||
public enum DynamicRange: Int32 {
|
||||
case sdr = 0
|
||||
case hdr10 = 2
|
||||
case hlg = 3
|
||||
case dolbyVision = 5
|
||||
|
||||
#if canImport(UIKit)
|
||||
var hdrMode: AVPlayer.HDRMode {
|
||||
switch self {
|
||||
case .sdr:
|
||||
return AVPlayer.HDRMode(rawValue: 0)
|
||||
case .hdr10:
|
||||
return .hdr10 // 2
|
||||
case .hlg:
|
||||
return .hlg // 1
|
||||
case .dolbyVision:
|
||||
return .dolbyVision // 4
|
||||
}
|
||||
}
|
||||
#endif
|
||||
public static var availableHDRModes: [DynamicRange] {
|
||||
#if os(macOS)
|
||||
if NSScreen.main?.maximumPotentialExtendedDynamicRangeColorComponentValue ?? 1.0 > 1.0 {
|
||||
return [.hdr10]
|
||||
} else {
|
||||
return [.sdr]
|
||||
}
|
||||
#else
|
||||
let availableHDRModes = AVPlayer.availableHDRModes
|
||||
if availableHDRModes == AVPlayer.HDRMode(rawValue: 0) {
|
||||
return [.sdr]
|
||||
} else {
|
||||
var modes = [DynamicRange]()
|
||||
if availableHDRModes.contains(.dolbyVision) {
|
||||
modes.append(.dolbyVision)
|
||||
}
|
||||
if availableHDRModes.contains(.hdr10) {
|
||||
modes.append(.hdr10)
|
||||
}
|
||||
if availableHDRModes.contains(.hlg) {
|
||||
modes.append(.hlg)
|
||||
}
|
||||
return modes
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
extension DynamicRange: CustomStringConvertible {
|
||||
public var description: String {
|
||||
switch self {
|
||||
case .sdr:
|
||||
return "SDR"
|
||||
case .hdr10:
|
||||
return "HDR10"
|
||||
case .hlg:
|
||||
return "HLG"
|
||||
case .dolbyVision:
|
||||
return "Dolby Vision"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension DynamicRange {
|
||||
var colorPrimaries: CFString {
|
||||
switch self {
|
||||
case .sdr:
|
||||
return kCVImageBufferColorPrimaries_ITU_R_709_2
|
||||
case .hdr10, .hlg, .dolbyVision:
|
||||
return kCVImageBufferColorPrimaries_ITU_R_2020
|
||||
}
|
||||
}
|
||||
|
||||
var transferFunction: CFString {
|
||||
switch self {
|
||||
case .sdr:
|
||||
return kCVImageBufferTransferFunction_ITU_R_709_2
|
||||
case .hdr10:
|
||||
return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ
|
||||
case .hlg, .dolbyVision:
|
||||
return kCVImageBufferTransferFunction_ITU_R_2100_HLG
|
||||
}
|
||||
}
|
||||
|
||||
var yCbCrMatrix: CFString {
|
||||
switch self {
|
||||
case .sdr:
|
||||
return kCVImageBufferYCbCrMatrix_ITU_R_709_2
|
||||
case .hdr10, .hlg, .dolbyVision:
|
||||
return kCVImageBufferYCbCrMatrix_ITU_R_2020
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@MainActor
|
||||
public enum DisplayEnum {
|
||||
case plane
|
||||
// swiftlint:disable identifier_name
|
||||
case vr
|
||||
// swiftlint:enable identifier_name
|
||||
case vrBox
|
||||
}
|
||||
|
||||
public struct VideoAdaptationState {
|
||||
public struct BitRateState {
|
||||
let bitRate: Int64
|
||||
let time: TimeInterval
|
||||
}
|
||||
|
||||
public let bitRates: [Int64]
|
||||
public let duration: TimeInterval
|
||||
public internal(set) var fps: Float
|
||||
public internal(set) var bitRateStates: [BitRateState]
|
||||
public internal(set) var currentPlaybackTime: TimeInterval = 0
|
||||
public internal(set) var isPlayable: Bool = false
|
||||
public internal(set) var loadedCount: Int = 0
|
||||
}
|
||||
|
||||
public enum ClockProcessType {
|
||||
case remain
|
||||
case next
|
||||
case dropNextFrame
|
||||
case dropNextPacket
|
||||
case dropGOPPacket
|
||||
case flush
|
||||
case seek
|
||||
}
|
||||
|
||||
// 缓冲情况
|
||||
public protocol CapacityProtocol {
|
||||
var fps: Float { get }
|
||||
var packetCount: Int { get }
|
||||
var frameCount: Int { get }
|
||||
var frameMaxCount: Int { get }
|
||||
var isEndOfFile: Bool { get }
|
||||
var mediaType: AVFoundation.AVMediaType { get }
|
||||
}
|
||||
|
||||
extension CapacityProtocol {
|
||||
var loadedTime: TimeInterval {
|
||||
TimeInterval(packetCount + frameCount) / TimeInterval(fps)
|
||||
}
|
||||
}
|
||||
|
||||
public struct LoadingState {
|
||||
public let loadedTime: TimeInterval
|
||||
public let progress: TimeInterval
|
||||
public let packetCount: Int
|
||||
public let frameCount: Int
|
||||
public let isEndOfFile: Bool
|
||||
public let isPlayable: Bool
|
||||
public let isFirst: Bool
|
||||
public let isSeek: Bool
|
||||
}
|
||||
|
||||
public let KSPlayerErrorDomain = "KSPlayerErrorDomain"
|
||||
|
||||
public enum KSPlayerErrorCode: Int {
|
||||
case unknown
|
||||
case formatCreate
|
||||
case formatOpenInput
|
||||
case formatOutputCreate
|
||||
case formatWriteHeader
|
||||
case formatFindStreamInfo
|
||||
case readFrame
|
||||
case codecContextCreate
|
||||
case codecContextSetParam
|
||||
case codecContextFindDecoder
|
||||
case codesContextOpen
|
||||
case codecVideoSendPacket
|
||||
case codecAudioSendPacket
|
||||
case codecVideoReceiveFrame
|
||||
case codecAudioReceiveFrame
|
||||
case auidoSwrInit
|
||||
case codecSubtitleSendPacket
|
||||
case videoTracksUnplayable
|
||||
case subtitleUnEncoding
|
||||
case subtitleUnParse
|
||||
case subtitleFormatUnSupport
|
||||
case subtitleParamsEmpty
|
||||
}
|
||||
|
||||
extension KSPlayerErrorCode: CustomStringConvertible {
|
||||
public var description: String {
|
||||
switch self {
|
||||
case .formatCreate:
|
||||
return "avformat_alloc_context return nil"
|
||||
case .formatOpenInput:
|
||||
return "avformat can't open input"
|
||||
case .formatOutputCreate:
|
||||
return "avformat_alloc_output_context2 fail"
|
||||
case .formatWriteHeader:
|
||||
return "avformat_write_header fail"
|
||||
case .formatFindStreamInfo:
|
||||
return "avformat_find_stream_info return nil"
|
||||
case .codecContextCreate:
|
||||
return "avcodec_alloc_context3 return nil"
|
||||
case .codecContextSetParam:
|
||||
return "avcodec can't set parameters to context"
|
||||
case .codesContextOpen:
|
||||
return "codesContext can't Open"
|
||||
case .codecVideoReceiveFrame:
|
||||
return "avcodec can't receive video frame"
|
||||
case .codecAudioReceiveFrame:
|
||||
return "avcodec can't receive audio frame"
|
||||
case .videoTracksUnplayable:
|
||||
return "VideoTracks are not even playable."
|
||||
case .codecSubtitleSendPacket:
|
||||
return "avcodec can't decode subtitle"
|
||||
case .subtitleUnEncoding:
|
||||
return "Subtitle encoding format is not supported."
|
||||
case .subtitleUnParse:
|
||||
return "Subtitle parsing error"
|
||||
case .subtitleFormatUnSupport:
|
||||
return "Current subtitle format is not supported"
|
||||
case .subtitleParamsEmpty:
|
||||
return "Subtitle Params is empty"
|
||||
case .auidoSwrInit:
|
||||
return "swr_init swrContext fail"
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension NSError {
|
||||
convenience init(errorCode: KSPlayerErrorCode, userInfo: [String: Any] = [:]) {
|
||||
var userInfo = userInfo
|
||||
userInfo[NSLocalizedDescriptionKey] = errorCode.description
|
||||
self.init(domain: KSPlayerErrorDomain, code: errorCode.rawValue, userInfo: userInfo)
|
||||
}
|
||||
|
||||
convenience init(description: String) {
|
||||
var userInfo = [String: Any]()
|
||||
userInfo[NSLocalizedDescriptionKey] = description
|
||||
self.init(domain: KSPlayerErrorDomain, code: 0, userInfo: userInfo)
|
||||
}
|
||||
}
|
||||
|
||||
#if !SWIFT_PACKAGE
|
||||
extension Bundle {
|
||||
static let module = Bundle(for: KSPlayerLayer.self).path(forResource: "KSPlayer_KSPlayer", ofType: "bundle").flatMap { Bundle(path: $0) } ?? Bundle.main
|
||||
}
|
||||
#endif
|
||||
|
||||
public enum TimeType {
|
||||
case min
|
||||
case hour
|
||||
case minOrHour
|
||||
case millisecond
|
||||
}
|
||||
|
||||
public extension TimeInterval {
|
||||
func toString(for type: TimeType) -> String {
|
||||
Int(ceil(self)).toString(for: type)
|
||||
}
|
||||
}
|
||||
|
||||
public extension Int {
|
||||
func toString(for type: TimeType) -> String {
|
||||
var second = self
|
||||
var min = second / 60
|
||||
second -= min * 60
|
||||
switch type {
|
||||
case .min:
|
||||
return String(format: "%02d:%02d", min, second)
|
||||
case .hour:
|
||||
let hour = min / 60
|
||||
min -= hour * 60
|
||||
return String(format: "%d:%02d:%02d", hour, min, second)
|
||||
case .minOrHour:
|
||||
let hour = min / 60
|
||||
if hour > 0 {
|
||||
min -= hour * 60
|
||||
return String(format: "%d:%02d:%02d", hour, min, second)
|
||||
} else {
|
||||
return String(format: "%02d:%02d", min, second)
|
||||
}
|
||||
case .millisecond:
|
||||
var time = self * 100
|
||||
let millisecond = time % 100
|
||||
time /= 100
|
||||
let sec = time % 60
|
||||
time /= 60
|
||||
let min = time % 60
|
||||
time /= 60
|
||||
let hour = time % 60
|
||||
if hour > 0 {
|
||||
return String(format: "%d:%02d:%02d.%02d", hour, min, sec, millisecond)
|
||||
} else {
|
||||
return String(format: "%02d:%02d.%02d", min, sec, millisecond)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public extension FixedWidthInteger {
|
||||
var kmFormatted: String {
|
||||
Double(self).kmFormatted
|
||||
}
|
||||
}
|
||||
|
||||
open class AbstractAVIOContext {
|
||||
let bufferSize: Int32
|
||||
let writable: Bool
|
||||
public init(bufferSize: Int32 = 32 * 1024, writable: Bool = false) {
|
||||
self.bufferSize = bufferSize
|
||||
self.writable = writable
|
||||
}
|
||||
|
||||
open func read(buffer _: UnsafePointer<UInt8>?, size: Int32) -> Int32 {
|
||||
size
|
||||
}
|
||||
|
||||
open func write(buffer _: UnsafePointer<UInt8>?, size: Int32) -> Int32 {
|
||||
size
|
||||
}
|
||||
|
||||
/**
|
||||
#define SEEK_SET 0 /* set file offset to offset */
|
||||
#define SEEK_CUR 1 /* set file offset to current plus offset */
|
||||
#define SEEK_END 2 /* set file offset to EOF plus offset */
|
||||
*/
|
||||
open func seek(offset: Int64, whence _: Int32) -> Int64 {
|
||||
offset
|
||||
}
|
||||
|
||||
open func fileSize() -> Int64 {
|
||||
-1
|
||||
}
|
||||
|
||||
open func close() {}
|
||||
deinit {}
|
||||
}
|
||||
36
KSPlayer-main/Sources/KSPlayer/Audio/AudioPlayerView.swift
Normal file
36
KSPlayer-main/Sources/KSPlayer/Audio/AudioPlayerView.swift
Normal file
@@ -0,0 +1,36 @@
|
||||
//
|
||||
// AudioPlayerView.swift
|
||||
// VoiceNote
|
||||
//
|
||||
// Created by kintan on 2018/8/16.
|
||||
//
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
open class AudioPlayerView: PlayerView {
|
||||
override public init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
toolBar.timeType = .min
|
||||
toolBar.spacing = 5
|
||||
toolBar.addArrangedSubview(toolBar.playButton)
|
||||
toolBar.addArrangedSubview(toolBar.currentTimeLabel)
|
||||
toolBar.addArrangedSubview(toolBar.timeSlider)
|
||||
toolBar.addArrangedSubview(toolBar.totalTimeLabel)
|
||||
toolBar.playButton.tintColor = UIColor(rgb: 0x2166FF)
|
||||
toolBar.timeSlider.setThumbImage(UIColor(rgb: 0x2980FF).createImage(size: CGSize(width: 2, height: 15)), for: .normal)
|
||||
toolBar.timeSlider.minimumTrackTintColor = UIColor(rgb: 0xC8C7CC)
|
||||
toolBar.timeSlider.maximumTrackTintColor = UIColor(rgb: 0xEDEDED)
|
||||
toolBar.timeSlider.trackHeigt = 7
|
||||
addSubview(toolBar)
|
||||
toolBar.translatesAutoresizingMaskIntoConstraints = false
|
||||
NSLayoutConstraint.activate([
|
||||
toolBar.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 7),
|
||||
toolBar.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -12),
|
||||
toolBar.topAnchor.constraint(equalTo: topAnchor),
|
||||
toolBar.bottomAnchor.constraint(equalTo: bottomAnchor),
|
||||
])
|
||||
}
|
||||
}
|
||||
594
KSPlayer-main/Sources/KSPlayer/Core/AppKitExtend.swift
Normal file
594
KSPlayer-main/Sources/KSPlayer/Core/AppKitExtend.swift
Normal file
@@ -0,0 +1,594 @@
|
||||
//
|
||||
// AppKitExtend.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
/// 'NSWindow' is unavailable in Mac Catalyst
|
||||
#if !canImport(UIKit)
|
||||
import AppKit
|
||||
import CoreMedia
|
||||
import IOKit.pwr_mgt
|
||||
|
||||
public typealias UIApplicationDelegate = NSApplicationDelegate
|
||||
public typealias UIApplication = NSApplication
|
||||
public typealias UIWindow = NSWindow
|
||||
public typealias UIViewController = NSViewController
|
||||
public typealias UIColor = NSColor
|
||||
public typealias UIStackView = NSStackView
|
||||
public typealias UIPanGestureRecognizer = NSPanGestureRecognizer
|
||||
public typealias UIGestureRecognizer = NSGestureRecognizer
|
||||
public typealias UIGestureRecognizerDelegate = NSGestureRecognizerDelegate
|
||||
public typealias UIViewContentMode = ContentMode
|
||||
public typealias UIFont = NSFont
|
||||
public typealias UIFontDescriptor = NSFontDescriptor
|
||||
public typealias UIControl = NSControl
|
||||
public typealias UITextField = NSTextField
|
||||
public typealias UIImageView = NSImageView
|
||||
public typealias UITapGestureRecognizer = NSClickGestureRecognizer
|
||||
public typealias UXSlider = NSSlider
|
||||
public typealias UITableView = NSTableView
|
||||
public typealias UITableViewDelegate = NSTableViewDelegate
|
||||
public typealias UITableViewDataSource = NSTableViewDataSource
|
||||
public typealias UITouch = NSTouch
|
||||
public typealias UIEvent = NSEvent
|
||||
public typealias UIButton = KSButton
|
||||
public extension UIFontDescriptor.SymbolicTraits {
|
||||
static var traitItalic = italic
|
||||
static var traitBold = bold
|
||||
}
|
||||
|
||||
extension NSScreen {
|
||||
var scale: CGFloat {
|
||||
backingScaleFactor
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSClickGestureRecognizer {
|
||||
var numberOfTapsRequired: Int {
|
||||
get {
|
||||
numberOfClicksRequired
|
||||
}
|
||||
set {
|
||||
numberOfClicksRequired = newValue
|
||||
}
|
||||
}
|
||||
|
||||
func require(toFail otherGestureRecognizer: NSClickGestureRecognizer) {
|
||||
buttonMask = otherGestureRecognizer.buttonMask << 1
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSView {
|
||||
@objc internal var contentMode: UIViewContentMode {
|
||||
get {
|
||||
if let contentsGravity = backingLayer?.contentsGravity {
|
||||
switch contentsGravity {
|
||||
case .resize:
|
||||
return .scaleToFill
|
||||
case .resizeAspect:
|
||||
return .scaleAspectFit
|
||||
case .resizeAspectFill:
|
||||
return .scaleAspectFill
|
||||
default:
|
||||
return .scaleAspectFit
|
||||
}
|
||||
} else {
|
||||
return .scaleAspectFit
|
||||
}
|
||||
}
|
||||
set {
|
||||
switch newValue {
|
||||
case .scaleToFill:
|
||||
backingLayer?.contentsGravity = .resize
|
||||
case .scaleAspectFit:
|
||||
backingLayer?.contentsGravity = .resizeAspect
|
||||
case .scaleAspectFill:
|
||||
backingLayer?.contentsGravity = .resizeAspectFill
|
||||
case .center:
|
||||
backingLayer?.contentsGravity = .center
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var center: CGPoint {
|
||||
CGPoint(x: frame.midX, y: frame.midY)
|
||||
}
|
||||
|
||||
var alpha: CGFloat {
|
||||
get {
|
||||
alphaValue
|
||||
}
|
||||
set {
|
||||
alphaValue = newValue
|
||||
}
|
||||
}
|
||||
|
||||
var clipsToBounds: Bool {
|
||||
get {
|
||||
if let layer {
|
||||
return layer.masksToBounds
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
set {
|
||||
backingLayer?.masksToBounds = newValue
|
||||
}
|
||||
}
|
||||
|
||||
class func animate(withDuration duration: TimeInterval, animations: @escaping () -> Void, completion: ((Bool) -> Void)? = nil) {
|
||||
CATransaction.begin()
|
||||
CATransaction.setAnimationDuration(duration)
|
||||
CATransaction.setCompletionBlock {
|
||||
completion?(true)
|
||||
}
|
||||
animations()
|
||||
CATransaction.commit()
|
||||
}
|
||||
|
||||
class func animate(withDuration duration: TimeInterval, animations: @escaping () -> Void) {
|
||||
animate(withDuration: duration, animations: animations, completion: nil)
|
||||
}
|
||||
|
||||
func layoutIfNeeded() {
|
||||
layer?.layoutIfNeeded()
|
||||
}
|
||||
|
||||
func centerRotate(byDegrees: Double) {
|
||||
layer?.position = center
|
||||
layer?.anchorPoint = CGPoint(x: 0.5, y: 0.5)
|
||||
layer?.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(Double.pi * byDegrees / 180.0)))
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSImage {
|
||||
convenience init(cgImage: CGImage) {
|
||||
self.init(cgImage: cgImage, size: NSSize.zero)
|
||||
}
|
||||
|
||||
@available(macOS 11.0, *)
|
||||
convenience init?(systemName: String) {
|
||||
self.init(systemSymbolName: systemName, accessibilityDescription: nil)
|
||||
}
|
||||
}
|
||||
|
||||
extension NSButton {
|
||||
var titleFont: UIFont? {
|
||||
get {
|
||||
font
|
||||
}
|
||||
set {
|
||||
font = newValue
|
||||
}
|
||||
}
|
||||
|
||||
var tintColor: UIColor? {
|
||||
get {
|
||||
contentTintColor
|
||||
}
|
||||
set {
|
||||
contentTintColor = newValue
|
||||
}
|
||||
}
|
||||
|
||||
var backgroundColor: UIColor? {
|
||||
get {
|
||||
if let layer, let cgColor = layer.backgroundColor {
|
||||
return UIColor(cgColor: cgColor)
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
set {
|
||||
backingLayer?.backgroundColor = newValue?.cgColor
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension NSImageView {
|
||||
var backgroundColor: UIColor? {
|
||||
get {
|
||||
if let layer, let cgColor = layer.backgroundColor {
|
||||
return UIColor(cgColor: cgColor)
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
set {
|
||||
backingLayer?.backgroundColor = newValue?.cgColor
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSControl {
|
||||
var textAlignment: NSTextAlignment {
|
||||
get {
|
||||
alignment
|
||||
}
|
||||
set {
|
||||
alignment = newValue
|
||||
}
|
||||
}
|
||||
|
||||
var text: String {
|
||||
get {
|
||||
stringValue
|
||||
}
|
||||
set {
|
||||
stringValue = newValue
|
||||
}
|
||||
}
|
||||
|
||||
var attributedText: NSAttributedString? {
|
||||
get {
|
||||
attributedStringValue
|
||||
}
|
||||
set {
|
||||
attributedStringValue = newValue ?? NSAttributedString()
|
||||
}
|
||||
}
|
||||
|
||||
var numberOfLines: Int {
|
||||
get {
|
||||
usesSingleLineMode ? 1 : 0
|
||||
}
|
||||
set {
|
||||
usesSingleLineMode = newValue == 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSTextContainer {
|
||||
var numberOfLines: Int {
|
||||
get {
|
||||
maximumNumberOfLines
|
||||
}
|
||||
set {
|
||||
maximumNumberOfLines = newValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSResponder {
|
||||
var next: NSResponder? {
|
||||
nextResponder
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSSlider {
|
||||
var minimumTrackTintColor: UIColor? {
|
||||
get {
|
||||
trackFillColor
|
||||
}
|
||||
set {
|
||||
trackFillColor = newValue
|
||||
}
|
||||
}
|
||||
|
||||
var maximumTrackTintColor: UIColor? {
|
||||
get {
|
||||
nil
|
||||
}
|
||||
set {}
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSStackView {
|
||||
var axis: NSUserInterfaceLayoutOrientation {
|
||||
get {
|
||||
orientation
|
||||
}
|
||||
set {
|
||||
orientation = newValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public extension NSGestureRecognizer {
|
||||
func addTarget(_ target: AnyObject, action: Selector) {
|
||||
self.target = target
|
||||
self.action = action
|
||||
}
|
||||
}
|
||||
|
||||
public extension UIApplication {
|
||||
private static var assertionID = IOPMAssertionID()
|
||||
static var isIdleTimerDisabled = false {
|
||||
didSet {
|
||||
if isIdleTimerDisabled != oldValue {
|
||||
if isIdleTimerDisabled {
|
||||
_ = IOPMAssertionCreateWithName(kIOPMAssertionTypeNoDisplaySleep as CFString,
|
||||
IOPMAssertionLevel(kIOPMAssertionLevelOn),
|
||||
"KSPlayer is playing video" as CFString,
|
||||
&assertionID)
|
||||
} else {
|
||||
_ = IOPMAssertionRelease(assertionID)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var isIdleTimerDisabled: Bool {
|
||||
get {
|
||||
UIApplication.isIdleTimerDisabled
|
||||
}
|
||||
set {
|
||||
UIApplication.isIdleTimerDisabled = newValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// @available(*, unavailable, renamed: "UIView.ContentMode")
|
||||
@objc public enum ContentMode: Int {
|
||||
case scaleToFill
|
||||
|
||||
case scaleAspectFit // contents scaled to fit with fixed aspect. remainder is transparent
|
||||
|
||||
case scaleAspectFill // contents scaled to fill with fixed aspect. some portion of content may be clipped.
|
||||
|
||||
case redraw // redraw on bounds change (calls -setNeedsDisplay)
|
||||
|
||||
case center // contents remain same size. positioned adjusted.
|
||||
|
||||
case top
|
||||
|
||||
case bottom
|
||||
|
||||
case left
|
||||
|
||||
case right
|
||||
|
||||
case topLeft
|
||||
|
||||
case topRight
|
||||
|
||||
case bottomLeft
|
||||
|
||||
case bottomRight
|
||||
}
|
||||
|
||||
public extension UIControl {
|
||||
struct State: OptionSet {
|
||||
public var rawValue: UInt
|
||||
public init(rawValue: UInt) { self.rawValue = rawValue }
|
||||
public static var normal = State(rawValue: 1 << 0)
|
||||
public static var highlighted = State(rawValue: 1 << 1)
|
||||
public static var disabled = State(rawValue: 1 << 2)
|
||||
public static var selected = State(rawValue: 1 << 3)
|
||||
public static var focused = State(rawValue: 1 << 4)
|
||||
public static var application = State(rawValue: 1 << 5)
|
||||
public static var reserved = State(rawValue: 1 << 6)
|
||||
}
|
||||
}
|
||||
|
||||
extension UIControl.State: Hashable {}
|
||||
public class UILabel: NSTextField {
|
||||
override init(frame frameRect: CGRect) {
|
||||
super.init(frame: frameRect)
|
||||
alignment = .left
|
||||
isBordered = false
|
||||
isEditable = false
|
||||
isSelectable = false
|
||||
isBezeled = false
|
||||
drawsBackground = false
|
||||
focusRingType = .none
|
||||
textColor = NSColor.white
|
||||
}
|
||||
|
||||
@available(*, unavailable)
|
||||
required init?(coder _: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
}
|
||||
|
||||
public class KSButton: NSButton {
|
||||
private var images = [UIControl.State: UIImage]()
|
||||
private var titles = [UIControl.State: String]()
|
||||
private var titleColors = [State: UIColor]()
|
||||
private var targetActions = [ControlEvents: (AnyObject?, Selector)]()
|
||||
|
||||
override public init(frame frameRect: CGRect) {
|
||||
super.init(frame: frameRect)
|
||||
isBordered = false
|
||||
}
|
||||
|
||||
@available(*, unavailable)
|
||||
required init?(coder _: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
public var isSelected: Bool = false {
|
||||
didSet {
|
||||
update(state: isSelected ? .selected : .normal)
|
||||
}
|
||||
}
|
||||
|
||||
override public var isEnabled: Bool {
|
||||
didSet {
|
||||
update(state: isEnabled ? .normal : .disabled)
|
||||
}
|
||||
}
|
||||
|
||||
open func setImage(_ image: UIImage?, for state: UIControl.State) {
|
||||
images[state] = image
|
||||
if state == .normal, isEnabled, !isSelected {
|
||||
self.image = image
|
||||
}
|
||||
}
|
||||
|
||||
open func setTitle(_ title: String, for state: UIControl.State) {
|
||||
titles[state] = title
|
||||
if state == .normal, isEnabled, !isSelected {
|
||||
self.title = title
|
||||
}
|
||||
}
|
||||
|
||||
open func setTitleColor(_ titleColor: UIColor?, for state: UIControl.State) {
|
||||
titleColors[state] = titleColor
|
||||
if state == .normal, isEnabled, !isSelected {
|
||||
// self.titleColor = titleColor
|
||||
}
|
||||
}
|
||||
|
||||
private func update(state: UIControl.State) {
|
||||
if let stateImage = images[state] {
|
||||
image = stateImage
|
||||
}
|
||||
if let stateTitle = titles[state] {
|
||||
title = stateTitle
|
||||
}
|
||||
}
|
||||
|
||||
open func addTarget(_ target: AnyObject?, action: Selector, for controlEvents: ControlEvents) {
|
||||
targetActions[controlEvents] = (target, action)
|
||||
}
|
||||
|
||||
open func removeTarget(_: AnyObject?, action _: Selector?, for controlEvents: ControlEvents) {
|
||||
targetActions.removeValue(forKey: controlEvents)
|
||||
}
|
||||
|
||||
override open func updateTrackingAreas() {
|
||||
for trackingArea in trackingAreas {
|
||||
removeTrackingArea(trackingArea)
|
||||
}
|
||||
let trackingArea = NSTrackingArea(rect: bounds, options: [.mouseEnteredAndExited, .mouseMoved, .activeInKeyWindow], owner: self, userInfo: nil)
|
||||
addTrackingArea(trackingArea)
|
||||
}
|
||||
|
||||
override public func mouseDown(with event: NSEvent) {
|
||||
super.mouseDown(with: event)
|
||||
if let (target, action) = targetActions[.touchUpInside] ?? targetActions[.primaryActionTriggered] {
|
||||
_ = target?.perform(action, with: self)
|
||||
}
|
||||
}
|
||||
|
||||
override public func mouseEntered(with event: NSEvent) {
|
||||
super.mouseEntered(with: event)
|
||||
if let (target, action) = targetActions[.mouseExited] {
|
||||
_ = target?.perform(action, with: self)
|
||||
}
|
||||
}
|
||||
|
||||
override public func mouseExited(with event: NSEvent) {
|
||||
super.mouseExited(with: event)
|
||||
if let (target, action) = targetActions[.mouseExited] {
|
||||
_ = target?.perform(action, with: self)
|
||||
}
|
||||
}
|
||||
|
||||
open func sendActions(for controlEvents: ControlEvents) {
|
||||
if let (target, action) = targetActions[controlEvents] {
|
||||
_ = target?.perform(action, with: self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public class KSSlider: NSSlider {
|
||||
weak var delegate: KSSliderDelegate?
|
||||
public var trackHeigt = CGFloat(2)
|
||||
public var isPlayable = false
|
||||
public var isUserInteractionEnabled: Bool = true
|
||||
var tintColor: UIColor?
|
||||
public convenience init() {
|
||||
self.init(frame: .zero)
|
||||
}
|
||||
|
||||
override public init(frame frameRect: CGRect) {
|
||||
super.init(frame: frameRect)
|
||||
target = self
|
||||
action = #selector(progressSliderTouchEnded(_:))
|
||||
}
|
||||
|
||||
@available(*, unavailable)
|
||||
required init?(coder _: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
@objc private func progressSliderTouchEnded(_ sender: KSSlider) {
|
||||
if isUserInteractionEnabled {
|
||||
delegate?.slider(value: Double(sender.floatValue), event: .touchUpInside)
|
||||
}
|
||||
}
|
||||
|
||||
open func setThumbImage(_: UIImage?, for _: State) {}
|
||||
|
||||
@IBInspectable var maximumValue: Float {
|
||||
get {
|
||||
Float(maxValue)
|
||||
}
|
||||
set {
|
||||
maxValue = Double(newValue)
|
||||
}
|
||||
}
|
||||
|
||||
@IBInspectable var minimumValue: Float {
|
||||
get {
|
||||
Float(minValue)
|
||||
}
|
||||
set {
|
||||
minValue = Double(newValue)
|
||||
}
|
||||
}
|
||||
|
||||
@IBInspectable var value: Float {
|
||||
get {
|
||||
floatValue
|
||||
}
|
||||
set {
|
||||
floatValue = newValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension UIView {
|
||||
func image() -> UIImage? {
|
||||
guard let rep = bitmapImageRepForCachingDisplay(in: bounds) else {
|
||||
return nil
|
||||
}
|
||||
cacheDisplay(in: bounds, to: rep)
|
||||
let image = NSImage(size: bounds.size)
|
||||
image.addRepresentation(rep)
|
||||
return image
|
||||
}
|
||||
}
|
||||
|
||||
// todo
|
||||
open class UIAlertController: UIViewController {
|
||||
public enum Style: Int {
|
||||
case actionSheet
|
||||
case alert
|
||||
}
|
||||
|
||||
public convenience init(title _: String?, message _: String?, preferredStyle _: UIAlertController.Style) {
|
||||
self.init()
|
||||
}
|
||||
|
||||
var preferredAction: UIAlertAction?
|
||||
|
||||
open func addAction(_: UIAlertAction) {}
|
||||
}
|
||||
|
||||
open class UIAlertAction: NSObject {
|
||||
public enum Style: Int {
|
||||
case `default`
|
||||
case cancel
|
||||
case destructive
|
||||
}
|
||||
|
||||
public let title: String?
|
||||
public let style: UIAlertAction.Style
|
||||
public private(set) var isEnabled: Bool = false
|
||||
public init(title: String?, style: UIAlertAction.Style, handler _: ((UIAlertAction) -> Void)? = nil) {
|
||||
self.title = title
|
||||
self.style = style
|
||||
}
|
||||
}
|
||||
|
||||
public extension UIViewController {
|
||||
func present(_: UIViewController, animated _: Bool, completion _: (() -> Void)? = nil) {}
|
||||
}
|
||||
#endif
|
||||
282
KSPlayer-main/Sources/KSPlayer/Core/PlayerToolBar.swift
Normal file
282
KSPlayer-main/Sources/KSPlayer/Core/PlayerToolBar.swift
Normal file
@@ -0,0 +1,282 @@
|
||||
//
|
||||
// PlayerToolBar.swift
|
||||
// Pods
|
||||
//
|
||||
// Created by kintan on 16/5/21.
|
||||
//
|
||||
//
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
import AVKit
|
||||
|
||||
/// Bottom control bar for the player: play/pause, time labels, seek slider,
/// and track/definition/subtitle/PiP switch buttons. Cross-platform
/// (UIKit / AppKit shims), with tvOS-specific focus handling.
public class PlayerToolBar: UIStackView {
    public let srtButton = UIButton()
    public let timeLabel = UILabel()
    public let currentTimeLabel = UILabel()
    public let totalTimeLabel = UILabel()
    public let playButton = UIButton()
    public let timeSlider = KSSlider()
    public let playbackRateButton = UIButton()
    public let videoSwitchButton = UIButton()
    public let audioSwitchButton = UIButton()
    public let definitionButton = UIButton()
    public let pipButton = UIButton()
    // Called on tvOS when focus moves to a new item (see didUpdateFocus).
    // NOTE(review): "cofusedItem" looks like a typo for "focusedItem" — the
    // label is part of the public closure type, so renaming needs a sweep of callers.
    public var onFocusUpdate: ((_ cofusedItem: UIView) -> Void)?
    /// Display format for times (e.g. mm:ss vs hh:mm:ss). Changing it
    /// re-renders all three time labels. `TimeType`/`toString(for:)` are
    /// declared elsewhere in the project.
    public var timeType = TimeType.minOrHour {
        didSet {
            if timeType != oldValue {
                let currentTimeText = currentTime.toString(for: timeType)
                let totalTimeText = totalTime.toString(for: timeType)
                currentTimeLabel.text = currentTimeText
                totalTimeLabel.text = totalTimeText
                timeLabel.text = "\(currentTimeText) / \(totalTimeText)"
            }
        }
    }

    /// Current playback position in seconds. NaN is coerced to 0.
    /// Updates the labels and slider; for live streams the slider tracks
    /// wall-clock time-of-day instead of media time.
    public var currentTime: TimeInterval = 0 {
        didSet {
            guard !currentTime.isNaN else {
                currentTime = 0
                return
            }
            if currentTime != oldValue {
                let text = currentTime.toString(for: timeType)
                currentTimeLabel.text = text
                timeLabel.text = "\(text) / \(totalTime.toString(for: timeType))"
                if isLiveStream {
                    timeSlider.value = Float(todayInterval)
                } else {
                    timeSlider.value = Float(currentTime)
                }
            }
        }
    }

    // Unix timestamp of today's midnight (local calendar); cached lazily.
    // NOTE(review): "Interal" looks like a typo for "Interval"; internal-only name.
    lazy var startDateTimeInteral: TimeInterval = {
        let date = Date()
        let calendar = Calendar.current
        let components = calendar.dateComponents([.year, .month, .day], from: date)
        let startDate = calendar.date(from: components)
        return startDate?.timeIntervalSince1970 ?? 0
    }()

    // Seconds elapsed since local midnight — drives the slider for live streams.
    var todayInterval: TimeInterval {
        Date().timeIntervalSince1970 - startDateTimeInteral
    }

    /// Total media duration in seconds. NaN is coerced to 0. A value of 0
    /// marks the stream as live, in which case the slider spans 24 hours.
    public var totalTime: TimeInterval = 0 {
        didSet {
            guard !totalTime.isNaN else {
                totalTime = 0
                return
            }
            if totalTime != oldValue {
                let text = totalTime.toString(for: timeType)
                totalTimeLabel.text = text
                timeLabel.text = "\(currentTime.toString(for: timeType)) / \(text)"
                timeSlider.maximumValue = Float(totalTime)
            }
            if isLiveStream {
                timeSlider.maximumValue = Float(60 * 60 * 24)
            }
        }
    }

    /// A duration of exactly 0 is treated as a live stream.
    public var isLiveStream: Bool {
        totalTime == 0
    }

    /// Enables/disables user interaction on the seek slider.
    public var isSeekable: Bool = true {
        didSet {
            timeSlider.isUserInteractionEnabled = isSeekable
        }
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        initUI()
    }

    @available(*, unavailable)
    required init(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// One-time UI setup: fonts, colors, button tags (used to map taps back to
    /// `PlayerButtonType`), SF Symbols where available, and per-platform layout.
    private func initUI() {
        let focusColor = UIColor.white
        let tintColor = UIColor.gray
        distribution = .fill
        currentTimeLabel.textColor = UIColor(rgb: 0x9B9B9B)
        currentTimeLabel.font = UIFont.monospacedDigitSystemFont(ofSize: 14, weight: .regular)
        currentTimeLabel.text = 0.toString(for: timeType)
        totalTimeLabel.textColor = UIColor(rgb: 0x9B9B9B)
        totalTimeLabel.font = UIFont.monospacedDigitSystemFont(ofSize: 14, weight: .regular)
        totalTimeLabel.text = 0.toString(for: timeType)

        timeLabel.textColor = UIColor(rgb: 0x9B9B9B)
        timeLabel.textAlignment = .left
        timeLabel.font = UIFont.monospacedDigitSystemFont(ofSize: 14, weight: .regular)
        timeLabel.text = "\(0.toString(for: timeType)) / \(0.toString(for: timeType))"
        timeSlider.minimumValue = 0
        #if os(iOS)
        if #available(macCatalyst 15.0, iOS 15.0, *) {
            // .pad keeps the pre-iOS-15 slider look under Catalyst.
            timeSlider.preferredBehavioralStyle = .pad
            timeSlider.maximumTrackTintColor = focusColor.withAlphaComponent(0.2)
            timeSlider.minimumTrackTintColor = focusColor
        }
        #endif
        #if !targetEnvironment(macCatalyst)
        timeSlider.maximumTrackTintColor = focusColor.withAlphaComponent(0.2)
        timeSlider.minimumTrackTintColor = focusColor
        #endif
        // Tags let a single target/action dispatch on PlayerButtonType(rawValue:).
        playButton.tag = PlayerButtonType.play.rawValue
        playButton.setTitleColor(focusColor, for: .focused)
        playButton.setTitleColor(tintColor, for: .normal)
        playbackRateButton.tag = PlayerButtonType.rate.rawValue
        playbackRateButton.titleFont = .systemFont(ofSize: 14, weight: .medium)
        playbackRateButton.setTitleColor(focusColor, for: .focused)
        playbackRateButton.setTitleColor(tintColor, for: .normal)
        definitionButton.tag = PlayerButtonType.definition.rawValue
        definitionButton.titleFont = .systemFont(ofSize: 14, weight: .medium)
        definitionButton.setTitleColor(focusColor, for: .focused)
        definitionButton.setTitleColor(tintColor, for: .normal)
        audioSwitchButton.tag = PlayerButtonType.audioSwitch.rawValue
        audioSwitchButton.titleFont = .systemFont(ofSize: 14, weight: .medium)
        audioSwitchButton.setTitleColor(focusColor, for: .focused)
        audioSwitchButton.setTitleColor(tintColor, for: .normal)
        videoSwitchButton.tag = PlayerButtonType.videoSwitch.rawValue
        videoSwitchButton.titleFont = .systemFont(ofSize: 14, weight: .medium)
        videoSwitchButton.setTitleColor(focusColor, for: .focused)
        videoSwitchButton.setTitleColor(tintColor, for: .normal)
        srtButton.tag = PlayerButtonType.srt.rawValue
        srtButton.titleFont = .systemFont(ofSize: 14, weight: .medium)
        srtButton.setTitleColor(focusColor, for: .focused)
        srtButton.setTitleColor(tintColor, for: .normal)
        pipButton.tag = PlayerButtonType.pictureInPicture.rawValue
        pipButton.titleFont = .systemFont(ofSize: 14, weight: .medium)
        pipButton.setTitleColor(focusColor, for: .focused)
        pipButton.setTitleColor(tintColor, for: .normal)
        if #available(macOS 11.0, *) {
            pipButton.setImage(UIImage(systemName: "pip.enter"), for: .normal)
            pipButton.setImage(UIImage(systemName: "pip.exit"), for: .selected)
            playButton.setImage(UIImage(systemName: "play.fill"), for: .normal)
            playButton.setImage(UIImage(systemName: "pause.fill"), for: .selected)
            srtButton.setImage(UIImage(systemName: "captions.bubble"), for: .normal)
            definitionButton.setImage(UIImage(systemName: "arrow.up.right.video"), for: .normal)
            audioSwitchButton.setImage(UIImage(systemName: "waveform"), for: .normal)
            videoSwitchButton.setImage(UIImage(systemName: "video.badge.ellipsis"), for: .normal)
            playbackRateButton.setImage(UIImage(systemName: "speedometer"), for: .normal)
        }
        playButton.translatesAutoresizingMaskIntoConstraints = false
        srtButton.translatesAutoresizingMaskIntoConstraints = false
        translatesAutoresizingMaskIntoConstraints = false
        if #available(tvOS 14.0, *) {
            pipButton.isHidden = !AVPictureInPictureController.isPictureInPictureSupported()
        }
        #if os(tvOS)
        // On tvOS, buttons are square icon-only controls with gray tint until focused.
        srtButton.fillImage()
        pipButton.fillImage()
        playButton.fillImage()
        definitionButton.fillImage()
        audioSwitchButton.fillImage()
        videoSwitchButton.fillImage()
        playbackRateButton.fillImage()
        playButton.tintColor = tintColor
        playbackRateButton.tintColor = tintColor
        definitionButton.tintColor = tintColor
        audioSwitchButton.tintColor = tintColor
        videoSwitchButton.tintColor = tintColor
        srtButton.tintColor = tintColor
        pipButton.tintColor = tintColor
        timeSlider.tintColor = tintColor
        NSLayoutConstraint.activate([
            playButton.widthAnchor.constraint(equalTo: playButton.heightAnchor),
            playbackRateButton.widthAnchor.constraint(equalTo: playbackRateButton.heightAnchor),
            definitionButton.widthAnchor.constraint(equalTo: definitionButton.heightAnchor),
            audioSwitchButton.widthAnchor.constraint(equalTo: audioSwitchButton.heightAnchor),
            videoSwitchButton.widthAnchor.constraint(equalTo: videoSwitchButton.heightAnchor),
            srtButton.widthAnchor.constraint(equalTo: srtButton.heightAnchor),
            pipButton.widthAnchor.constraint(equalTo: pipButton.heightAnchor),
            heightAnchor.constraint(equalToConstant: 40),
        ])
        #else
        timeSlider.tintColor = .white
        playButton.tintColor = .white
        playbackRateButton.tintColor = .white
        definitionButton.tintColor = .white
        audioSwitchButton.tintColor = .white
        videoSwitchButton.tintColor = .white
        srtButton.tintColor = .white
        pipButton.tintColor = .white
        NSLayoutConstraint.activate([
            playButton.widthAnchor.constraint(equalToConstant: 30),
            heightAnchor.constraint(equalToConstant: 49),
            srtButton.widthAnchor.constraint(equalToConstant: 40),
        ])
        #endif
    }

    /// Arranged subviews are unhidden on insertion so callers can toggle
    /// visibility purely by adding/removing.
    override public func addArrangedSubview(_ view: UIView) {
        super.addArrangedSubview(view)
        view.isHidden = false
    }

    #if canImport(UIKit)
    /// tvOS focus engine hook: brightens the newly focused button, restores the
    /// previously focused button's tint based on its control state, and
    /// forwards focus movement via `onFocusUpdate`.
    override open func didUpdateFocus(in context: UIFocusUpdateContext, with coordinator: UIFocusAnimationCoordinator) {
        super.didUpdateFocus(in: context, with: coordinator)
        if let nextFocusedItem = context.nextFocusedItem {
            if let nextFocusedButton = nextFocusedItem as? UIButton {
                nextFocusedButton.tintColor = nextFocusedButton.titleColor(for: .focused)
            }
            // Only notify for focus *moves* (both ends present), not initial focus.
            if context.previouslyFocusedItem != nil,
               let nextFocusedView = nextFocusedItem as? UIView
            {
                onFocusUpdate?(nextFocusedView)
            }
        }
        if let previouslyFocusedItem = context.previouslyFocusedItem as? UIButton {
            if previouslyFocusedItem.isSelected {
                previouslyFocusedItem.tintColor = previouslyFocusedItem.titleColor(for: .selected)
            } else if previouslyFocusedItem.isHighlighted {
                previouslyFocusedItem.tintColor = previouslyFocusedItem.titleColor(for: .highlighted)
            } else {
                previouslyFocusedItem.tintColor = previouslyFocusedItem.titleColor(for: .normal)
            }
        }
    }
    #endif

    /// Wires one target/action pair to every control button; the receiver
    /// distinguishes buttons by their `tag` (see `PlayerButtonType`).
    open func addTarget(_ target: AnyObject?, action: Selector) {
        playButton.addTarget(target, action: action, for: .primaryActionTriggered)
        playbackRateButton.addTarget(target, action: action, for: .primaryActionTriggered)
        definitionButton.addTarget(target, action: action, for: .primaryActionTriggered)
        audioSwitchButton.addTarget(target, action: action, for: .primaryActionTriggered)
        videoSwitchButton.addTarget(target, action: action, for: .primaryActionTriggered)
        srtButton.addTarget(target, action: action, for: .primaryActionTriggered)
        pipButton.addTarget(target, action: action, for: .primaryActionTriggered)
    }

    /// Resets the bar to its pristine pre-playback state.
    public func reset() {
        currentTime = 0
        totalTime = 0
        playButton.isSelected = false
        timeSlider.value = 0.0
        timeSlider.isPlayable = false
        playbackRateButton.setTitle(NSLocalizedString("speed", comment: ""), for: .normal)
    }
}
|
||||
|
||||
extension KSOptions {
    /// Loads a named image from this Swift package's resource bundle
    /// (`Bundle.module`), using the platform-appropriate lookup API.
    static func image(named: String) -> UIImage? {
        #if canImport(UIKit)
        return UIImage(named: named, in: .module, compatibleWith: nil)
        #else
        return Bundle.module.image(forResource: named)
        #endif
    }
}
|
||||
220
KSPlayer-main/Sources/KSPlayer/Core/PlayerView.swift
Normal file
220
KSPlayer-main/Sources/KSPlayer/Core/PlayerView.swift
Normal file
@@ -0,0 +1,220 @@
|
||||
//
|
||||
// PlayerView.swift
|
||||
// VoiceNote
|
||||
//
|
||||
// Created by kintan on 2018/8/16.
|
||||
//
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
import AVFoundation
|
||||
|
||||
/// Identifies player control buttons. The raw value doubles as the button's
/// UIKit `tag` (see `PlayerToolBar.initUI` / `PlayerView.onButtonPressed`),
/// starting at 101 to avoid colliding with default tags (0).
/// Do not reorder cases: raw values after `.play` are implicit (102, 103, …).
public enum PlayerButtonType: Int {
    case play = 101
    case pause
    case back
    case srt
    case landscape
    case replay
    case lock
    case rate
    case definition
    case pictureInPicture
    case audioSwitch
    case videoSwitch
}
|
||||
|
||||
/// Receives playback lifecycle, progress, and UI events from a `PlayerView`.
public protocol PlayerControllerDelegate: AnyObject {
    /// Playback state changed (buffering, playing, paused, error, …).
    func playerController(state: KSPlayerState)
    /// Periodic progress callback with the current position and duration (seconds).
    func playerController(currentTime: TimeInterval, totalTime: TimeInterval)
    /// Playback finished; `error` is nil on normal completion.
    func playerController(finish error: Error?)
    /// The control overlay was shown (`true`) or hidden (`false`).
    func playerController(maskShow: Bool)
    /// A control button was pressed (already mapped to its `PlayerButtonType`).
    func playerController(action: PlayerButtonType)
    // `bufferedCount: 0` indicates first time loading
    func playerController(bufferedCount: Int, consumeTime: TimeInterval)
    /// The user seeked to the given time (seconds).
    func playerController(seek: TimeInterval)
}
|
||||
|
||||
/// Base player view: owns the `KSPlayerLayer`, the toolbar, and subtitle
/// control, and bridges layer/slider callbacks to `PlayerControllerDelegate`.
/// Subclasses (e.g. platform-specific controllers) override the `open` hooks.
open class PlayerView: UIView, KSPlayerLayerDelegate, KSSliderDelegate {
    public typealias ControllerDelegate = PlayerControllerDelegate
    /// The playback layer; setting it re-binds the delegate to this view.
    public var playerLayer: KSPlayerLayer? {
        didSet {
            playerLayer?.delegate = self
        }
    }

    public weak var delegate: ControllerDelegate?
    public let toolBar = PlayerToolBar()
    public let srtControl = SubtitleModel()
    // Listen to play time change
    public var playTimeDidChange: ((TimeInterval, TimeInterval) -> Void)?
    /// Invoked when the back button is pressed.
    public var backBlock: (() -> Void)?
    public convenience init() {
        #if os(macOS)
        self.init(frame: .zero)
        #else
        // Non-macOS: start at full scene size (KSOptions.sceneSize is project-defined).
        self.init(frame: CGRect(origin: .zero, size: KSOptions.sceneSize))
        #endif
    }

    override public init(frame: CGRect) {
        super.init(frame: frame)
        toolBar.timeSlider.delegate = self
        // All toolbar buttons funnel into onButtonPressed(_:) and are
        // distinguished by tag (PlayerButtonType raw value).
        toolBar.addTarget(self, action: #selector(onButtonPressed(_:)))
    }

    @available(*, unavailable)
    public required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Shared button handler: maps the tag back to a `PlayerButtonType`,
    /// with per-platform menu handling before dispatching.
    @objc func onButtonPressed(_ button: UIButton) {
        guard let type = PlayerButtonType(rawValue: button.tag) else { return }

        #if os(macOS)
        // macOS: if the button carries a menu, pop it up at the selected item
        // instead of triggering the action directly.
        if let menu = button.menu,
           let item = button.menu?.items.first(where: { $0.state == .on })
        {
            menu.popUp(positioning: item,
                       at: button.frame.origin,
                       in: self)
        } else {
            onButtonPressed(type: type, button: button)
        }
        #elseif os(tvOS)
        onButtonPressed(type: type, button: button)
        #else
        // iOS 14+: buttons with an attached UIMenu handle the press themselves.
        if #available(iOS 14.0, *), button.menu != nil {
            return
        }
        onButtonPressed(type: type, button: button)
        #endif
    }

    /// Dispatches a typed button press. A `.play` press on an already-selected
    /// (i.e. playing) button is reinterpreted as `.pause` before dispatch.
    open func onButtonPressed(type: PlayerButtonType, button: UIButton) {
        var type = type
        if type == .play, button.isSelected {
            type = .pause
        }
        switch type {
        case .back:
            backBlock?()
        case .play, .replay:
            play()
        case .pause:
            pause()
        default:
            break
        }
        // Delegate always receives the (possibly remapped) action.
        delegate?.playerController(action: type)
    }

    #if canImport(UIKit)
    /// Siri Remote / keyboard play-pause toggle; other presses go to super.
    override open func pressesBegan(_ presses: Set<UIPress>, with event: UIPressesEvent?) {
        guard let presse = presses.first else {
            return
        }
        switch presse.type {
        case .playPause:
            if let playerLayer, playerLayer.state.isPlaying {
                pause()
            } else {
                play()
            }
        default: super.pressesBegan(presses, with: event)
        }
    }
    #endif
    /// Starts playback and reflects the playing state on the play button.
    open func play() {
        becomeFirstResponder()
        playerLayer?.play()
        toolBar.playButton.isSelected = true
    }

    open func pause() {
        playerLayer?.pause()
    }

    /// Seeks to `time` (seconds); auto-play behavior follows KSOptions.isSeekedAutoPlay.
    open func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void)) {
        playerLayer?.seek(time: time, autoPlay: KSOptions.isSeekedAutoPlay, completion: completion)
    }

    /// Pauses and zeroes the displayed duration (keeps the layer alive).
    open func resetPlayer() {
        pause()
        totalTime = 0.0
    }

    /// Loads a new media URL: resets times, points subtitles at the URL,
    /// and replaces the playback layer.
    open func set(url: URL, options: KSOptions) {
        srtControl.url = url
        toolBar.currentTime = 0
        totalTime = 0
        playerLayer = KSPlayerLayer(url: url, options: options)
    }

    // MARK: - KSSliderDelegate

    /// Scrubbing: live drag updates only the label; releasing performs the seek.
    open func slider(value: Double, event: ControlEvents) {
        if event == .valueChanged {
            toolBar.currentTime = value
        } else if event == .touchUpInside {
            seek(time: value) { [weak self] _ in
                self?.delegate?.playerController(seek: value)
            }
        }
    }

    // MARK: - KSPlayerLayerDelegate

    /// Mirrors layer state into the toolbar and forwards it to the delegate.
    open func player(layer: KSPlayerLayer, state: KSPlayerState) {
        delegate?.playerController(state: state)
        if state == .readyToPlay {
            totalTime = layer.player.duration
            toolBar.isSeekable = layer.player.seekable
            toolBar.playButton.isSelected = true
        } else if state == .playedToTheEnd || state == .paused || state == .error {
            toolBar.playButton.isSelected = false
        }
    }

    open func player(layer _: KSPlayerLayer, currentTime: TimeInterval, totalTime: TimeInterval) {
        delegate?.playerController(currentTime: currentTime, totalTime: totalTime)
        playTimeDidChange?(currentTime, totalTime)
        toolBar.currentTime = currentTime
        self.totalTime = totalTime
    }

    open func player(layer _: KSPlayerLayer, finish error: Error?) {
        delegate?.playerController(finish: error)
    }

    open func player(layer _: KSPlayerLayer, bufferedCount: Int, consumeTime: TimeInterval) {
        delegate?.playerController(bufferedCount: bufferedCount, consumeTime: consumeTime)
    }
}
|
||||
|
||||
public extension PlayerView {
    /// Convenience pass-through to the toolbar's duration; 0 means live stream
    /// (see `PlayerToolBar.isLiveStream`).
    var totalTime: TimeInterval {
        get {
            toolBar.totalTime
        }
        set {
            toolBar.totalTime = newValue
        }
    }
}
|
||||
|
||||
extension UIView {
    /// Walks the responder chain upward and returns the nearest view
    /// controller that owns this view, or `nil` if none is found.
    var viewController: UIViewController? {
        var responder = next
        while let current = responder {
            if let controller = current as? UIViewController {
                return controller
            }
            responder = current.next
        }
        return nil
    }
}
|
||||
210
KSPlayer-main/Sources/KSPlayer/Core/UIKitExtend.swift
Normal file
210
KSPlayer-main/Sources/KSPlayer/Core/UIKitExtend.swift
Normal file
@@ -0,0 +1,210 @@
|
||||
//
|
||||
// File.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
|
||||
/// Seek slider with tap-to-seek and pan-to-scrub gestures layered on top of
/// the platform slider (`UXSlider`: UISlider on iOS, a UIProgressView-backed
/// shim on tvOS). Events are forwarded to `KSSliderDelegate`.
public class KSSlider: UXSlider {
    private var tapGesture: UITapGestureRecognizer!
    private var panGesture: UIPanGestureRecognizer!
    weak var delegate: KSSliderDelegate?
    // Height of the track line in points.
    // NOTE(review): "Heigt" is a typo for "Height" — public API, so renaming
    // would break callers.
    public var trackHeigt = CGFloat(2)
    /// Gate for slider callbacks; when false, touch handlers are ignored.
    public var isPlayable = false
    override public init(frame: CGRect) {
        super.init(frame: frame)
        tapGesture = UITapGestureRecognizer(target: self, action: #selector(actionTapGesture(sender:)))
        panGesture = UIPanGestureRecognizer(target: self, action: #selector(actionPanGesture(sender:)))
        addGestureRecognizer(tapGesture)
        addGestureRecognizer(panGesture)
        addTarget(self, action: #selector(progressSliderTouchBegan(_:)), for: .touchDown)
        addTarget(self, action: #selector(progressSliderValueChanged(_:)), for: .valueChanged)
        addTarget(self, action: #selector(progressSliderTouchEnded(_:)), for: [.touchUpInside, .touchCancel, .touchUpOutside, .primaryActionTriggered])
    }

    @available(*, unavailable)
    public required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Centers a custom-height track within the default track rect.
    override open func trackRect(forBounds bounds: CGRect) -> CGRect {
        var customBounds = super.trackRect(forBounds: bounds)
        customBounds.origin.y -= trackHeigt / 2
        customBounds.size.height = trackHeigt
        return customBounds
    }

    /// Enlarges the thumb's hit/draw rect by 20pt on each side.
    override open func thumbRect(forBounds bounds: CGRect, trackRect rect: CGRect, value: Float) -> CGRect {
        let rect = super.thumbRect(forBounds: bounds, trackRect: rect, value: value)
        return rect.insetBy(dx: -20, dy: -20)
    }

    // MARK: - handle UI slider actions

    /// Touch-down on the native slider: suspend the custom gestures so they
    /// don't fight the drag, then report .touchDown.
    @objc private func progressSliderTouchBegan(_ sender: KSSlider) {
        guard isPlayable else { return }
        tapGesture.isEnabled = false
        panGesture.isEnabled = false
        // NOTE(review): self-assignment looks like a no-op; possibly intended
        // to force a value refresh — confirm before removing.
        value = value
        delegate?.slider(value: Double(sender.value), event: .touchDown)
    }

    @objc private func progressSliderValueChanged(_ sender: KSSlider) {
        guard isPlayable else { return }
        delegate?.slider(value: Double(sender.value), event: .valueChanged)
    }

    /// Touch ended/cancelled: re-enable the custom gestures and report the
    /// final value as .touchUpInside.
    @objc private func progressSliderTouchEnded(_ sender: KSSlider) {
        guard isPlayable else { return }
        tapGesture.isEnabled = true
        panGesture.isEnabled = true
        delegate?.slider(value: Double(sender.value), event: .touchUpInside)
    }

    /// Tap-to-seek: maps the tap's x position linearly onto the value range
    /// and reports a change followed by an immediate "release".
    @objc private func actionTapGesture(sender: UITapGestureRecognizer) {
        // guard isPlayable else {
        //     return
        // }
        let touchPoint = sender.location(in: self)
        let value = (maximumValue - minimumValue) * Float(touchPoint.x / frame.size.width)
        self.value = value
        delegate?.slider(value: Double(value), event: .valueChanged)
        delegate?.slider(value: Double(value), event: .touchUpInside)
    }

    /// Pan-to-scrub: continuously maps the finger's x position onto the value
    /// range, emitting touchDown / valueChanged / touchUpInside by phase.
    @objc private func actionPanGesture(sender: UIPanGestureRecognizer) {
        // guard isPlayable else {
        //     return
        // }
        let touchPoint = sender.location(in: self)
        let value = (maximumValue - minimumValue) * Float(touchPoint.x / frame.size.width)
        self.value = value
        if sender.state == .began {
            delegate?.slider(value: Double(value), event: .touchDown)
        } else if sender.state == .ended {
            delegate?.slider(value: Double(value), event: .touchUpInside)
        } else {
            delegate?.slider(value: Double(value), event: .valueChanged)
        }
    }
}
|
||||
|
||||
#if os(tvOS)
/// tvOS has no UISlider, so this shim exposes a UISlider-like API
/// (value/min/max, track tint colors, thumb/track rects) on top of
/// UIProgressView. Target-action and thumb images are accepted but inert.
public class UXSlider: UIProgressView {
    /// Slider value mapped onto UIProgressView's 0…1 `progress`.
    /// NOTE(review): both accessors divide/multiply by `maximumValue`;
    /// a maximum of 0 would produce NaN/inf — callers keep it nonzero.
    @IBInspectable public var value: Float {
        get {
            progress * maximumValue
        }
        set {
            progress = newValue / maximumValue
        }
    }

    @IBInspectable public var maximumValue: Float = 1 {
        didSet {
            refresh()
        }
    }

    // NOTE(review): minimumValue is stored but never used in the value
    // mapping above — the slider effectively assumes a 0 minimum.
    @IBInspectable public var minimumValue: Float = 0 {
        didSet {
            refresh()
        }
    }

    /// Maps UISlider's minimum-track color onto the progress tint.
    open var minimumTrackTintColor: UIColor? {
        get {
            progressTintColor
        }
        set {
            progressTintColor = newValue
        }
    }

    /// Maps UISlider's maximum-track color onto the track tint.
    open var maximumTrackTintColor: UIColor? {
        get {
            trackTintColor
        }
        set {
            trackTintColor = newValue
        }
    }

    // No-ops: tvOS progress views have no thumb and no touch events.
    open func setThumbImage(_: UIImage?, for _: UIControl.State) {}
    open func addTarget(_: Any?, action _: Selector, for _: UIControl.Event) {}

    override public init(frame: CGRect) {
        super.init(frame: frame)
        setup()
    }

    public required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        setup()
    }

    // MARK: - private functions

    private func setup() {
        refresh()
    }

    // Placeholder for future min/max-driven redraw logic.
    private func refresh() {}
    /// Default track rect: the full bounds (overridden by KSSlider).
    open func trackRect(forBounds bounds: CGRect) -> CGRect {
        bounds
    }

    /// Default thumb rect: the full bounds (overridden by KSSlider).
    open func thumbRect(forBounds bounds: CGRect, trackRect _: CGRect, value _: Float) -> CGRect {
        bounds
    }
}
#else
// Every other UIKit platform has a real UISlider.
public typealias UXSlider = UISlider
#endif
|
||||
|
||||
/// Pre-Swift-4.2 spelling kept as an alias so shared code can use the short name.
public typealias UIViewContentMode = UIView.ContentMode
|
||||
extension UIButton {
    /// Makes the button's image stretch to fill its entire bounds
    /// (used for the square icon-only tvOS toolbar buttons).
    func fillImage() {
        contentMode = .scaleAspectFill
        contentHorizontalAlignment = .fill
        contentVerticalAlignment = .fill
    }

    /// Convenience accessor for the title label's font.
    var titleFont: UIFont? {
        get {
            titleLabel?.font
        }
        set {
            titleLabel?.font = newValue
        }
    }

    /// Title for the `.normal` state.
    /// Uses `setTitle(_:for:)`/`title(for:)` rather than writing
    /// `titleLabel?.text` directly: UIButton manages the label's text from its
    /// state-based title storage, so a direct label assignment is overwritten
    /// on the next state change or layout pass.
    var title: String? {
        get {
            title(for: .normal)
        }
        set {
            setTitle(newValue, for: .normal)
        }
    }
}
|
||||
|
||||
extension UIView {
    /// Snapshots the view's layer into a `UIImage` at device scale.
    /// Uses `UIGraphicsImageRenderer` (iOS/tvOS 10+) instead of the deprecated
    /// `UIGraphicsBeginImageContextWithOptions` bitmap-context API; the
    /// renderer manages the context lifetime and wide-color support itself.
    func image() -> UIImage? {
        let format = UIGraphicsImageRendererFormat.default()
        // Preserve the original behavior of honoring the view's opacity.
        format.opaque = isOpaque
        let renderer = UIGraphicsImageRenderer(bounds: bounds, format: format)
        return renderer.image { context in
            layer.render(in: context.cgContext)
        }
    }

    /// Rotates the view around its center by the given angle in degrees
    /// (replaces any existing transform).
    public func centerRotate(byDegrees: Double) {
        transform = CGAffineTransform(rotationAngle: CGFloat(Double.pi * byDegrees / 180.0))
    }
}
|
||||
#endif
|
||||
48
KSPlayer-main/Sources/KSPlayer/Core/UXKit.swift
Normal file
48
KSPlayer-main/Sources/KSPlayer/Core/UXKit.swift
Normal file
@@ -0,0 +1,48 @@
|
||||
//
|
||||
// File.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
|
||||
extension UIView {
    /// Cross-platform access to the view's backing CALayer.
    /// On AppKit, `wantsLayer` must be set first or `layer` may be nil.
    var backingLayer: CALayer? {
        #if !canImport(UIKit)
        wantsLayer = true
        #endif
        return layer
    }

    /// Corner radius of the backing layer; reads as 0 when no layer exists.
    var cornerRadius: CGFloat {
        get {
            backingLayer?.cornerRadius ?? 0
        }
        set {
            backingLayer?.cornerRadius = newValue
        }
    }
}
|
||||
|
||||
/// Platform-neutral control events, mirroring the subset of
/// `UIControl.Event` (plus AppKit mouse tracking) that the sliders report.
@objc public enum ControlEvents: Int {
    case touchDown
    case touchUpInside
    case touchCancel
    case valueChanged
    case primaryActionTriggered
    case mouseEntered
    case mouseExited
}
|
||||
|
||||
/// Receives slider interaction events from `KSSlider`.
protocol KSSliderDelegate: AnyObject {
    /**
     Called when a slider action is triggered.
     - parameter value: current slider value (progress)
     - parameter event: the interaction phase that produced the value
     */
    func slider(value: Double, event: ControlEvents)
}
|
||||
850
KSPlayer-main/Sources/KSPlayer/Core/Utility.swift
Normal file
850
KSPlayer-main/Sources/KSPlayer/Core/Utility.swift
Normal file
@@ -0,0 +1,850 @@
|
||||
//
|
||||
// Utility.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CryptoKit
|
||||
import SwiftUI
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
#if canImport(MobileCoreServices)
|
||||
import MobileCoreServices.UTType
|
||||
#endif
|
||||
/// A view whose backing layer is a `CAGradientLayer`.
/// On UIKit this is done via `layerClass`; AppKit has no equivalent, so the
/// layer is assigned directly in `init`.
open class LayerContainerView: UIView {
    #if canImport(UIKit)
    override open class var layerClass: AnyClass {
        CAGradientLayer.self
    }
    #else
    override public init(frame: CGRect) {
        super.init(frame: frame)
        layer = CAGradientLayer()
    }

    @available(*, unavailable)
    public required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    #endif
    /// Typed access to the backing layer. The force cast is safe because both
    /// init paths above guarantee the layer is a CAGradientLayer.
    public var gradientLayer: CAGradientLayer {
        // swiftlint:disable force_cast
        layer as! CAGradientLayer
        // swiftlint:enable force_cast
    }
}
|
||||
|
||||
/// Incrementally writes CGImage frames into an animated GIF file
/// (0.25 s per frame, infinite loop). Call `add(image:)` for each frame,
/// then `finalize()` exactly once.
class GIFCreator {
    private let destination: CGImageDestination
    // Per-frame GIF properties (fixed 0.25 s delay).
    private let frameProperties: CFDictionary
    /// The first frame added, kept for thumbnail/preview use.
    private(set) var firstImage: UIImage?
    /// - Parameters:
    ///   - savePath: destination file URL; any existing file is removed first.
    ///   - imagesCount: total frame count the destination is sized for.
    /// NOTE(review): the destination creation is force-unwrapped — an
    /// unwritable savePath crashes here. kUTTypeGIF is also deprecated in
    /// favor of UTType.gif (UniformTypeIdentifiers); confirm deployment
    /// targets before migrating.
    init(savePath: URL, imagesCount: Int) {
        try? FileManager.default.removeItem(at: savePath)
        frameProperties = [kCGImagePropertyGIFDictionary: [kCGImagePropertyGIFDelayTime: 0.25]] as CFDictionary
        destination = CGImageDestinationCreateWithURL(savePath as CFURL, kUTTypeGIF, imagesCount, nil)!
        // Loop count 0 means loop forever.
        let fileProperties = [kCGImagePropertyGIFDictionary: [kCGImagePropertyGIFLoopCount: 0]]
        CGImageDestinationSetProperties(destination, fileProperties as CFDictionary)
    }

    /// Appends one frame; the first frame is also retained as `firstImage`.
    func add(image: CGImage) {
        if firstImage == nil {
            firstImage = UIImage(cgImage: image)
        }
        CGImageDestinationAddImage(destination, image, frameProperties)
    }

    /// Writes the GIF to disk. Returns false on failure. Must be called once;
    /// the destination cannot be reused afterwards.
    func finalize() -> Bool {
        let result = CGImageDestinationFinalize(destination)
        return result
    }
}
|
||||
|
||||
public extension String {
    /// Current local wall-clock time formatted as "HH:mm" or, when `second`
    /// is true, "HH:mm:ss". The force unwraps are safe: the requested
    /// components are always populated for a Date.
    static func systemClockTime(second: Bool = false) -> String {
        let date = Date()
        let calendar = Calendar.current
        let component = calendar.dateComponents([.hour, .minute, .second], from: date)
        if second {
            return String(format: "%02i:%02i:%02i", component.hour!, component.minute!, component.second!)
        } else {
            return String(format: "%02i:%02i", component.hour!, component.minute!)
        }
    }

    /// Parses a subtitle timestamp into seconds.
    /// - Parameter fromStr: srt "00:02:52,184", ass "0:30:11.56", vtt "00:00.430"
    /// - Returns: total seconds, with the fractional part divided by 1000
    ///   regardless of digit count (matches the formats above).
    func parseDuration() -> TimeInterval {
        let scanner = Scanner(string: self)

        // An hour field is only present when there are three ":"-separated parts.
        var hour: Double = 0
        if split(separator: ":").count > 2 {
            hour = scanner.scanDouble() ?? 0.0
            _ = scanner.scanString(":")
        }

        let min = scanner.scanDouble() ?? 0.0
        _ = scanner.scanString(":")
        let sec = scanner.scanDouble() ?? 0.0
        // Milliseconds are separated by "," (srt) or "." (ass/vtt).
        if scanner.scanString(",") == nil {
            _ = scanner.scanString(".")
        }
        let millisecond = scanner.scanDouble() ?? 0.0
        return (hour * 3600.0) + (min * 60.0) + sec + (millisecond / 1000.0)
    }

    /// MD5 hex digest of the string's UTF-8 bytes (Data.md5() is defined
    /// elsewhere in this package).
    func md5() -> String {
        Data(utf8).md5()
    }
}
|
||||
|
||||
public extension UIColor {
    /// Parses an ASS subtitle color string, e.g. "&H00FF00&".
    /// Fails (returns nil) when the remaining text is not valid hex.
    convenience init?(assColor: String) {
        var colorString = assColor
        // 移除颜色字符串中的前缀 &H 和后缀 & (strip the "&H" prefix and "&" suffix)
        if colorString.hasPrefix("&H") {
            colorString = String(colorString.dropFirst(2))
        }
        if colorString.hasSuffix("&") {
            colorString = String(colorString.dropLast())
        }
        if let hex = Scanner(string: colorString).scanInt(representation: .hexadecimal) {
            self.init(abgr: hex)
        } else {
            return nil
        }
    }

    /// Builds a color from ASS byte order: 0xAABBGGRR, where the alpha byte
    /// is inverted (0 = opaque, 0xFF = transparent).
    convenience init(abgr hex: Int) {
        let alpha = 1 - (CGFloat(hex >> 24 & 0xFF) / 255)
        let blue = CGFloat((hex >> 16) & 0xFF)
        let green = CGFloat((hex >> 8) & 0xFF)
        let red = CGFloat(hex & 0xFF)
        self.init(red: red / 255.0, green: green / 255.0, blue: blue / 255.0, alpha: alpha)
    }

    /// Builds a color from a conventional 0xRRGGBB integer.
    convenience init(rgb hex: Int, alpha: CGFloat = 1) {
        let red = CGFloat((hex >> 16) & 0xFF)
        let green = CGFloat((hex >> 8) & 0xFF)
        let blue = CGFloat(hex & 0xFF)
        self.init(red: red / 255.0, green: green / 255.0, blue: blue / 255.0, alpha: alpha)
    }

    /// Renders a solid-color image of the given size (default 1×1),
    /// e.g. for button backgrounds.
    func createImage(size: CGSize = CGSize(width: 1, height: 1)) -> UIImage {
        #if canImport(UIKit)
        let rect = CGRect(origin: .zero, size: size)
        UIGraphicsBeginImageContext(rect.size)
        let context = UIGraphicsGetCurrentContext()
        context?.setFillColor(cgColor)
        context?.fill(rect)
        // Safe to force-unwrap: a context was just opened above.
        let image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return image!
        #else
        let image = NSImage(size: size)
        image.lockFocus()
        drawSwatch(in: CGRect(origin: .zero, size: size))
        image.unlockFocus()
        return image
        #endif
    }
}
|
||||
|
||||
extension AVAsset {
|
||||
    /// Exports the asset's frames between `beginTime` and `endTime` (seconds)
    /// as an animated GIF at `savePath`, sampling one frame every `interval`
    /// seconds.
    /// - Parameters:
    ///   - progress: called per generated frame with completion in 0…1.
    ///   - completion: called once with nil on success or an error on failure.
    /// NOTE(review): callbacks arrive on AVFoundation's background queue —
    /// callers must hop to the main thread for UI work. Frame failures other
    /// than the last requested time may leave `i` short of `count`, in which
    /// case completion is only fired by the `.failed` branch.
    public func generateGIF(beginTime: TimeInterval, endTime: TimeInterval, interval: Double = 0.2, savePath: URL, progress: @escaping (Double) -> Void, completion: @escaping (Error?) -> Void) {
        let count = Int(ceil((endTime - beginTime) / interval))
        // Evenly spaced sample times across [beginTime, endTime).
        let timesM = (0 ..< count).map { NSValue(time: CMTime(seconds: beginTime + Double($0) * interval)) }
        let imageGenerator = createImageGenerator()
        let gifCreator = GIFCreator(savePath: savePath, imagesCount: count)
        // Count of successfully generated frames; finalize when all arrived.
        var i = 0
        imageGenerator.generateCGImagesAsynchronously(forTimes: timesM) { _, imageRef, _, result, error in
            switch result {
            case .succeeded:
                guard let imageRef else { return }
                i += 1
                gifCreator.add(image: imageRef)
                progress(Double(i) / Double(count))
                guard i == count else { return }
                if gifCreator.finalize() {
                    completion(nil)
                } else {
                    let error = NSError(domain: AVFoundationErrorDomain, code: -1, userInfo: [NSLocalizedDescriptionKey: "Generate Gif Failed!"])
                    completion(error)
                }
            case .failed:
                if let error {
                    completion(error)
                }
            case .cancelled:
                break
            @unknown default:
                break
            }
        }
    }
|
||||
|
||||
private func createComposition(beginTime: TimeInterval, endTime: TimeInterval) async throws -> AVMutableComposition {
|
||||
let compositionM = AVMutableComposition()
|
||||
let audioTrackM = compositionM.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
|
||||
let videoTrackM = compositionM.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
|
||||
let cutRange = CMTimeRange(start: beginTime, end: endTime)
|
||||
#if os(xrOS)
|
||||
if let assetAudioTrack = try await loadTracks(withMediaType: .audio).first {
|
||||
try audioTrackM?.insertTimeRange(cutRange, of: assetAudioTrack, at: .zero)
|
||||
}
|
||||
if let assetVideoTrack = try await loadTracks(withMediaType: .video).first {
|
||||
try videoTrackM?.insertTimeRange(cutRange, of: assetVideoTrack, at: .zero)
|
||||
}
|
||||
#else
|
||||
if let assetAudioTrack = tracks(withMediaType: .audio).first {
|
||||
try audioTrackM?.insertTimeRange(cutRange, of: assetAudioTrack, at: .zero)
|
||||
}
|
||||
if let assetVideoTrack = tracks(withMediaType: .video).first {
|
||||
try videoTrackM?.insertTimeRange(cutRange, of: assetVideoTrack, at: .zero)
|
||||
}
|
||||
#endif
|
||||
return compositionM
|
||||
}
|
||||
|
||||
func createExportSession(beginTime: TimeInterval, endTime: TimeInterval) async throws -> AVAssetExportSession? {
|
||||
let compositionM = try await createComposition(beginTime: beginTime, endTime: endTime)
|
||||
guard let exportSession = AVAssetExportSession(asset: compositionM, presetName: "") else {
|
||||
return nil
|
||||
}
|
||||
exportSession.shouldOptimizeForNetworkUse = true
|
||||
exportSession.outputFileType = .mp4
|
||||
return exportSession
|
||||
}
|
||||
|
||||
func exportMp4(beginTime: TimeInterval, endTime: TimeInterval, outputURL: URL, progress: @escaping (Double) -> Void, completion: @escaping (Result<URL, Error>) -> Void) throws {
|
||||
try FileManager.default.removeItem(at: outputURL)
|
||||
Task {
|
||||
guard let exportSession = try await createExportSession(beginTime: beginTime, endTime: endTime) else { return }
|
||||
exportSession.outputURL = outputURL
|
||||
await exportSession.export()
|
||||
switch exportSession.status {
|
||||
case .exporting:
|
||||
progress(Double(exportSession.progress))
|
||||
case .completed:
|
||||
progress(1)
|
||||
completion(.success(outputURL))
|
||||
exportSession.cancelExport()
|
||||
case .failed:
|
||||
if let error = exportSession.error {
|
||||
completion(.failure(error))
|
||||
}
|
||||
exportSession.cancelExport()
|
||||
case .cancelled:
|
||||
exportSession.cancelExport()
|
||||
case .unknown, .waiting:
|
||||
break
|
||||
@unknown default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func exportMp4(beginTime: TimeInterval, endTime: TimeInterval, progress: @escaping (Double) -> Void, completion: @escaping (Result<URL, Error>) -> Void) throws {
|
||||
guard var exportURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return }
|
||||
exportURL = exportURL.appendingPathExtension("Export.mp4")
|
||||
try exportMp4(beginTime: beginTime, endTime: endTime, outputURL: exportURL, progress: progress, completion: completion)
|
||||
}
|
||||
}
|
||||
|
||||
extension UIImageView {
    /// Loads image data from `url` on a background queue and assigns the decoded
    /// image on the main queue. A failed load or undecodable data assigns nil.
    func image(url: URL?) {
        guard let url else { return }
        DispatchQueue.global().async { [weak self] in
            guard let self else { return }
            var loaded: UIImage?
            if let bytes = try? Data(contentsOf: url) {
                loaded = UIImage(data: bytes)
            }
            DispatchQueue.main.async { [weak self] in
                self?.image = loaded
            }
        }
    }
}
|
||||
|
||||
#if canImport(UIKit)
extension AVPlayer.HDRMode {
    /// Maps the device's supported-HDR option set to the single best
    /// `DynamicRange`, preferring Dolby Vision, then HLG, then HDR10,
    /// and falling back to SDR.
    var dynamicRange: DynamicRange {
        // Order matters: Dolby Vision outranks HLG, which outranks HDR10.
        let ranked: [(AVPlayer.HDRMode, DynamicRange)] = [
            (.dolbyVision, .dolbyVision),
            (.hlg, .hlg),
            (.hdr10, .hdr10),
        ]
        return ranked.first { contains($0.0) }?.1 ?? .sdr
    }
}
#endif
|
||||
|
||||
public extension FourCharCode {
    /// The four ASCII characters packed into this code, most significant byte
    /// first (e.g. 0x61766331 → "avc1").
    var string: String {
        // CChar is Int8; the previous direct CChar(_:) conversion trapped for
        // any byte >= 0x80. Reinterpret the bits instead of range-checking.
        let cString: [CChar] = [
            CChar(bitPattern: UInt8(self >> 24 & 0xFF)),
            CChar(bitPattern: UInt8(self >> 16 & 0xFF)),
            CChar(bitPattern: UInt8(self >> 8 & 0xFF)),
            CChar(bitPattern: UInt8(self & 0xFF)),
            0,
        ]
        return String(cString: cString)
    }
}
|
||||
|
||||
extension CMTime {
    /// Creates a time value from a seconds count with microsecond precision.
    init(seconds: TimeInterval) {
        let microsecondTimescale = Int32(USEC_PER_SEC)
        self.init(seconds: seconds, preferredTimescale: microsecondTimescale)
    }
}
|
||||
|
||||
extension CMTimeRange {
    /// Creates a range between two second offsets using microsecond-precision times.
    init(start: TimeInterval, end: TimeInterval) {
        let from = CMTime(seconds: start)
        let to = CMTime(seconds: end)
        self.init(start: from, end: to)
    }
}
|
||||
|
||||
extension CGPoint {
    /// The point with its coordinates swapped (x ↔ y).
    var reverse: CGPoint {
        let swapped = CGPoint(x: self.y, y: self.x)
        return swapped
    }
}
|
||||
|
||||
extension CGSize {
    /// The size with its dimensions swapped (width ↔ height).
    var reverse: CGSize {
        CGSize(width: self.height, height: self.width)
    }

    /// The size reinterpreted as a point (width → x, height → y).
    var toPoint: CGPoint {
        CGPoint(x: self.width, y: self.height)
    }

    /// Whether the size is strictly wider than it is tall (a square is not horizontal).
    var isHorizonal: Bool {
        self.width > self.height
    }
}
|
||||
|
||||
/// Scales a size by a scalar factor.
func * (left: CGSize, right: CGFloat) -> CGSize {
    CGSize(width: right * left.width, height: right * left.height)
}

/// Scales a point by a scalar factor.
func * (left: CGPoint, right: CGFloat) -> CGPoint {
    CGPoint(x: right * left.x, y: right * left.y)
}

/// Scales a rect's origin and size by the same scalar factor.
func * (left: CGRect, right: CGFloat) -> CGRect {
    CGRect(origin: left.origin * right, size: left.size * right)
}

/// Component-wise size difference.
func - (left: CGSize, right: CGSize) -> CGSize {
    CGSize(width: left.width - right.width, height: left.height - right.height)
}
|
||||
|
||||
@inline(__always)
@preconcurrency
// @MainActor
/// Runs `block` synchronously when already on the main thread; otherwise hops
/// to the main actor asynchronously (the caller does not wait for completion).
public func runOnMainThread(block: @escaping () -> Void) {
    guard !Thread.isMainThread else {
        block()
        return
    }
    Task {
        await MainActor.run(body: block)
    }
}
|
||||
|
||||
public extension URL {
    /// Whether this URL's system type identifier conforms to kUTTypeMovie.
    /// Resource values are only available for file URLs, so remote URLs return false.
    var isMovie: Bool {
        if let typeID = try? resourceValues(forKeys: [.typeIdentifierKey]).typeIdentifier as CFString? {
            return UTTypeConformsTo(typeID, kUTTypeMovie)
        }
        return false
    }

    /// Whether this URL's system type identifier conforms to kUTTypeAudio.
    var isAudio: Bool {
        if let typeID = try? resourceValues(forKeys: [.typeIdentifierKey]).typeIdentifier as CFString? {
            return UTTypeConformsTo(typeID, kUTTypeAudio)
        }
        return false
    }

    /// Whether the path extension is one of the supported subtitle formats.
    var isSubtitle: Bool {
        ["ass", "srt", "ssa", "vtt"].contains(pathExtension.lowercased())
    }

    /// Whether the path extension is one of the supported playlist formats.
    var isPlaylist: Bool {
        ["cue", "m3u", "pls"].contains(pathExtension.lowercased())
    }

    /// Loads this URL and parses it as an extended M3U playlist.
    /// Entries whose path starts with "./" are resolved against this URL's directory.
    /// - Returns: (title, url, attribute dictionary) per playlist entry.
    /// - Throws: Any error raised while loading the data.
    func parsePlaylist() async throws -> [(String, URL, [String: String])] {
        let data = try await data()
        var entrys = data.parsePlaylist()
        for i in 0 ..< entrys.count {
            var entry = entrys[i]
            if entry.1.path.hasPrefix("./") {
                // Resolve playlist-relative paths against the playlist's own directory.
                entry.1 = deletingLastPathComponent().appendingPathComponent(entry.1.path).standardized
                entrys[i] = entry
            }
        }
        return entrys
    }

    /// Loads the URL's contents: synchronously from disk for file URLs,
    /// otherwise via URLSession with an optional User-Agent header.
    func data(userAgent: String? = nil) async throws -> Data {
        if isFileURL {
            return try Data(contentsOf: self)
        } else {
            var request = URLRequest(url: self)
            if let userAgent {
                request.addValue(userAgent, forHTTPHeaderField: "User-Agent")
            }
            let (data, _) = try await URLSession.shared.data(for: request)
            return data
        }
    }

    /// Downloads the URL to a temporary file and reports (suggested filename, temp URL).
    /// NOTE(review): download failures are silently dropped — completion is never
    /// called on error; confirm callers do not need failure notification.
    func download(userAgent: String? = nil, completion: @escaping ((String, URL) -> Void)) {
        var request = URLRequest(url: self)
        if let userAgent {
            request.addValue(userAgent, forHTTPHeaderField: "User-Agent")
        }
        let task = URLSession.shared.downloadTask(with: request) { url, response, _ in
            guard let url, let response else {
                return
            }
            // The temporary file must be consumed immediately; the system may
            // delete it as soon as this completion handler returns.
            completion(response.suggestedFilename ?? url.lastPathComponent, url)
        }
        task.resume()
    }
}
|
||||
|
||||
public extension Data {
    /// Parses this data as a UTF-8 extended M3U playlist.
    /// Returns (title, url, attributes) tuples; empty when the bytes are not
    /// UTF-8 text or the first line does not contain "#EXTM3U".
    func parsePlaylist() -> [(String, URL, [String: String])] {
        guard let text = String(data: self, encoding: .utf8) else {
            return []
        }
        let scanner = Scanner(string: text)
        guard let header = scanner.scanUpToCharacters(from: .newlines), header.contains("#EXTM3U") else {
            return []
        }
        var parsed = [(String, URL, [String: String])]()
        while !scanner.isAtEnd {
            guard let entry = scanner.parseM3U() else { continue }
            parsed.append(entry)
        }
        return parsed
    }

    /// Hex-encoded MD5 digest of the data (non-cryptographic uses such as cache keys).
    func md5() -> String {
        let digest = Insecure.MD5.hash(data: self)
        let hex = digest.map { String(format: "%02hhx", $0) }.joined()
        return String(hex.prefix(32))
    }
}
|
||||
|
||||
extension Scanner {
    /*
     #EXTINF:-1 tvg-id="ExampleTV.ua" tvg-logo="https://image.com" group-title="test test", Example TV (720p) [Not 24/7]
     #EXTVLCOPT:http-referrer=http://example.com/
     #EXTVLCOPT:http-user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64)
     http://example.com/stream.m3u8
     */
    /// Parses one playlist entry at the scanner's current position (see the
    /// sample above). Lines that are not #EXTINF headers are consumed and nil
    /// is returned so the caller can keep iterating.
    /// - Returns: (title, stream URL, attributes incl. "duration" and #EXT options), or nil.
    func parseM3U() -> (String, URL, [String: String])? {
        if scanString("#EXTINF:") == nil {
            // Not an entry header: swallow the rest of the line so the caller makes progress.
            _ = scanUpToCharacters(from: .newlines)
            return nil
        }
        var extinf = [String: String]()
        if let duration = scanDouble() {
            extinf["duration"] = String(duration)
        }
        // key="value" attribute pairs precede the comma that introduces the title.
        // Fixed: also stop at end of input — a truncated EXTINF line with no comma
        // previously spun forever because scanString(",") kept returning nil.
        while !isAtEnd, scanString(",") == nil {
            let key = scanUpToString("=")
            _ = scanString("=\"")
            let value = scanUpToString("\"")
            _ = scanString("\"")
            if let key, let value {
                extinf[key] = value
            }
        }
        let title = scanUpToCharacters(from: .newlines)
        // Optional #EXT... option lines (e.g. #EXTVLCOPT:) between the header and the URL.
        while scanString("#EXT") != nil {
            if scanString("VLCOPT:") != nil {
                let key = scanUpToString("=")
                _ = scanString("=")
                let value = scanUpToCharacters(from: .newlines)
                if let key, let value {
                    extinf[key] = value
                }
            } else {
                let key = scanUpToString(":")
                _ = scanString(":")
                let value = scanUpToCharacters(from: .newlines)
                if let key, let value {
                    extinf[key] = value
                }
            }
        }
        let urlString = scanUpToCharacters(from: .newlines)
        if let urlString, let url = URL(string: urlString) {
            return (title ?? url.lastPathComponent, url, extinf)
        }
        return nil
    }
}
|
||||
|
||||
extension HTTPURLResponse {
    /// The attachment filename advertised by the `Content-Disposition` header,
    /// or nil when the header is absent or not a plain `attachment; filename=` form.
    /// NOTE(review): quoted filenames (filename="...") keep their quotes here — confirm callers expect that.
    var filename: String? {
        let prefix = "attachment; filename="
        guard let disposition = value(forHTTPHeaderField: "Content-Disposition"),
              disposition.hasPrefix(prefix)
        else {
            return nil
        }
        return String(disposition.dropFirst(prefix.count))
    }
}
|
||||
|
||||
public extension Double {
    /// Compact "K/M" formatting for counts: 1_234_567 → "1.2M", 45_678 → "45.7K",
    /// anything below 10_000 rendered as a plain locale-formatted integer.
    var kmFormatted: String {
        // return .formatted(.number.notation(.compactName))
        if self >= 1_000_000 {
            return String(format: "%.1fM", locale: Locale.current, self / 1_000_000)
        } else if self >= 10000 {
            // Fixed: the old upper bound `self <= 999_999` let fractional values in
            // (999_999, 1_000_000) fall through to the plain-integer branch.
            return String(format: "%.1fK", locale: Locale.current, self / 1000)
        } else {
            return String(format: "%.0f", locale: Locale.current, self)
        }
    }
}
|
||||
|
||||
extension TextAlignment: RawRepresentable {
    public typealias RawValue = String

    /// Restores an alignment from its persisted name; nil for unknown strings.
    public init?(rawValue: RawValue) {
        switch rawValue {
        case "Leading":
            self = .leading
        case "Center":
            self = .center
        case "Trailing":
            self = .trailing
        default:
            return nil
        }
    }

    /// Stable string name used for persistence (e.g. with @AppStorage).
    public var rawValue: RawValue {
        switch self {
        case .leading:
            return "Leading"
        case .center:
            return "Center"
        case .trailing:
            return "Trailing"
        }
    }
}
|
||||
|
||||
extension TextAlignment: Identifiable {
    // Self-identifying so the alignments can drive ForEach-based pickers directly.
    public var id: Self { self }
}
|
||||
|
||||
extension HorizontalAlignment: Hashable, RawRepresentable {
    public typealias RawValue = String

    /// Restores an alignment from its persisted name; nil for unknown strings.
    public init?(rawValue: RawValue) {
        switch rawValue {
        case "Leading":
            self = .leading
        case "Center":
            self = .center
        case "Trailing":
            self = .trailing
        default:
            return nil
        }
    }

    /// Stable string name used for persistence; any other alignment maps to "".
    public var rawValue: RawValue {
        if self == .leading {
            return "Leading"
        } else if self == .center {
            return "Center"
        } else if self == .trailing {
            return "Trailing"
        } else {
            return ""
        }
    }
}
|
||||
|
||||
extension HorizontalAlignment: Identifiable {
    // Self-identifying so the alignments can drive ForEach-based pickers directly.
    public var id: Self { self }
}
|
||||
|
||||
extension VerticalAlignment: Hashable, RawRepresentable {
    public typealias RawValue = String

    /// Restores an alignment from its persisted name; nil for unknown strings.
    public init?(rawValue: RawValue) {
        switch rawValue {
        case "Top":
            self = .top
        case "Center":
            self = .center
        case "Bottom":
            self = .bottom
        default:
            return nil
        }
    }

    /// Stable string name used for persistence; any other alignment maps to "".
    public var rawValue: RawValue {
        if self == .top {
            return "Top"
        } else if self == .center {
            return "Center"
        } else if self == .bottom {
            return "Bottom"
        } else {
            return ""
        }
    }
}
|
||||
|
||||
extension VerticalAlignment: Identifiable {
    // Self-identifying so the alignments can drive ForEach-based pickers directly.
    public var id: Self { self }
}
|
||||
|
||||
extension Color: RawRepresentable {
    public typealias RawValue = String
    /// Restores a Color persisted via `rawValue`. Malformed input decodes to
    /// `.black` rather than failing, so stored preferences always yield a color.
    public init?(rawValue: RawValue) {
        guard let data = Data(base64Encoded: rawValue) else {
            self = .black
            return
        }

        do {
            // Archives are written below with requiringSecureCoding: false.
            // NOTE(review): confirm non-secure coding is acceptable for this storage.
            let color = try NSKeyedUnarchiver.unarchivedObject(ofClass: UIColor.self, from: data) ?? .black
            self = Color(color)
        } catch {
            self = .black
        }
    }

    /// Base64-encoded keyed archive of the platform color; "" when archiving
    /// fails or the OS predates the Color → UIColor initializer (macOS 11 / iOS 14 / tvOS 14).
    public var rawValue: RawValue {
        do {
            if #available(macOS 11.0, iOS 14, tvOS 14, *) {
                let data = try NSKeyedArchiver.archivedData(withRootObject: UIColor(self), requiringSecureCoding: false) as Data
                return data.base64EncodedString()
            } else {
                return ""
            }
        } catch {
            return ""
        }
    }
}
|
||||
|
||||
extension Array: RawRepresentable where Element: Codable {
    /// Decodes the array from its JSON string form; nil when the string is not
    /// valid JSON for [Element].
    public init?(rawValue: String) {
        guard let data = rawValue.data(using: .utf8) else { return nil }
        guard let decoded = try? JSONDecoder().decode([Element].self, from: data) else { return nil }
        self = decoded
    }

    /// JSON string form of the array; "[]" when encoding fails.
    public var rawValue: String {
        guard let data = try? JSONEncoder().encode(self) else { return "[]" }
        return String(data: data, encoding: .utf8) ?? "[]"
    }
}
|
||||
|
||||
extension Date: RawRepresentable {
    public typealias RawValue = String

    /// Decodes a date from its JSON form (seconds since the reference date by
    /// default); nil on malformed input.
    public init?(rawValue: RawValue) {
        guard let data = rawValue.data(using: .utf8) else { return nil }
        guard let decoded = try? JSONDecoder().decode(Date.self, from: data) else { return nil }
        self = decoded
    }

    /// JSON form of the date; "" when encoding fails.
    public var rawValue: RawValue {
        guard let data = try? JSONEncoder().encode(self) else { return "" }
        return String(data: data, encoding: .utf8) ?? ""
    }
}
|
||||
|
||||
extension CGImage {
    /// Composites positioned images into a single RGBA bitmap whose extent is
    /// the union of all target rects. Returns the sole image unchanged when
    /// only one is supplied, nil for empty input or context failure.
    static func combine(images: [(CGRect, CGImage)]) -> CGImage? {
        if images.isEmpty {
            return nil
        }
        if images.count == 1 {
            return images[0].1
        }
        var width = 0
        var height = 0
        for (rect, _) in images {
            width = max(width, Int(rect.maxX))
            height = max(height, Int(rect.maxY))
        }
        let bitsPerComponent = 8
        // 4 bytes per RGBA pixel. (Fixed: the previous
        // `4 * 8 * bitsPerComponent * width` over-allocated every row by 64×.)
        let bytesPerRow = 4 * width
        return autoreleasepool {
            let context = CGContext(data: nil, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
            guard let context else {
                return nil
            }
            // context.clear(CGRect(origin: .zero, size: CGSize(width: width, height: height)))
            // CGContext's origin is bottom-left; flip each y so top-left rects land correctly.
            for (rect, cgImage) in images {
                context.draw(cgImage, in: CGRect(x: rect.origin.x, y: CGFloat(height) - rect.maxY, width: rect.width, height: rect.height))
            }
            let cgImage = context.makeImage()
            return cgImage
        }
    }

    /// Encodes the image via ImageIO into the given container.
    /// - Parameters:
    ///   - type: target container, e.g. `.png` or `.jpeg2000`.
    ///   - quality: lossy compression quality in 0...1 (ignored for lossless formats).
    /// - Returns: the encoded bytes, or nil when destination creation/finalize fails.
    func data(type: AVFileType, quality: CGFloat) -> Data? {
        autoreleasepool {
            guard let mutableData = CFDataCreateMutable(nil, 0),
                  let destination = CGImageDestinationCreateWithData(mutableData, type.rawValue as CFString, 1, nil)
            else {
                return nil
            }
            CGImageDestinationAddImage(destination, self, [kCGImageDestinationLossyCompressionQuality: quality] as CFDictionary)
            guard CGImageDestinationFinalize(destination) else {
                return nil
            }
            return mutableData as Data
        }
    }

    /// Wraps raw interleaved RGB (or RGBA when `isAlpha`) bytes in a CGImage.
    /// - Parameters:
    ///   - rgbData: pixel bytes; the data is copied, so the pointer need not outlive the image.
    ///   - linesize: bytes per row (may include padding beyond width * bytesPerPixel).
    ///   - isAlpha: true for 32-bit RGBA input, false for 24-bit RGB.
    static func make(rgbData: UnsafePointer<UInt8>, linesize: Int, width: Int, height: Int, isAlpha: Bool = false) -> CGImage? {
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo: CGBitmapInfo = isAlpha ? CGBitmapInfo(rawValue: CGImageAlphaInfo.last.rawValue) : CGBitmapInfo.byteOrderMask
        guard let data = CFDataCreate(kCFAllocatorDefault, rgbData, linesize * height), let provider = CGDataProvider(data: data) else {
            return nil
        }
        // swiftlint:disable line_length
        return CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: isAlpha ? 32 : 24, bytesPerRow: linesize, space: colorSpace, bitmapInfo: bitmapInfo, provider: provider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
        // swiftlint:enable line_length
    }
}
|
||||
|
||||
public extension AVFileType {
    // Extra ImageIO container identifiers used by CGImage.data(type:quality:).
    // NOTE(review): kUTTypePNG/kUTTypeJPEG2000 are deprecated in favor of UTType —
    // confirm the minimum deployment target before migrating.
    static let png = AVFileType(kUTTypePNG as String)
    static let jpeg2000 = AVFileType(kUTTypeJPEG2000 as String)
}
|
||||
|
||||
extension URL: Identifiable {
    // Self-identifying so values can be used directly with ForEach / sheet(item:).
    public var id: Self { self }
}

extension String: Identifiable {
    // Self-identifying so values can be used directly with ForEach / sheet(item:).
    public var id: Self { self }
}

extension Float: Identifiable {
    // Self-identifying so values can be used directly with ForEach / sheet(item:).
    public var id: Self { self }
}
|
||||
|
||||
/// A value that is exactly one of two types (a minimal sum type).
public enum Either<Left, Right> {
    case left(Left), right(Right)
}

public extension Either {
    // Disambiguating initializer: forces the Left case when a value could
    // satisfy both generic parameters.
    init(_ left: Left, or _: Right.Type) { self = .left(left) }
    init(_ left: Left) { self = .left(left) }
    init(_ right: Right) { self = .right(right) }
}
|
||||
|
||||
/// Allows to "box" another value.
|
||||
final class Box<T> {
|
||||
let value: T
|
||||
|
||||
init(_ value: T) {
|
||||
self.value = value
|
||||
}
|
||||
}
|
||||
|
||||
extension Array {
    /// Builds an array from a homogeneous 8-tuple (C fixed-size arrays import as tuples).
    init(tuple: (Element, Element, Element, Element, Element, Element, Element, Element)) {
        self.init([tuple.0, tuple.1, tuple.2, tuple.3, tuple.4, tuple.5, tuple.6, tuple.7])
    }

    /// Builds an array from a homogeneous 4-tuple.
    init(tuple: (Element, Element, Element, Element)) {
        self.init([tuple.0, tuple.1, tuple.2, tuple.3])
    }

    /// The first 8 elements as a tuple. Traps when count < 8.
    var tuple8: (Element, Element, Element, Element, Element, Element, Element, Element) {
        (self[0], self[1], self[2], self[3], self[4], self[5], self[6], self[7])
    }

    /// The first 4 elements as a tuple. Traps when count < 4.
    var tuple4: (Element, Element, Element, Element) {
        (self[0], self[1], self[2], self[3])
    }

    /// Stable bottom-up merge sort (the standard library's sort is not guaranteed stable).
    /// - Parameter isOrderedBefore: strict "less than" predicate.
    /// - Returns: a sorted copy in which equal elements keep their original relative order.
    func mergeSortBottomUp(isOrderedBefore: (Element, Element) -> Bool) -> [Element] {
        let n = count
        var z = [self, self] // the two ping-pong working arrays
        var d = 0 // z[d] is used for reading, z[1 - d] for writing
        var width = 1
        while width < n {
            var i = 0
            while i < n {
                var j = i
                var l = i
                var r = i + width

                let lmax = Swift.min(l + width, n)
                let rmax = Swift.min(r + width, n)

                while l < lmax, r < rmax {
                    // Fixed for stability: take the LEFT element unless the right one is
                    // strictly smaller. The old `isOrderedBefore(left, right)` test pulled
                    // the right element first on ties, reversing equal elements.
                    if isOrderedBefore(z[d][r], z[d][l]) {
                        z[1 - d][j] = z[d][r]
                        r += 1
                    } else {
                        z[1 - d][j] = z[d][l]
                        l += 1
                    }
                    j += 1
                }
                // Drain whichever run still has elements.
                while l < lmax {
                    z[1 - d][j] = z[d][l]
                    j += 1
                    l += 1
                }
                while r < rmax {
                    z[1 - d][j] = z[d][r]
                    j += 1
                    r += 1
                }

                i += width * 2
            }

            width *= 2 // in each step, the subarray to merge becomes larger
            d = 1 - d // swap active array
        }
        return z[d]
    }
}
|
||||
550
KSPlayer-main/Sources/KSPlayer/MEPlayer/AVFFmpegExtension.swift
Normal file
550
KSPlayer-main/Sources/KSPlayer/MEPlayer/AVFFmpegExtension.swift
Normal file
@@ -0,0 +1,550 @@
|
||||
import CoreMedia
|
||||
import FFmpegKit
|
||||
import Libavcodec
|
||||
import Libavfilter
|
||||
import Libavformat
|
||||
|
||||
/// Converts an FFmpeg AVDictionary into a Swift [String: String].
/// - Parameter native: opaque AVDictionary pointer; nil yields an empty dictionary.
func toDictionary(_ native: OpaquePointer?) -> [String: String] {
    var dict = [String: String]()
    if let native {
        // av_dict_get with an empty key and AV_DICT_IGNORE_SUFFIX iterates every
        // entry; each call resumes from the previously returned entry.
        var prev: UnsafeMutablePointer<AVDictionaryEntry>?
        while let tag = av_dict_get(native, "", prev, AV_DICT_IGNORE_SUFFIX) {
            dict[String(cString: tag.pointee.key)] = String(cString: tag.pointee.value)
            prev = tag
        }
    }
    return dict
}
|
||||
|
||||
extension UnsafeMutablePointer where Pointee == AVCodecContext {
    /// Installs a get_format callback that opts into VideoToolbox hardware
    /// decoding when FFmpeg offers AV_PIX_FMT_VIDEOTOOLBOX, otherwise falls
    /// back to the first offered format.
    func getFormat() {
        pointee.get_format = { ctx, fmt -> AVPixelFormat in
            guard let fmt, let ctx else {
                return AV_PIX_FMT_NONE
            }
            // `fmt` is an AV_PIX_FMT_NONE-terminated list of candidate formats.
            var i = 0
            while fmt[i] != AV_PIX_FMT_NONE {
                if fmt[i] == AV_PIX_FMT_VIDEOTOOLBOX {
                    let deviceCtx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
                    if deviceCtx == nil {
                        // Hardware device unavailable: fall through to software.
                        break
                    }
                    // Setting hw_device_ctx alone is sufficient; hw_frames_ctx is not required.
                    ctx.pointee.hw_device_ctx = deviceCtx
                    // var framesCtx = av_hwframe_ctx_alloc(deviceCtx)
                    // if let framesCtx {
                    //     let framesCtxData = UnsafeMutableRawPointer(framesCtx.pointee.data)
                    //         .bindMemory(to: AVHWFramesContext.self, capacity: 1)
                    //     framesCtxData.pointee.format = AV_PIX_FMT_VIDEOTOOLBOX
                    //     framesCtxData.pointee.sw_format = ctx.pointee.pix_fmt.bestPixelFormat
                    //     framesCtxData.pointee.width = ctx.pointee.width
                    //     framesCtxData.pointee.height = ctx.pointee.height
                    // }
                    // if av_hwframe_ctx_init(framesCtx) != 0 {
                    //     av_buffer_unref(&framesCtx)
                    //     break
                    // }
                    // ctx.pointee.hw_frames_ctx = framesCtx
                    return fmt[i]
                }
                i += 1
            }
            // No VideoToolbox entry (or device alloc failed): take FFmpeg's first choice.
            return fmt[0]
        }
    }
}
|
||||
|
||||
extension AVCodecContext {
    /// Determines how many comma-separated fields precede the Text field in this
    /// stream's ASS "[Events]" Format line, so dialogue lines can be split at the
    /// right comma. Falls back to 10 (the common ASS field count) when the header
    /// is missing, unreadable, or its Format line does not end in "Text".
    func parseASSEvents() -> Int {
        var subtitleASSEvents = 10
        if subtitle_header_size > 0, let events = String(data: Data(bytes: subtitle_header, count: Int(subtitle_header_size)), encoding: .ascii), let eventsRange = events.range(of: "[Events]") {
            var range = eventsRange.upperBound ..< events.endIndex
            if let eventsRange = events.range(of: "Format:", options: String.CompareOptions(rawValue: 0), range: range, locale: nil) {
                range = eventsRange.upperBound ..< events.endIndex
                if let eventsRange = events.rangeOfCharacter(from: CharacterSet.newlines, options: String.CompareOptions(rawValue: 0), range: range) {
                    // Narrow to the Format line itself (up to its trailing newline).
                    range = range.lowerBound ..< eventsRange.upperBound
                    let format = events[range]
                    let fields = format.components(separatedBy: ",")
                    let text = fields.last
                    // Only trust the count when the final field is literally "Text".
                    if let text, text.trimmingCharacters(in: .whitespacesAndNewlines) == "Text" {
                        subtitleASSEvents = fields.count
                    }
                }
            }
        }
        return subtitleASSEvents
    }
}
|
||||
|
||||
extension AVCodecParameters {
    /// Allocates and opens an AVCodecContext configured from these parameters.
    /// - Parameter options: decoder tuning (hardware decode, low delay, lowres, raw avOptions).
    /// - Returns: an opened codec context; ownership transfers to the caller.
    /// - Throws: NSError with a KSPlayer error code when allocation, parameter
    ///   copying, decoder lookup, or avcodec_open2 fails; the context is freed
    ///   on every failure path.
    mutating func createContext(options: KSOptions?) throws -> UnsafeMutablePointer<AVCodecContext> {
        var codecContextOption = avcodec_alloc_context3(nil)
        guard let codecContext = codecContextOption else {
            throw NSError(errorCode: .codecContextCreate)
        }
        var result = avcodec_parameters_to_context(codecContext, &self)
        guard result == 0 else {
            avcodec_free_context(&codecContextOption)
            throw NSError(errorCode: .codecContextSetParam, avErrorCode: result)
        }
        // Opt into VideoToolbox via the get_format callback for hardware video decode.
        if codec_type == AVMEDIA_TYPE_VIDEO, options?.hardwareDecode ?? false {
            codecContext.getFormat()
        }
        guard let codec = avcodec_find_decoder(codecContext.pointee.codec_id) else {
            avcodec_free_context(&codecContextOption)
            throw NSError(errorCode: .codecContextFindDecoder, avErrorCode: result)
        }
        codecContext.pointee.codec_id = codec.pointee.id
        // Allow non-spec-compliant decoder speedups.
        codecContext.pointee.flags2 |= AV_CODEC_FLAG2_FAST
        if options?.codecLowDelay == true {
            codecContext.pointee.flags |= AV_CODEC_FLAG_LOW_DELAY
        }
        var avOptions = options?.decoderOptions.avOptions
        if let options {
            // Clamp the requested low-resolution decode factor to what the codec supports.
            var lowres = options.lowres
            if lowres > codec.pointee.max_lowres {
                lowres = codec.pointee.max_lowres
            }
            codecContext.pointee.lowres = Int32(lowres)
            if lowres > 0 {
                av_dict_set_int(&avOptions, "lowres", Int64(lowres), 0)
            }
        }
        result = avcodec_open2(codecContext, codec, &avOptions)
        av_dict_free(&avOptions)
        guard result == 0 else {
            avcodec_free_context(&codecContextOption)
            throw NSError(errorCode: .codesContextOpen, avErrorCode: result)
        }
        return codecContext
    }
}
|
||||
|
||||
/**
|
||||
Clients who specify AVVideoColorPropertiesKey must specify a color primary, transfer function, and Y'CbCr matrix.
|
||||
Most clients will want to specify HD, which consists of:
|
||||
|
||||
AVVideoColorPrimaries_ITU_R_709_2
|
||||
AVVideoTransferFunction_ITU_R_709_2
|
||||
AVVideoYCbCrMatrix_ITU_R_709_2
|
||||
|
||||
If you require SD colorimetry use:
|
||||
|
||||
AVVideoColorPrimaries_SMPTE_C
|
||||
AVVideoTransferFunction_ITU_R_709_2
|
||||
AVVideoYCbCrMatrix_ITU_R_601_4
|
||||
|
||||
If you require wide gamut HD colorimetry, you can use:
|
||||
|
||||
AVVideoColorPrimaries_P3_D65
|
||||
AVVideoTransferFunction_ITU_R_709_2
|
||||
AVVideoYCbCrMatrix_ITU_R_709_2
|
||||
|
||||
If you require 10-bit wide gamut HD colorimetry, you can use:
|
||||
|
||||
AVVideoColorPrimaries_P3_D65
|
||||
AVVideoTransferFunction_ITU_R_2100_HLG
|
||||
AVVideoYCbCrMatrix_ITU_R_709_2
|
||||
*/
|
||||
extension AVColorPrimaries {
    /// CoreVideo color-primaries constant matching this FFmpeg value, falling
    /// back to CoreVideo's code-point lookup for values without a direct case;
    /// nil when CoreVideo does not recognize the code point either.
    var colorPrimaries: CFString? {
        switch self {
        case AVCOL_PRI_BT470BG:
            return kCVImageBufferColorPrimaries_EBU_3213
        case AVCOL_PRI_SMPTE170M:
            return kCVImageBufferColorPrimaries_SMPTE_C
        case AVCOL_PRI_BT709:
            return kCVImageBufferColorPrimaries_ITU_R_709_2
        case AVCOL_PRI_BT2020:
            return kCVImageBufferColorPrimaries_ITU_R_2020
        default:
            // FFmpeg and CoreVideo share ITU-T H.273 code points, so the raw
            // value can be resolved directly.
            return CVColorPrimariesGetStringForIntegerCodePoint(Int32(rawValue))?.takeUnretainedValue()
        }
    }
}
|
||||
|
||||
extension AVColorTransferCharacteristic {
    /// CoreVideo transfer-function constant matching this FFmpeg value, falling
    /// back to CoreVideo's code-point lookup; nil when unrecognized.
    var transferFunction: CFString? {
        switch self {
        case AVCOL_TRC_SMPTE2084:
            return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ
        case AVCOL_TRC_BT2020_10, AVCOL_TRC_BT2020_12:
            return kCVImageBufferTransferFunction_ITU_R_2020
        case AVCOL_TRC_BT709:
            return kCVImageBufferTransferFunction_ITU_R_709_2
        case AVCOL_TRC_SMPTE240M:
            return kCVImageBufferTransferFunction_SMPTE_240M_1995
        case AVCOL_TRC_LINEAR:
            return kCVImageBufferTransferFunction_Linear
        case AVCOL_TRC_SMPTE428:
            return kCVImageBufferTransferFunction_SMPTE_ST_428_1
        case AVCOL_TRC_ARIB_STD_B67:
            return kCVImageBufferTransferFunction_ITU_R_2100_HLG
        case AVCOL_TRC_GAMMA22, AVCOL_TRC_GAMMA28:
            // Gamma curves additionally need kCVImageBufferGammaLevelKey set by the consumer.
            return kCVImageBufferTransferFunction_UseGamma
        default:
            return CVTransferFunctionGetStringForIntegerCodePoint(Int32(rawValue))?.takeUnretainedValue()
        }
    }
}
|
||||
|
||||
extension AVColorSpace {
    /// CoreVideo Y'CbCr-matrix constant matching this FFmpeg value, falling
    /// back to CoreVideo's code-point lookup; nil when unrecognized.
    var ycbcrMatrix: CFString? {
        switch self {
        case AVCOL_SPC_BT709:
            return kCVImageBufferYCbCrMatrix_ITU_R_709_2
        case AVCOL_SPC_BT470BG, AVCOL_SPC_SMPTE170M:
            return kCVImageBufferYCbCrMatrix_ITU_R_601_4
        case AVCOL_SPC_SMPTE240M:
            return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995
        case AVCOL_SPC_BT2020_CL, AVCOL_SPC_BT2020_NCL:
            return kCVImageBufferYCbCrMatrix_ITU_R_2020
        default:
            return CVYCbCrMatrixGetStringForIntegerCodePoint(Int32(rawValue))?.takeUnretainedValue()
        }
    }
}
|
||||
|
||||
extension AVChromaLocation {
    /// CoreVideo chroma-siting constant matching this FFmpeg value; nil for
    /// unspecified/unsupported locations (no code-point fallback exists here).
    var chroma: CFString? {
        switch self {
        case AVCHROMA_LOC_LEFT:
            return kCVImageBufferChromaLocation_Left
        case AVCHROMA_LOC_CENTER:
            return kCVImageBufferChromaLocation_Center
        case AVCHROMA_LOC_TOP:
            return kCVImageBufferChromaLocation_Top
        case AVCHROMA_LOC_BOTTOM:
            return kCVImageBufferChromaLocation_Bottom
        case AVCHROMA_LOC_TOPLEFT:
            return kCVImageBufferChromaLocation_TopLeft
        case AVCHROMA_LOC_BOTTOMLEFT:
            return kCVImageBufferChromaLocation_BottomLeft
        default:
            return nil
        }
    }
}
|
||||
|
||||
extension AVPixelFormat {
|
||||
/// Bit depth of the format's first component (e.g. 8 for YUV420P, 10 for P010);
/// 8 when FFmpeg has no descriptor for this format.
var bitDepth: Int32 {
    let descriptor = av_pix_fmt_desc_get(self)
    return descriptor?.pointee.comp.0.depth ?? 8
}
|
||||
|
||||
/// Number of distinct data planes for this format (e.g. 3 for planar YUV420P,
/// 2 for NV12, 1 for packed formats); 1 when no descriptor exists.
var planeCount: UInt8 {
    if let desc = av_pix_fmt_desc_get(self) {
        // The plane index of the format's last component, plus one, is the plane count.
        switch desc.pointee.nb_components {
        case 3:
            return UInt8(desc.pointee.comp.2.plane + 1)
        case 2:
            return UInt8(desc.pointee.comp.1.plane + 1)
        default:
            return UInt8(desc.pointee.comp.0.plane + 1)
        }
    } else {
        return 1
    }
}
|
||||
|
||||
// Bit shift applied to samples of 10-bit little-endian planar YUV formats;
// 0 for everything else. NOTE(review): presumably widens 10-bit samples toward
// the 16-bit container range — confirm against the consuming render path.
var leftShift: UInt8 {
    switch self {
    case AV_PIX_FMT_YUV420P10LE, AV_PIX_FMT_YUV422P10LE, AV_PIX_FMT_YUV444P10LE:
        return 6
    default:
        return 0
    }
}
|
||||
|
||||
// videotoolbox_best_pixel_format
/// Closest CoreVideo-friendly pixel format for this format's frames (mirrors
/// FFmpeg's videotoolbox_best_pixel_format): alpha → AYUV64LE; otherwise a
/// bi-planar NV/P format picked by bit depth and chroma subsampling.
/// Defaults to NV12 when no descriptor exists.
var bestPixelFormat: AVPixelFormat {
    if let desc = av_pix_fmt_desc_get(self) {
        if desc.pointee.flags & UInt64(AV_PIX_FMT_FLAG_ALPHA) != 0 {
            return AV_PIX_FMT_AYUV64LE
        }
        let depth = desc.pointee.comp.0.depth
        if depth > 10 {
            // log2_chroma_w == 0 means no horizontal chroma subsampling (4:4:4).
            return desc.pointee.log2_chroma_w == 0 ? AV_PIX_FMT_P416LE : AV_PIX_FMT_P216LE
        }
        if desc.pointee.log2_chroma_w == 0 {
            return depth <= 8 ? AV_PIX_FMT_NV24 : AV_PIX_FMT_P410LE
        }
        if desc.pointee.log2_chroma_h == 0 {
            // Horizontal-only subsampling (4:2:2).
            return depth <= 8 ? AV_PIX_FMT_NV16 : AV_PIX_FMT_P210LE
        }
        return depth <= 8 ? AV_PIX_FMT_NV12 : AV_PIX_FMT_P010LE
    } else {
        return AV_PIX_FMT_NV12
    }
}
|
||||
|
||||
// swiftlint:disable cyclomatic_complexity
|
||||
// avfoundation.m
|
||||
func osType(fullRange: Bool = false) -> OSType? {
|
||||
switch self {
|
||||
case AV_PIX_FMT_MONOBLACK: return kCVPixelFormatType_1Monochrome
|
||||
// case AV_PIX_FMT_PAL8: return kCVPixelFormatType_32RGBA
|
||||
case AV_PIX_FMT_GRAY8: return kCVPixelFormatType_OneComponent8
|
||||
case AV_PIX_FMT_RGB555BE: return kCVPixelFormatType_16BE555
|
||||
case AV_PIX_FMT_RGB555LE: return kCVPixelFormatType_16LE555
|
||||
case AV_PIX_FMT_RGB565BE: return kCVPixelFormatType_16BE565
|
||||
case AV_PIX_FMT_RGB565LE: return kCVPixelFormatType_16LE565
|
||||
// PixelBufferPool 无法支持24BGR
|
||||
// case AV_PIX_FMT_BGR24: return kCVPixelFormatType_24BGR
|
||||
case AV_PIX_FMT_RGB24: return kCVPixelFormatType_24RGB
|
||||
case AV_PIX_FMT_0RGB: return kCVPixelFormatType_32ARGB
|
||||
case AV_PIX_FMT_ARGB: return kCVPixelFormatType_32ARGB
|
||||
case AV_PIX_FMT_BGR0: return kCVPixelFormatType_32BGRA
|
||||
case AV_PIX_FMT_BGRA: return kCVPixelFormatType_32BGRA
|
||||
case AV_PIX_FMT_0BGR: return kCVPixelFormatType_32ABGR
|
||||
case AV_PIX_FMT_RGB0: return kCVPixelFormatType_32RGBA
|
||||
case AV_PIX_FMT_RGBA: return kCVPixelFormatType_32RGBA
|
||||
case AV_PIX_FMT_BGR48BE, AV_PIX_FMT_BGR48LE: return kCVPixelFormatType_48RGB
|
||||
case AV_PIX_FMT_NV12: return fullRange ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
|
||||
// AVSampleBufferDisplayLayer不能显示 kCVPixelFormatType_420YpCbCr8PlanarFullRange,所以换成是kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
|
||||
case AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVJ420P: return fullRange ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_420YpCbCr8Planar
|
||||
case AV_PIX_FMT_P010BE, AV_PIX_FMT_P010LE, AV_PIX_FMT_YUV420P10BE, AV_PIX_FMT_YUV420P10LE: return fullRange ? kCVPixelFormatType_420YpCbCr10BiPlanarFullRange : kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange
|
||||
case AV_PIX_FMT_UYVY422: return kCVPixelFormatType_422YpCbCr8
|
||||
case AV_PIX_FMT_YUYV422: return kCVPixelFormatType_422YpCbCr8_yuvs
|
||||
case AV_PIX_FMT_NV16: return fullRange ? kCVPixelFormatType_422YpCbCr8BiPlanarFullRange : kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange
|
||||
case AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVJ422P: return fullRange ? kCVPixelFormatType_422YpCbCr8BiPlanarFullRange : kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange
|
||||
case AV_PIX_FMT_Y210BE, AV_PIX_FMT_Y210LE: return kCVPixelFormatType_422YpCbCr10
|
||||
case AV_PIX_FMT_P210BE, AV_PIX_FMT_P210LE, AV_PIX_FMT_YUV422P10BE, AV_PIX_FMT_YUV422P10LE: return fullRange ? kCVPixelFormatType_422YpCbCr10BiPlanarFullRange : kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange
|
||||
case AV_PIX_FMT_P216BE, AV_PIX_FMT_P216LE, AV_PIX_FMT_YUV422P16BE, AV_PIX_FMT_YUV422P16LE: return kCVPixelFormatType_422YpCbCr16BiPlanarVideoRange
|
||||
case AV_PIX_FMT_NV24, AV_PIX_FMT_YUV444P: return fullRange ? kCVPixelFormatType_444YpCbCr8BiPlanarFullRange : kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange
|
||||
case AV_PIX_FMT_YUVA444P: return kCVPixelFormatType_4444YpCbCrA8R
|
||||
case AV_PIX_FMT_P410BE, AV_PIX_FMT_P410LE, AV_PIX_FMT_YUV444P10BE, AV_PIX_FMT_YUV444P10LE: return fullRange ? kCVPixelFormatType_444YpCbCr10BiPlanarFullRange : kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange
|
||||
case AV_PIX_FMT_P416BE, AV_PIX_FMT_P416LE: return kCVPixelFormatType_444YpCbCr16BiPlanarVideoRange
|
||||
case AV_PIX_FMT_AYUV64BE, AV_PIX_FMT_AYUV64LE: return kCVPixelFormatType_4444AYpCbCr16
|
||||
case AV_PIX_FMT_YUVA444P16BE, AV_PIX_FMT_YUVA444P16LE: return kCVPixelFormatType_4444AYpCbCr16
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
// swiftlint:enable cyclomatic_complexity
|
||||
}
|
||||
|
||||
extension AVCodecID {
    /// CoreMedia media subtype (FourCC) equivalent of this FFmpeg codec ID.
    /// Unmapped codecs return a subtype with raw value 0.
    var mediaSubType: CMFormatDescription.MediaSubType {
        switch self {
        case AV_CODEC_ID_H263:
            return .h263
        case AV_CODEC_ID_H264:
            return .h264
        case AV_CODEC_ID_HEVC:
            return .hevc
        case AV_CODEC_ID_MPEG1VIDEO:
            return .mpeg1Video
        case AV_CODEC_ID_MPEG2VIDEO:
            return .mpeg2Video
        case AV_CODEC_ID_MPEG4:
            return .mpeg4Video
        case AV_CODEC_ID_VP9:
            // No CMFormatDescription.MediaSubType shorthand exists for VP9.
            return CMFormatDescription.MediaSubType(rawValue: kCMVideoCodecType_VP9)
        case AV_CODEC_ID_AAC:
            return .mpeg4AAC
        case AV_CODEC_ID_AC3:
            return .ac3
        case AV_CODEC_ID_ADPCM_IMA_QT:
            return .appleIMA4
        case AV_CODEC_ID_ALAC:
            return .appleLossless
        case AV_CODEC_ID_AMR_NB:
            return .amr
        case AV_CODEC_ID_EAC3:
            return .enhancedAC3
        case AV_CODEC_ID_GSM_MS:
            return .microsoftGSM
        case AV_CODEC_ID_ILBC:
            return .iLBC
        case AV_CODEC_ID_MP1:
            return .mpegLayer1
        case AV_CODEC_ID_MP2:
            return .mpegLayer2
        case AV_CODEC_ID_MP3:
            return .mpegLayer3
        case AV_CODEC_ID_PCM_ALAW:
            return .aLaw
        case AV_CODEC_ID_PCM_MULAW:
            return .uLaw
        case AV_CODEC_ID_QDMC:
            return .qDesign
        case AV_CODEC_ID_QDM2:
            return .qDesign2
        default:
            return CMFormatDescription.MediaSubType(rawValue: 0)
        }
    }
}
|
||||
|
||||
extension AVRational {
    /// Interprets the rational as a size (numerator × denominator);
    /// non-positive components collapse to a 1×1 fallback.
    var size: CGSize {
        guard num > 0, den > 0 else {
            return CGSize(width: 1, height: 1)
        }
        return CGSize(width: Int(num), height: Int(den))
    }
}
|
||||
|
||||
extension AVBufferSrcParameters: Equatable {
    /// Equality over the fields that determine filter-graph compatibility
    /// (format, dimensions, aspect ratio, sample rate and channel layout).
    public static func == (lhs: AVBufferSrcParameters, rhs: AVBufferSrcParameters) -> Bool {
        lhs.format == rhs.format && lhs.width == rhs.width && lhs.height == rhs.height && lhs.sample_aspect_ratio == rhs.sample_aspect_ratio && lhs.sample_rate == rhs.sample_rate && lhs.ch_layout == rhs.ch_layout
    }

    /// Argument string for creating an FFmpeg buffer/abuffer filter source:
    /// audio parameters when `sample_rate` is set, video parameters otherwise.
    var arg: String {
        if sample_rate > 0 {
            let fmt = String(cString: av_get_sample_fmt_name(AVSampleFormat(rawValue: format)))
            return "sample_rate=\(sample_rate):sample_fmt=\(fmt):time_base=\(time_base.num)/\(time_base.den):channels=\(ch_layout.nb_channels):channel_layout=\(ch_layout.description)"
        } else {
            return "video_size=\(width)x\(height):pix_fmt=\(format):time_base=\(time_base.num)/\(time_base.den):pixel_aspect=\(sample_aspect_ratio.num)/\(sample_aspect_ratio.den)"
        }
    }
}
|
||||
|
||||
extension AVChannelLayout: Equatable {
    /// Delegates equality to FFmpeg's av_channel_layout_compare
    /// (0 means the layouts are equal).
    public static func == (lhs: AVChannelLayout, rhs: AVChannelLayout) -> Bool {
        // Local mutable copies are required because the C API takes pointers.
        var lhs = lhs
        var rhs = rhs
        return av_channel_layout_compare(&lhs, &rhs) == 0
    }
}
|
||||
|
||||
extension AVChannelLayout: CustomStringConvertible {
    /// Native-order stereo layout used as a fallback default.
    static let defaultValue = AVChannelLayout(order: AV_CHANNEL_ORDER_NATIVE, nb_channels: 2, u: AVChannelLayout.__Unnamed_union_u(mask: swift_AV_CH_LAYOUT_STEREO), opaque: nil)

    /// CoreAudio layout tag whose channel mask matches this layout's mask,
    /// or nil when no entry in `layoutMapTuple` matches.
    var layoutTag: AudioChannelLayoutTag? {
        KSLog("[audio] FFmepg AVChannelLayout: \(self) order: \(order) mask: \(u.mask)")
        let tag = layoutMapTuple.first { _, mask in
            u.mask == mask
        }?.tag
        if let tag {
            return tag
        } else {
            KSLog("[audio] can not find AudioChannelLayoutTag FFmepg channelLayout: \(self) order: \(order) mask: \(u.mask)")
            return nil
        }
    }

    /// Human-readable layout name from av_channel_layout_describe.
    public var description: String {
        var channelLayout = self
        // NOTE(review): assumes 64 bytes is enough for any description — confirm.
        var str = [Int8](repeating: 0, count: 64)
        _ = av_channel_layout_describe(&channelLayout, &str, str.count)
        return String(cString: str)
    }
}
|
||||
|
||||
extension AVRational: Equatable {
    /// Two rationals are equal when both numerator and denominator match.
    public static func == (lhs: AVRational, rhs: AVRational) -> Bool {
        // Fixed: the original compared rhs.den to itself (always true), so any
        // two rationals with equal numerators were reported as equal.
        lhs.num == rhs.num && lhs.den == rhs.den
    }
}
|
||||
|
||||
/// Swift wrapper pairing an FFmpeg error code with its rendered message.
public struct AVError: Error, Equatable {
    // Raw FFmpeg error code.
    public var code: Int32
    // Message produced from `code` via av_make_error_string.
    public var message: String

    init(code: Int32) {
        self.code = code
        message = String(avErrorCode: code)
    }
}
|
||||
|
||||
public extension Dictionary where Key == String {
    /// Builds an FFmpeg AVDictionary (opaque pointer) from this dictionary.
    /// Supported value types: Int64/Int, String, nested [String: _] dictionaries
    /// (flattened to "key=value" lines), and [String] arrays ("+"-joined).
    /// Unsupported value types are silently skipped.
    /// NOTE(review): nothing here frees the dictionary — confirm callers own it.
    var avOptions: OpaquePointer? {
        var avOptions: OpaquePointer?
        forEach { key, value in
            if let i = value as? Int64 {
                av_dict_set_int(&avOptions, key, i, 0)
            } else if let i = value as? Int {
                av_dict_set_int(&avOptions, key, Int64(i), 0)
            } else if let string = value as? String {
                av_dict_set(&avOptions, key, string, 0)
            } else if let dic = value as? Dictionary {
                // Nested options are encoded one "k=v" per line.
                let string = dic.map { "\($0.0)=\($0.1)" }.joined(separator: "\r\n")
                av_dict_set(&avOptions, key, string, 0)
            } else if let array = value as? [String] {
                // Arrays are encoded as a "+"-separated flag list.
                let string = array.joined(separator: "+")
                av_dict_set(&avOptions, key, string, 0)
            }
        }
        return avOptions
    }
}
|
||||
|
||||
extension String {
    /// Renders the human-readable FFmpeg message for an error code using
    /// av_make_error_string into a temporary AV_ERROR_MAX_STRING_SIZE buffer.
    init(avErrorCode code: Int32) {
        let buf = UnsafeMutablePointer<Int8>.allocate(capacity: Int(AV_ERROR_MAX_STRING_SIZE))
        buf.initialize(repeating: 0, count: Int(AV_ERROR_MAX_STRING_SIZE))
        // The String(cString:) above copies the bytes, so the buffer can be
        // released once this initializer returns.
        defer { buf.deallocate() }
        self = String(cString: av_make_error_string(buf, Int(AV_ERROR_MAX_STRING_SIZE), code))
    }
}
|
||||
|
||||
public extension NSError {
    /// Creates a player NSError wrapping an FFmpeg error code: the AVError is
    /// attached under NSUnderlyingErrorKey so callers can inspect the cause.
    convenience init(errorCode: KSPlayerErrorCode, avErrorCode: Int32) {
        let underlyingError = AVError(code: avErrorCode)
        self.init(errorCode: errorCode, userInfo: [NSUnderlyingErrorKey: underlyingError])
    }
}
|
||||
|
||||
/// Named constants for FFmpeg's AVERROR codes, mirroring libavutil/error.h.
public extension AVError {
    /// Resource temporarily unavailable
    static let tryAgain = AVError(code: swift_AVERROR(EAGAIN))
    /// Invalid argument
    static let invalidArgument = AVError(code: swift_AVERROR(EINVAL))
    /// Cannot allocate memory
    static let outOfMemory = AVError(code: swift_AVERROR(ENOMEM))
    /// The value is out of range
    static let outOfRange = AVError(code: swift_AVERROR(ERANGE))
    /// The value is not valid
    static let invalidValue = AVError(code: swift_AVERROR(EINVAL))
    /// Function not implemented
    static let noSystem = AVError(code: swift_AVERROR(ENOSYS))

    /// Bitstream filter not found
    static let bitstreamFilterNotFound = AVError(code: swift_AVERROR_BSF_NOT_FOUND)
    /// Internal bug, also see `bug2`
    static let bug = AVError(code: swift_AVERROR_BUG)
    /// Buffer too small
    static let bufferTooSmall = AVError(code: swift_AVERROR_BUFFER_TOO_SMALL)
    /// Decoder not found
    static let decoderNotFound = AVError(code: swift_AVERROR_DECODER_NOT_FOUND)
    /// Demuxer not found
    static let demuxerNotFound = AVError(code: swift_AVERROR_DEMUXER_NOT_FOUND)
    /// Encoder not found
    static let encoderNotFound = AVError(code: swift_AVERROR_ENCODER_NOT_FOUND)
    /// End of file
    static let eof = AVError(code: swift_AVERROR_EOF)
    /// Immediate exit was requested; the called function should not be restarted
    static let exit = AVError(code: swift_AVERROR_EXIT)
    /// Generic error in an external library
    static let external = AVError(code: swift_AVERROR_EXTERNAL)
    /// Filter not found
    static let filterNotFound = AVError(code: swift_AVERROR_FILTER_NOT_FOUND)
    /// Invalid data found when processing input
    static let invalidData = AVError(code: swift_AVERROR_INVALIDDATA)
    /// Muxer not found
    static let muxerNotFound = AVError(code: swift_AVERROR_MUXER_NOT_FOUND)
    /// Option not found
    static let optionNotFound = AVError(code: swift_AVERROR_OPTION_NOT_FOUND)
    /// Not yet implemented in FFmpeg, patches welcome
    static let patchWelcome = AVError(code: swift_AVERROR_PATCHWELCOME)
    /// Protocol not found
    static let protocolNotFound = AVError(code: swift_AVERROR_PROTOCOL_NOT_FOUND)
    /// Stream not found
    static let streamNotFound = AVError(code: swift_AVERROR_STREAM_NOT_FOUND)
    /// This is semantically identical to `bug`. It has been introduced in Libav after our `bug` and
    /// with a modified value.
    static let bug2 = AVError(code: swift_AVERROR_BUG2)
    /// Unknown error, typically from an external library
    static let unknown = AVError(code: swift_AVERROR_UNKNOWN)
    /// Requested feature is flagged experimental. Set strict_std_compliance if you really want to use it.
    static let experimental = AVError(code: swift_AVERROR_EXPERIMENTAL)
    /// Input changed between calls. Reconfiguration is required. (can be OR-ed with `outputChanged`)
    static let inputChanged = AVError(code: swift_AVERROR_INPUT_CHANGED)
    /// Output changed between calls. Reconfiguration is required. (can be OR-ed with `inputChanged`)
    static let outputChanged = AVError(code: swift_AVERROR_OUTPUT_CHANGED)

    /* HTTP & RTSP errors */
    static let httpBadRequest = AVError(code: swift_AVERROR_HTTP_BAD_REQUEST)
    static let httpUnauthorized = AVError(code: swift_AVERROR_HTTP_UNAUTHORIZED)
    static let httpForbidden = AVError(code: swift_AVERROR_HTTP_FORBIDDEN)
    static let httpNotFound = AVError(code: swift_AVERROR_HTTP_NOT_FOUND)
    static let httpOther4xx = AVError(code: swift_AVERROR_HTTP_OTHER_4XX)
    static let httpServerError = AVError(code: swift_AVERROR_HTTP_SERVER_ERROR)
}
|
||||
@@ -0,0 +1,333 @@
|
||||
//
|
||||
// AVFoundationExtension.swift
|
||||
//
|
||||
//
|
||||
// Created by kintan on 2023/1/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CoreMedia
|
||||
import FFmpegKit
|
||||
import Libavutil
|
||||
|
||||
extension OSType {
    /// Bits per color component for this CoreVideo pixel format: 10 for the
    /// 10-bit biplanar YCbCr formats, 8 for everything else.
    var bitDepth: Int32 {
        let tenBitFormats: Set<OSType> = [
            kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange,
            kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange,
            kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange,
            kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
            kCVPixelFormatType_422YpCbCr10BiPlanarFullRange,
            kCVPixelFormatType_444YpCbCr10BiPlanarFullRange,
        ]
        return tenBitFormats.contains(self) ? 10 : 8
    }
}
|
||||
|
||||
extension CVPixelBufferPool {
    /// Creates a Metal-compatible, IOSurface-backed pixel buffer pool.
    /// - Parameters:
    ///   - width: Buffer width in pixels.
    ///   - height: Buffer height in pixels.
    ///   - bytesPerRowAlignment: Requested row alignment; rounded up to a
    ///     multiple of 64 via the `alignment(value:)` helper.
    ///   - pixelFormatType: CoreVideo pixel format for the buffers.
    ///   - bufferCount: Minimum number of buffers the pool keeps (default 24).
    /// - Returns: The created pool, or nil if CVPixelBufferPoolCreate fails.
    static func create(width: Int32, height: Int32, bytesPerRowAlignment: Int32, pixelFormatType: OSType, bufferCount: Int = 24) -> CVPixelBufferPool? {
        let sourcePixelBufferOptions: NSMutableDictionary = [
            kCVPixelBufferPixelFormatTypeKey: pixelFormatType,
            kCVPixelBufferWidthKey: width,
            kCVPixelBufferHeightKey: height,
            kCVPixelBufferBytesPerRowAlignmentKey: bytesPerRowAlignment.alignment(value: 64),
            kCVPixelBufferMetalCompatibilityKey: true,
            kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(),
        ]
        var outputPool: CVPixelBufferPool?
        let pixelBufferPoolOptions: NSDictionary = [kCVPixelBufferPoolMinimumBufferCountKey: bufferCount]
        CVPixelBufferPoolCreate(kCFAllocatorDefault, pixelBufferPoolOptions, sourcePixelBufferOptions, &outputPool)
        return outputPool
    }
}
|
||||
|
||||
extension AudioUnit {
    /// Reads the unit's output-scope channel layout and normalizes it: layouts
    /// expressed as a tag or bitmap are expanded into explicit descriptions.
    /// NOTE(review): the buffer allocated here is returned to the caller in the
    /// descriptions case but appears to leak in the tag/bitmap branches —
    /// confirm intended ownership with call sites.
    var channelLayout: UnsafeMutablePointer<AudioChannelLayout> {
        var size = UInt32(0)
        // First query the property size — AudioChannelLayout is variable-length.
        AudioUnitGetPropertyInfo(self, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &size, nil)
        let data = UnsafeMutableRawPointer.allocate(byteCount: Int(size), alignment: MemoryLayout<Int8>.alignment)
        AudioUnitGetProperty(self, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, data, &size)
        let layout = data.bindMemory(to: AudioChannelLayout.self, capacity: 1)
        let tag = layout.pointee.mChannelLayoutTag
        KSLog("[audio] unit tag: \(tag)")
        if tag == kAudioChannelLayoutTag_UseChannelDescriptions {
            // Already description-based: return as-is.
            KSLog("[audio] unit channelDescriptions: \(layout.channelDescriptions)")
            return layout
        }
        if tag == kAudioChannelLayoutTag_UseChannelBitmap {
            // Expand the bitmap into descriptions.
            return layout.pointee.mChannelBitmap.channelLayout
        } else {
            // Expand the layout tag into descriptions.
            let layout = tag.channelLayout
            KSLog("[audio] unit channelDescriptions: \(layout.channelDescriptions)")
            return layout
        }
    }
}
|
||||
|
||||
extension AudioChannelLayoutTag {
    /// Expands this layout tag into a freshly allocated AudioChannelLayout
    /// with explicit channel descriptions
    /// (via kAudioFormatProperty_ChannelLayoutForTag).
    /// NOTE(review): nothing here deallocates the buffer — the caller owns it.
    var channelLayout: UnsafeMutablePointer<AudioChannelLayout> {
        var tag = self
        var size = UInt32(0)
        // Query the variable-length result size first.
        AudioFormatGetPropertyInfo(kAudioFormatProperty_ChannelLayoutForTag, UInt32(MemoryLayout<AudioChannelLayoutTag>.size), &tag, &size)
        let data = UnsafeMutableRawPointer.allocate(byteCount: Int(size), alignment: MemoryLayout<Int8>.alignment)
        AudioFormatGetProperty(kAudioFormatProperty_ChannelLayoutForTag, UInt32(MemoryLayout<AudioChannelLayoutTag>.size), &tag, &size, data)
        let newLayout = data.bindMemory(to: AudioChannelLayout.self, capacity: 1)
        // Mark the result as description-based so consumers read the expanded list.
        newLayout.pointee.mChannelLayoutTag = kAudioChannelLayoutTag_UseChannelDescriptions
        return newLayout
    }
}
|
||||
|
||||
extension AudioChannelBitmap {
    /// Expands this channel bitmap into a freshly allocated AudioChannelLayout
    /// with explicit channel descriptions
    /// (via kAudioFormatProperty_ChannelLayoutForBitmap).
    /// NOTE(review): nothing here deallocates the buffer — the caller owns it.
    var channelLayout: UnsafeMutablePointer<AudioChannelLayout> {
        var mChannelBitmap = self
        var size = UInt32(0)
        // Query the variable-length result size first.
        AudioFormatGetPropertyInfo(kAudioFormatProperty_ChannelLayoutForBitmap, UInt32(MemoryLayout<AudioChannelBitmap>.size), &mChannelBitmap, &size)
        let data = UnsafeMutableRawPointer.allocate(byteCount: Int(size), alignment: MemoryLayout<Int8>.alignment)
        AudioFormatGetProperty(kAudioFormatProperty_ChannelLayoutForBitmap, UInt32(MemoryLayout<AudioChannelBitmap>.size), &mChannelBitmap, &size, data)
        let newLayout = data.bindMemory(to: AudioChannelLayout.self, capacity: 1)
        // Mark the result as description-based so consumers read the expanded list.
        newLayout.pointee.mChannelLayoutTag = kAudioChannelLayoutTag_UseChannelDescriptions
        return newLayout
    }
}
|
||||
|
||||
extension UnsafePointer<AudioChannelLayout> {
    /// Immutable-pointer convenience that forwards to the mutable-pointer
    /// implementation below.
    var channelDescriptions: [AudioChannelDescription] {
        UnsafeMutablePointer(mutating: self).channelDescriptions
    }
}
|
||||
|
||||
extension UnsafeMutablePointer<AudioChannelLayout> {
    /// Copies the layout's variable-length trailing mChannelDescriptions
    /// array (mNumberChannelDescriptions entries) into a Swift array.
    var channelDescriptions: [AudioChannelDescription] {
        let count = Int(pointee.mNumberChannelDescriptions)
        return withUnsafeMutablePointer(to: &pointee.mChannelDescriptions) { base in
            Array(UnsafeBufferPointer(start: base, count: count))
        }
    }
}
|
||||
|
||||
extension AudioChannelLayout: CustomStringConvertible {
    /// Debug summary of the layout's tag and description count.
    public var description: String {
        "AudioChannelLayoutTag: \(mChannelLayoutTag), mNumberChannelDescriptions: \(mNumberChannelDescriptions)"
    }
}
|
||||
|
||||
extension AVAudioChannelLayout {
    /// Converts this CoreAudio layout into an FFmpeg AVChannelLayout.
    ///
    /// If the layout carries explicit channel descriptions, each channel label
    /// is mapped to its AVChannel bit and OR-ed into a mask; otherwise the
    /// layout tag is looked up in `layoutMapTuple`. When no mask can be
    /// derived, FFmpeg's default layout for the channel count is used.
    func channelLayout() -> AVChannelLayout {
        KSLog("[audio] channelLayout: \(layout.pointee.description)")
        var mask: UInt64?
        if layoutTag == kAudioChannelLayoutTag_UseChannelDescriptions {
            var newMask = UInt64(0)
            for description in layout.channelDescriptions {
                let label = description.mChannelLabel
                KSLog("[audio] label: \(label)")
                let channel = label.avChannel.rawValue
                KSLog("[audio] avChannel: \(channel)")
                // Skip unmapped labels (negative raw values) so they
                // don't contribute a bit.
                if channel >= 0 {
                    newMask |= 1 << channel
                }
            }
            mask = newMask
        } else {
            mask = layoutMapTuple.first { tag, _ in
                tag == layoutTag
            }?.mask
        }
        var outChannel = AVChannelLayout()
        if let mask {
            // Cannot use AV_CHANNEL_ORDER_CUSTOM here.
            av_channel_layout_from_mask(&outChannel, mask)
        } else {
            av_channel_layout_default(&outChannel, Int32(channelCount))
        }
        KSLog("[audio] out mask: \(outChannel.u.mask) nb_channels: \(outChannel.nb_channels)")
        return outChannel
    }

    /// Debug summary of the layout tag and its channel descriptions.
    public var channelDescriptions: String {
        "tag: \(layoutTag), channelDescriptions: \(layout.channelDescriptions)"
    }
}
|
||||
|
||||
extension AVAudioFormat {
    /// FFmpeg sample format equivalent to this format's common PCM format;
    /// the interleaved vs. planar variant is chosen by `isInterleaved`.
    /// Unknown/other formats fall back to 32-bit float.
    var sampleFormat: AVSampleFormat {
        switch commonFormat {
        case .pcmFormatFloat32:
            return isInterleaved ? AV_SAMPLE_FMT_FLT : AV_SAMPLE_FMT_FLTP
        case .pcmFormatFloat64:
            return isInterleaved ? AV_SAMPLE_FMT_DBL : AV_SAMPLE_FMT_DBLP
        case .pcmFormatInt16:
            return isInterleaved ? AV_SAMPLE_FMT_S16 : AV_SAMPLE_FMT_S16P
        case .pcmFormatInt32:
            return isInterleaved ? AV_SAMPLE_FMT_S32 : AV_SAMPLE_FMT_S32P
        case .otherFormat:
            return isInterleaved ? AV_SAMPLE_FMT_FLT : AV_SAMPLE_FMT_FLTP
        @unknown default:
            return isInterleaved ? AV_SAMPLE_FMT_FLT : AV_SAMPLE_FMT_FLTP
        }
    }

    /// Bytes per frame within one buffer: interleaved formats pack all
    /// channels into a frame, planar formats carry one sample per plane.
    var sampleSize: UInt32 {
        switch commonFormat {
        case .pcmFormatFloat32:
            return isInterleaved ? channelCount * 4 : 4
        case .pcmFormatFloat64:
            return isInterleaved ? channelCount * 8 : 8
        case .pcmFormatInt16:
            return isInterleaved ? channelCount * 2 : 2
        case .pcmFormatInt32:
            return isInterleaved ? channelCount * 4 : 4
        case .otherFormat:
            // NOTE(review): the original returned channelCount * 4 for both the
            // interleaved and planar branches here — confirm planar "other"
            // formats really size like interleaved ones.
            return channelCount * 4
        @unknown default:
            return channelCount * 4
        }
    }

    /// True when the two formats agree on sample rate, channel count,
    /// common format and interleaving.
    func isChannelEqual(_ object: AVAudioFormat) -> Bool {
        // Fixed: the original compared sampleRate twice; the redundant
        // duplicate check is removed (behavior unchanged).
        sampleRate == object.sampleRate && channelCount == object.channelCount && commonFormat == object.commonFormat && isInterleaved == object.isInterleaved
    }
}
|
||||
|
||||
/// Pairs each CoreAudio AudioChannelLayoutTag with the equivalent FFmpeg
/// channel mask; used to translate channel layouts in both directions.
let layoutMapTuple =
    [(tag: kAudioChannelLayoutTag_Mono, mask: swift_AV_CH_LAYOUT_MONO),
     (tag: kAudioChannelLayoutTag_Stereo, mask: swift_AV_CH_LAYOUT_STEREO),
     (tag: kAudioChannelLayoutTag_WAVE_2_1, mask: swift_AV_CH_LAYOUT_2POINT1),
     (tag: kAudioChannelLayoutTag_ITU_2_1, mask: swift_AV_CH_LAYOUT_2_1),
     (tag: kAudioChannelLayoutTag_MPEG_3_0_A, mask: swift_AV_CH_LAYOUT_SURROUND),
     (tag: kAudioChannelLayoutTag_DVD_10, mask: swift_AV_CH_LAYOUT_3POINT1),
     (tag: kAudioChannelLayoutTag_Logic_4_0_A, mask: swift_AV_CH_LAYOUT_4POINT0),
     (tag: kAudioChannelLayoutTag_Logic_Quadraphonic, mask: swift_AV_CH_LAYOUT_2_2),
     (tag: kAudioChannelLayoutTag_WAVE_4_0_B, mask: swift_AV_CH_LAYOUT_QUAD),
     (tag: kAudioChannelLayoutTag_DVD_11, mask: swift_AV_CH_LAYOUT_4POINT1),
     (tag: kAudioChannelLayoutTag_Logic_5_0_A, mask: swift_AV_CH_LAYOUT_5POINT0),
     (tag: kAudioChannelLayoutTag_WAVE_5_0_B, mask: swift_AV_CH_LAYOUT_5POINT0_BACK),
     (tag: kAudioChannelLayoutTag_Logic_5_1_A, mask: swift_AV_CH_LAYOUT_5POINT1),
     (tag: kAudioChannelLayoutTag_WAVE_5_1_B, mask: swift_AV_CH_LAYOUT_5POINT1_BACK),
     (tag: kAudioChannelLayoutTag_Logic_6_0_A, mask: swift_AV_CH_LAYOUT_6POINT0),
     (tag: kAudioChannelLayoutTag_DTS_6_0_A, mask: swift_AV_CH_LAYOUT_6POINT0_FRONT),
     (tag: kAudioChannelLayoutTag_DTS_6_0_C, mask: swift_AV_CH_LAYOUT_HEXAGONAL),
     (tag: kAudioChannelLayoutTag_Logic_6_1_C, mask: swift_AV_CH_LAYOUT_6POINT1),
     (tag: kAudioChannelLayoutTag_DTS_6_1_A, mask: swift_AV_CH_LAYOUT_6POINT1_FRONT),
     (tag: kAudioChannelLayoutTag_DTS_6_1_C, mask: swift_AV_CH_LAYOUT_6POINT1_BACK),
     (tag: kAudioChannelLayoutTag_AAC_7_0, mask: swift_AV_CH_LAYOUT_7POINT0),
     (tag: kAudioChannelLayoutTag_Logic_7_1_A, mask: swift_AV_CH_LAYOUT_7POINT1),
     (tag: kAudioChannelLayoutTag_Logic_7_1_SDDS_A, mask: swift_AV_CH_LAYOUT_7POINT1_WIDE),
     (tag: kAudioChannelLayoutTag_AAC_Octagonal, mask: swift_AV_CH_LAYOUT_OCTAGONAL),
     // (tag: kAudioChannelLayoutTag_Logic_Atmos_5_1_2, mask: swift_AV_CH_LAYOUT_7POINT1_WIDE_BACK),
    ]
|
||||
|
||||
// Some channel abbreviations used below:
|
||||
// Lss - left side surround
|
||||
// Rss - right side surround
|
||||
// Leos - Left edge of screen
|
||||
// Reos - Right edge of screen
|
||||
// Lbs - Left back surround
|
||||
// Rbs - Right back surround
|
||||
// Lt - left matrix total. for matrix encoded stereo.
|
||||
// Rt - right matrix total. for matrix encoded stereo.
|
||||
|
||||
extension AudioChannelLabel {
    /// Maps a CoreAudio channel label to the corresponding FFmpeg AVChannel;
    /// labels with no FFmpeg counterpart map to AV_CHAN_NONE.
    var avChannel: AVChannel {
        switch self {
        case kAudioChannelLabel_Left:
            // L - left
            return AV_CHAN_FRONT_LEFT
        case kAudioChannelLabel_Right:
            // R - right
            return AV_CHAN_FRONT_RIGHT
        case kAudioChannelLabel_Center:
            // C - center
            return AV_CHAN_FRONT_CENTER
        case kAudioChannelLabel_LFEScreen:
            // Lfe
            return AV_CHAN_LOW_FREQUENCY
        case kAudioChannelLabel_LeftSurround:
            // Ls - left surround
            return AV_CHAN_SIDE_LEFT
        case kAudioChannelLabel_RightSurround:
            // Rs - right surround
            return AV_CHAN_SIDE_RIGHT
        case kAudioChannelLabel_LeftCenter:
            // Lc - left center
            return AV_CHAN_FRONT_LEFT_OF_CENTER
        case kAudioChannelLabel_RightCenter:
            // Rc - right center
            return AV_CHAN_FRONT_RIGHT_OF_CENTER
        case kAudioChannelLabel_CenterSurround:
            // Cs - center surround "Back Center" or plain "Rear Surround"
            return AV_CHAN_BACK_CENTER
        case kAudioChannelLabel_LeftSurroundDirect:
            // Lsd - left surround direct
            return AV_CHAN_SURROUND_DIRECT_LEFT
        case kAudioChannelLabel_RightSurroundDirect:
            // Rsd - right surround direct
            return AV_CHAN_SURROUND_DIRECT_RIGHT
        case kAudioChannelLabel_TopCenterSurround:
            // Ts - top surround
            return AV_CHAN_TOP_CENTER
        case kAudioChannelLabel_VerticalHeightLeft:
            // Vhl - vertical height left Top Front Left
            return AV_CHAN_TOP_FRONT_LEFT
        case kAudioChannelLabel_VerticalHeightCenter:
            // Vhc - vertical height center Top Front Center
            return AV_CHAN_TOP_FRONT_CENTER
        case kAudioChannelLabel_VerticalHeightRight:
            // Vhr - vertical height right Top Front right
            return AV_CHAN_TOP_FRONT_RIGHT
        case kAudioChannelLabel_TopBackLeft:
            // Ltr - left top rear
            return AV_CHAN_TOP_BACK_LEFT
        case kAudioChannelLabel_TopBackCenter:
            // Ctr - center top rear
            return AV_CHAN_TOP_BACK_CENTER
        case kAudioChannelLabel_TopBackRight:
            // Rtr - right top rear
            return AV_CHAN_TOP_BACK_RIGHT
        case kAudioChannelLabel_RearSurroundLeft:
            // Rls - rear left surround
            return AV_CHAN_BACK_LEFT
        case kAudioChannelLabel_RearSurroundRight:
            // Rrs - rear right surround
            return AV_CHAN_BACK_RIGHT
        case kAudioChannelLabel_LeftWide:
            // Lw - left wide
            return AV_CHAN_WIDE_LEFT
        case kAudioChannelLabel_RightWide:
            // Rw - right wide
            return AV_CHAN_WIDE_RIGHT
        case kAudioChannelLabel_LFE2:
            // LFE2
            return AV_CHAN_LOW_FREQUENCY_2
        case kAudioChannelLabel_Mono:
            // C - center
            return AV_CHAN_FRONT_CENTER
        case kAudioChannelLabel_LeftTopMiddle:
            // Ltm - left top middle (no FFmpeg equivalent)
            return AV_CHAN_NONE
        case kAudioChannelLabel_RightTopMiddle:
            // Rtm - right top middle (no FFmpeg equivalent)
            return AV_CHAN_NONE
        case kAudioChannelLabel_LeftTopSurround:
            // Lts - Left top surround
            return AV_CHAN_TOP_SIDE_LEFT
        case kAudioChannelLabel_RightTopSurround:
            // Rts - Right top surround
            return AV_CHAN_TOP_SIDE_RIGHT
        case kAudioChannelLabel_LeftBottom:
            // Lb - left bottom
            return AV_CHAN_BOTTOM_FRONT_LEFT
        case kAudioChannelLabel_RightBottom:
            // Rb - Right bottom
            return AV_CHAN_BOTTOM_FRONT_RIGHT
        case kAudioChannelLabel_CenterBottom:
            // Cb - Center bottom
            return AV_CHAN_BOTTOM_FRONT_CENTER
        case kAudioChannelLabel_HeadphonesLeft:
            return AV_CHAN_STEREO_LEFT
        case kAudioChannelLabel_HeadphonesRight:
            return AV_CHAN_STEREO_RIGHT
        default:
            return AV_CHAN_NONE
        }
    }
}
|
||||
338
KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift
Normal file
338
KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift
Normal file
@@ -0,0 +1,338 @@
|
||||
//
|
||||
// AudioEnginePlayer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/11.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CoreAudio
|
||||
|
||||
/// Audio rendering backend contract: consumes decoded audio frames
/// (via FrameOutput) and exposes basic playback controls.
public protocol AudioOutput: FrameOutput {
    /// Playback speed multiplier.
    var playbackRate: Float { get set }
    /// Output volume.
    var volume: Float { get set }
    /// Whether audio output is muted.
    var isMuted: Bool { get set }
    init()
    /// Configures the output for the given PCM format before playback starts.
    func prepare(audioFormat: AVAudioFormat)
}
|
||||
|
||||
/// Adopted by audio outputs that expose an Apple DynamicsProcessor audio unit
/// whose parameters (attack, release, threshold, …) can be tuned.
public protocol AudioDynamicsProcessor {
    /// The underlying dynamics-processor AudioUnit to read/write parameters on.
    var audioUnitForDynamicsProcessor: AudioUnit { get }
}
|
||||
|
||||
public extension AudioDynamicsProcessor {
    /// Reads one global-scope parameter from the dynamics-processor unit.
    private func dynamicsValue(for param: AudioUnitParameterID) -> Float {
        var value = AudioUnitParameterValue(1.0)
        AudioUnitGetParameter(audioUnitForDynamicsProcessor, param, kAudioUnitScope_Global, 0, &value)
        return value
    }

    /// Writes one global-scope parameter on the dynamics-processor unit.
    private func setDynamicsValue(_ newValue: Float, for param: AudioUnitParameterID) {
        AudioUnitSetParameter(audioUnitForDynamicsProcessor, param, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0)
    }

    /// kDynamicsProcessorParam_AttackTime on the dynamics processor.
    var attackTime: Float {
        get { dynamicsValue(for: kDynamicsProcessorParam_AttackTime) }
        set { setDynamicsValue(newValue, for: kDynamicsProcessorParam_AttackTime) }
    }

    /// kDynamicsProcessorParam_ReleaseTime on the dynamics processor.
    var releaseTime: Float {
        get { dynamicsValue(for: kDynamicsProcessorParam_ReleaseTime) }
        set { setDynamicsValue(newValue, for: kDynamicsProcessorParam_ReleaseTime) }
    }

    /// kDynamicsProcessorParam_Threshold on the dynamics processor.
    var threshold: Float {
        get { dynamicsValue(for: kDynamicsProcessorParam_Threshold) }
        set { setDynamicsValue(newValue, for: kDynamicsProcessorParam_Threshold) }
    }

    /// kDynamicsProcessorParam_ExpansionRatio on the dynamics processor.
    var expansionRatio: Float {
        get { dynamicsValue(for: kDynamicsProcessorParam_ExpansionRatio) }
        set { setDynamicsValue(newValue, for: kDynamicsProcessorParam_ExpansionRatio) }
    }

    /// kDynamicsProcessorParam_OverallGain on the dynamics processor.
    var overallGain: Float {
        get { dynamicsValue(for: kDynamicsProcessorParam_OverallGain) }
        set { setDynamicsValue(newValue, for: kDynamicsProcessorParam_OverallGain) }
    }
}
|
||||
|
||||
/// AudioEnginePlayer variant that inserts an Apple dynamics-processor effect
/// into the engine graph and exposes its parameters via AudioDynamicsProcessor.
public final class AudioEngineDynamicsPlayer: AudioEnginePlayer, AudioDynamicsProcessor {
    // kAudioUnitSubType_DynamicsProcessor effect placed ahead of the base
    // player's nodes (see audioNodes()).
    private let dynamicsProcessor = AVAudioUnitEffect(audioComponentDescription:
        AudioComponentDescription(componentType: kAudioUnitType_Effect,
                                  componentSubType: kAudioUnitSubType_DynamicsProcessor,
                                  componentManufacturer: kAudioUnitManufacturer_Apple,
                                  componentFlags: 0,
                                  componentFlagsMask: 0))
    public var audioUnitForDynamicsProcessor: AudioUnit {
        dynamicsProcessor.audioUnit
    }

    /// Prepends the dynamics processor to the superclass's node chain.
    override func audioNodes() -> [AVAudioNode] {
        var nodes: [AVAudioNode] = [dynamicsProcessor]
        nodes.append(contentsOf: super.audioNodes())
        return nodes
    }

    public required init() {
        super.init()
        // The effect node must be attached to the engine before connecting.
        engine.attach(dynamicsProcessor)
    }
}
|
||||
|
||||
/// An `AudioOutput` backed by `AVAudioEngine`.
///
/// Decoded audio frames are pulled from `renderSource` inside an
/// `AVAudioSourceNode` render block and copied into the engine's buffers.
/// A time-pitch unit in the node chain provides rate control, and a render
/// notify on the output unit drives playback-clock updates.
public class AudioEnginePlayer: AudioOutput {
    public let engine = AVAudioEngine()
    // Source node that pulls frames from `renderSource`. Rebuilt in
    // `prepare(audioFormat:)` whenever the audio format changes.
    private var sourceNode: AVAudioSourceNode?
    // Format the current source node was built with; used to detect format changes.
    private var sourceNodeAudioFormat: AVAudioFormat?
    // private let reverb = AVAudioUnitReverb()
    // private let nbandEQ = AVAudioUnitEQ()
    // private let distortion = AVAudioUnitDistortion()
    // private let delay = AVAudioUnitDelay()
    // Rate control without pitch shift; exposed through `playbackRate`.
    private let timePitch = AVAudioUnitTimePitch()
    // Bytes per sample of the current format; refreshed in `prepare(audioFormat:)`.
    private var sampleSize = UInt32(MemoryLayout<Float>.size)
    // Read position, in frames, within `currentRender`.
    private var currentRenderReadOffset = UInt32(0)
    // Hardware output latency subtracted when reporting playback time.
    // Only populated on non-macOS platforms (AVAudioSession); 0 on macOS.
    private var outputLatency = TimeInterval(0)
    public weak var renderSource: OutputRenderSourceDelegate?
    // Frame currently being consumed by the render block.
    // Clearing it resets the read offset (see didSet).
    private var currentRender: AudioFrame? {
        didSet {
            if currentRender == nil {
                currentRenderReadOffset = 0
            }
        }
    }

    /// Playback rate backed by the time-pitch unit; writes are clamped to [1/32, 32].
    public var playbackRate: Float {
        get {
            timePitch.rate
        }
        set {
            timePitch.rate = min(32, max(1 / 32, newValue))
        }
    }

    /// Volume of the source node. Reads 1 while no source node exists;
    /// writes before `prepare(audioFormat:)` are silently dropped.
    public var volume: Float {
        get {
            sourceNode?.volume ?? 1
        }
        set {
            sourceNode?.volume = newValue
        }
    }

    /// Mute is modeled as main-mixer output volume 0; unmuting restores volume 1
    /// (any previous non-1 mixer volume is not preserved).
    public var isMuted: Bool {
        get {
            engine.mainMixerNode.outputVolume == 0.0
        }
        set {
            engine.mainMixerNode.outputVolume = newValue ? 0.0 : 1.0
        }
    }

    public required init() {
        engine.attach(timePitch)
        // Observe post-render callbacks on the hardware output unit so the
        // playback clock can be advanced after each render cycle.
        if let audioUnit = engine.outputNode.audioUnit {
            addRenderNotify(audioUnit: audioUnit)
        }
        #if !os(macOS)
        outputLatency = AVAudioSession.sharedInstance().outputLatency
        #endif
    }

    /// (Re)builds the source node and node chain for `audioFormat`.
    /// No-op when the format is unchanged. If the engine was running, it is
    /// restarted and `play()` is re-issued asynchronously on the main thread.
    public func prepare(audioFormat: AVAudioFormat) {
        if sourceNodeAudioFormat == audioFormat {
            return
        }
        sourceNodeAudioFormat = audioFormat
        #if !os(macOS)
        try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(audioFormat.channelCount))
        KSLog("[audio] set preferredOutputNumberOfChannels: \(audioFormat.channelCount)")
        #endif
        KSLog("[audio] outputFormat AudioFormat: \(audioFormat)")
        if let channelLayout = audioFormat.channelLayout {
            KSLog("[audio] outputFormat channelLayout \(channelLayout.channelDescriptions)")
        }
        // The engine must be fully stopped and reset before reattaching nodes
        // with a new format.
        let isRunning = engine.isRunning
        engine.stop()
        engine.reset()
        sourceNode = AVAudioSourceNode(format: audioFormat) { [weak self] _, timestamp, frameCount, audioBufferList in
            // A zero sample time indicates the engine hasn't produced a valid
            // timeline yet; skip pulling data in that case.
            if timestamp.pointee.mSampleTime == 0 {
                return noErr
            }
            self?.audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer(audioBufferList), numberOfFrames: frameCount)
            return noErr
        }
        guard let sourceNode else {
            return
        }
        KSLog("[audio] new sourceNode inputFormat: \(sourceNode.inputFormat(forBus: 0))")
        sampleSize = audioFormat.sampleSize
        engine.attach(sourceNode)
        var nodes: [AVAudioNode] = [sourceNode]
        nodes.append(contentsOf: audioNodes())
        // For more than two channels, connect straight through to the output
        // node as the end of the chain.
        if audioFormat.channelCount > 2 {
            nodes.append(engine.outputNode)
        }
        // The format must be passed explicitly; otherwise multi-channel
        // output can misbehave.
        engine.connect(nodes: nodes, format: audioFormat)
        engine.prepare()
        if isRunning {
            try? engine.start()
            // Calling start immediately when switching from multichannel to
            // stereo does not take effect; re-issue play asynchronously on
            // the main thread.
            DispatchQueue.main.async { [weak self] in
                self?.play()
            }
        }
    }

    /// Node chain inserted between the source node and (implicitly) the
    /// output. Subclasses override to inject extra effect nodes.
    func audioNodes() -> [AVAudioNode] {
        [timePitch, engine.mainMixerNode]
    }

    /// Starts the engine if it isn't already running; failures are logged.
    public func play() {
        if !engine.isRunning {
            do {
                try engine.start()
            } catch {
                KSLog(error)
            }
        }
    }

    /// Pauses the engine if it is running.
    public func pause() {
        if engine.isRunning {
            engine.pause()
        }
    }

    /// Drops the frame being consumed (e.g. on seek) and refreshes latency.
    public func flush() {
        currentRender = nil
        #if !os(macOS)
        // Must run on the main thread; doing this on the audio thread causes
        // audible glitches.
        outputLatency = AVAudioSession.sharedInstance().outputLatency
        #endif
    }

    /// Registers a post-render notify on `audioUnit` that advances the
    /// playback clock. `self` is passed unretained through the C callback's
    /// refCon; the callback must not outlive this object.
    private func addRenderNotify(audioUnit: AudioUnit) {
        AudioUnitAddRenderNotify(audioUnit, { refCon, ioActionFlags, inTimeStamp, _, _, _ in
            let `self` = Unmanaged<AudioEnginePlayer>.fromOpaque(refCon).takeUnretainedValue()
            autoreleasepool {
                // Only react after rendering has completed for this cycle.
                if ioActionFlags.pointee.contains(.unitRenderAction_PostRender) {
                    self.audioPlayerDidRenderSample(sampleTimestamp: inTimeStamp.pointee)
                }
            }
            return noErr
        }, Unmanaged.passUnretained(self).toOpaque())
    }

    // private func addRenderCallback(audioUnit: AudioUnit, streamDescription: UnsafePointer<AudioStreamBasicDescription>) {
    //     _ = AudioUnitSetProperty(audioUnit,
    //                              kAudioUnitProperty_StreamFormat,
    //                              kAudioUnitScope_Input,
    //                              0,
    //                              streamDescription,
    //                              UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
    //     var inputCallbackStruct = AURenderCallbackStruct()
    //     inputCallbackStruct.inputProcRefCon = Unmanaged.passUnretained(self).toOpaque()
    //     inputCallbackStruct.inputProc = { refCon, _, _, _, inNumberFrames, ioData in
    //         guard let ioData else {
    //             return noErr
    //         }
    //         let `self` = Unmanaged<AudioEnginePlayer>.fromOpaque(refCon).takeUnretainedValue()
    //         self.audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer(ioData), numberOfFrames: inNumberFrames)
    //         return noErr
    //     }
    //     _ = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputCallbackStruct, UInt32(MemoryLayout<AURenderCallbackStruct>.size))
    // }

    /// Fills `ioData` with up to `numberOfFrames` frames pulled from
    /// `renderSource`, spanning multiple `AudioFrame`s when needed.
    /// Runs on the real-time audio thread. Any unfilled tail is zeroed.
    /// NOTE(review): the per-buffer copy assumes non-interleaved (planar)
    /// data, one plane per buffer — confirm against `AudioFrame.data` layout.
    private func audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer, numberOfFrames: UInt32) {
        var ioDataWriteOffset = 0
        var numberOfSamples = numberOfFrames
        while numberOfSamples > 0 {
            if currentRender == nil {
                currentRender = renderSource?.getAudioOutputRender()
            }
            guard let currentRender else {
                // No data available; the remainder is zero-filled below.
                break
            }
            let residueLinesize = currentRender.numberOfSamples - currentRenderReadOffset
            guard residueLinesize > 0 else {
                // Frame fully consumed; drop it and fetch the next one.
                self.currentRender = nil
                continue
            }
            if sourceNodeAudioFormat != currentRender.audioFormat {
                // Format changed mid-stream: rebuild the chain on the main
                // thread and bail out of this render cycle.
                runOnMainThread { [weak self] in
                    guard let self else {
                        return
                    }
                    self.prepare(audioFormat: currentRender.audioFormat)
                }
                return
            }
            let framesToCopy = min(numberOfSamples, residueLinesize)
            let bytesToCopy = Int(framesToCopy * sampleSize)
            let offset = Int(currentRenderReadOffset * sampleSize)
            for i in 0 ..< min(ioData.count, currentRender.data.count) {
                if let source = currentRender.data[i], let destination = ioData[i].mData {
                    (destination + ioDataWriteOffset).copyMemory(from: source + offset, byteCount: bytesToCopy)
                }
            }
            numberOfSamples -= framesToCopy
            ioDataWriteOffset += bytesToCopy
            currentRenderReadOffset += framesToCopy
        }
        // Zero any portion we could not fill so stale memory is never played.
        let sizeCopied = (numberOfFrames - numberOfSamples) * sampleSize
        for i in 0 ..< ioData.count {
            let sizeLeft = Int(ioData[i].mDataByteSize - sizeCopied)
            if sizeLeft > 0 {
                memset(ioData[i].mData! + Int(sizeCopied), 0, sizeLeft)
            }
        }
    }

    /// Post-render hook: interpolates the playback position within the
    /// current frame and reports it (latency-compensated) to `renderSource`.
    private func audioPlayerDidRenderSample(sampleTimestamp _: AudioTimeStamp) {
        if let currentRender {
            // Interpolate within the frame using the consumed-sample ratio.
            let currentPreparePosition = currentRender.timestamp + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples)
            if currentPreparePosition > 0 {
                var time = currentRender.timebase.cmtime(for: currentPreparePosition)
                if outputLatency != 0 {
                    /// AVSampleBufferAudioRenderer does not need outputLatency
                    /// compensation; every other audio output does.
                    /// Without Bluetooth outputLatency is about 0.015; with
                    /// Bluetooth headphones it is about 0.176.
                    time = time - CMTime(seconds: outputLatency, preferredTimescale: time.timescale)
                }
                renderSource?.setAudio(time: time, position: currentRender.position)
            }
        }
    }
}
|
||||
|
||||
extension AVAudioEngine {
    /// Connects `nodes` into a linear chain (nodes[0] → nodes[1] → …),
    /// using `format` for every connection. Does nothing for fewer than
    /// two nodes.
    func connect(nodes: [AVAudioNode], format: AVAudioFormat?) {
        guard nodes.count > 1 else {
            return
        }
        // Pair each node with its successor and wire them in order.
        for (upstream, downstream) in zip(nodes, nodes.dropFirst()) {
            connect(upstream, to: downstream, format: format)
        }
    }
}
|
||||
303
KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift
Normal file
303
KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift
Normal file
@@ -0,0 +1,303 @@
|
||||
//
|
||||
// AudioGraphPlayer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/16.
|
||||
//
|
||||
|
||||
import AudioToolbox
|
||||
import AVFAudio
|
||||
import CoreAudio
|
||||
|
||||
/// An `AudioOutput` built on the (deprecated) AUGraph C API:
/// render callback → time-pitch → dynamics processor → mixer → hardware output.
/// Frames are pulled from `renderSource` in the extension's render callback.
public final class AudioGraphPlayer: AudioOutput, AudioDynamicsProcessor {
    // Dynamics-processor unit; backs the `AudioDynamicsProcessor` accessors.
    public private(set) var audioUnitForDynamicsProcessor: AudioUnit
    private let graph: AUGraph
    private var audioUnitForMixer: AudioUnit!
    private var audioUnitForTimePitch: AudioUnit!
    private var audioUnitForOutput: AudioUnit!
    // Read position, in frames, within `currentRender`.
    private var currentRenderReadOffset = UInt32(0)
    // Format the graph was last configured for; used to detect changes.
    private var sourceNodeAudioFormat: AVAudioFormat?
    // Bytes per sample of the current format; refreshed in `prepare(audioFormat:)`.
    private var sampleSize = UInt32(MemoryLayout<Float>.size)
    #if os(macOS)
    // Mixer volume saved when muting, restored on unmute (macOS models mute
    // as volume 0 on the stereo mixer).
    private var volumeBeforeMute: Float = 0.0
    #endif
    // Hardware output latency subtracted when reporting playback time
    // (non-macOS only; 0 on macOS).
    private var outputLatency = TimeInterval(0)
    public weak var renderSource: OutputRenderSourceDelegate?
    // Frame currently being consumed by the render callback.
    // Clearing it resets the read offset (see didSet).
    private var currentRender: AudioFrame? {
        didSet {
            if currentRender == nil {
                currentRenderReadOffset = 0
            }
        }
    }

    public func play() {
        AUGraphStart(graph)
    }

    public func pause() {
        AUGraphStop(graph)
    }

    /// Playback rate via kNewTimePitchParam_Rate on the time-pitch unit.
    /// Unlike `AudioEnginePlayer`, writes are not clamped here.
    public var playbackRate: Float {
        get {
            var playbackRate = AudioUnitParameterValue(0.0)
            AudioUnitGetParameter(audioUnitForTimePitch, kNewTimePitchParam_Rate, kAudioUnitScope_Global, 0, &playbackRate)
            return playbackRate
        }
        set {
            AudioUnitSetParameter(audioUnitForTimePitch, kNewTimePitchParam_Rate, kAudioUnitScope_Global, 0, newValue, 0)
        }
    }

    /// Mixer input volume. macOS uses the stereo mixer parameter; other
    /// platforms use the multichannel mixer parameter.
    public var volume: Float {
        get {
            var volume = AudioUnitParameterValue(0.0)
            #if os(macOS)
            let inID = kStereoMixerParam_Volume
            #else
            let inID = kMultiChannelMixerParam_Volume
            #endif
            AudioUnitGetParameter(audioUnitForMixer, inID, kAudioUnitScope_Input, 0, &volume)
            return volume
        }
        set {
            #if os(macOS)
            let inID = kStereoMixerParam_Volume
            #else
            let inID = kMultiChannelMixerParam_Volume
            #endif
            AudioUnitSetParameter(audioUnitForMixer, inID, kAudioUnitScope_Input, 0, newValue, 0)
        }
    }

    /// Mute state. On macOS mute is volume == 0 on the stereo mixer (the
    /// previous volume is saved/restored via `volumeBeforeMute`); elsewhere
    /// it toggles the multichannel mixer's Enable parameter.
    public var isMuted: Bool {
        get {
            var value = AudioUnitParameterValue(1.0)
            #if os(macOS)
            AudioUnitGetParameter(audioUnitForMixer, kStereoMixerParam_Volume, kAudioUnitScope_Input, 0, &value)
            #else
            AudioUnitGetParameter(audioUnitForMixer, kMultiChannelMixerParam_Enable, kAudioUnitScope_Input, 0, &value)
            #endif
            return value == 0
        }
        set {
            let value = newValue ? 0 : 1
            #if os(macOS)
            if value == 0 {
                // Remember the current volume so unmuting can restore it.
                volumeBeforeMute = volume
            }
            // Muting sets 0; unmuting restores the saved volume (capped at 1).
            AudioUnitSetParameter(audioUnitForMixer, kStereoMixerParam_Volume, kAudioUnitScope_Input, 0, min(Float(value), volumeBeforeMute), 0)
            #else
            AudioUnitSetParameter(audioUnitForMixer, kMultiChannelMixerParam_Enable, kAudioUnitScope_Input, 0, AudioUnitParameterValue(value), 0)
            #endif
        }
    }

    /// Builds and opens the AUGraph, wires
    /// timePitch → dynamicsProcessor → mixer → output, and registers a
    /// post-render notify on the output unit for clock updates.
    public init() {
        var newGraph: AUGraph!
        NewAUGraph(&newGraph)
        graph = newGraph
        var descriptionForTimePitch = AudioComponentDescription()
        descriptionForTimePitch.componentType = kAudioUnitType_FormatConverter
        descriptionForTimePitch.componentSubType = kAudioUnitSubType_NewTimePitch
        descriptionForTimePitch.componentManufacturer = kAudioUnitManufacturer_Apple
        var descriptionForDynamicsProcessor = AudioComponentDescription()
        descriptionForDynamicsProcessor.componentType = kAudioUnitType_Effect
        descriptionForDynamicsProcessor.componentManufacturer = kAudioUnitManufacturer_Apple
        descriptionForDynamicsProcessor.componentSubType = kAudioUnitSubType_DynamicsProcessor
        var descriptionForMixer = AudioComponentDescription()
        descriptionForMixer.componentType = kAudioUnitType_Mixer
        descriptionForMixer.componentManufacturer = kAudioUnitManufacturer_Apple
        #if os(macOS)
        descriptionForMixer.componentSubType = kAudioUnitSubType_StereoMixer
        #else
        descriptionForMixer.componentSubType = kAudioUnitSubType_MultiChannelMixer
        #endif
        var descriptionForOutput = AudioComponentDescription()
        descriptionForOutput.componentType = kAudioUnitType_Output
        descriptionForOutput.componentManufacturer = kAudioUnitManufacturer_Apple
        #if os(macOS)
        descriptionForOutput.componentSubType = kAudioUnitSubType_DefaultOutput
        #else
        descriptionForOutput.componentSubType = kAudioUnitSubType_RemoteIO
        #endif
        var nodeForTimePitch = AUNode()
        var nodeForDynamicsProcessor = AUNode()
        var nodeForMixer = AUNode()
        var nodeForOutput = AUNode()
        AUGraphAddNode(graph, &descriptionForTimePitch, &nodeForTimePitch)
        AUGraphAddNode(graph, &descriptionForMixer, &nodeForMixer)
        AUGraphAddNode(graph, &descriptionForDynamicsProcessor, &nodeForDynamicsProcessor)
        AUGraphAddNode(graph, &descriptionForOutput, &nodeForOutput)
        AUGraphOpen(graph)
        AUGraphConnectNodeInput(graph, nodeForTimePitch, 0, nodeForDynamicsProcessor, 0)
        AUGraphConnectNodeInput(graph, nodeForDynamicsProcessor, 0, nodeForMixer, 0)
        AUGraphConnectNodeInput(graph, nodeForMixer, 0, nodeForOutput, 0)
        AUGraphNodeInfo(graph, nodeForTimePitch, &descriptionForTimePitch, &audioUnitForTimePitch)
        // Fetched into a local optional because the stored property is
        // non-optional and must be assigned exactly once.
        var audioUnitForDynamicsProcessor: AudioUnit?
        AUGraphNodeInfo(graph, nodeForDynamicsProcessor, &descriptionForDynamicsProcessor, &audioUnitForDynamicsProcessor)
        self.audioUnitForDynamicsProcessor = audioUnitForDynamicsProcessor!
        AUGraphNodeInfo(graph, nodeForMixer, &descriptionForMixer, &audioUnitForMixer)
        AUGraphNodeInfo(graph, nodeForOutput, &descriptionForOutput, &audioUnitForOutput)
        addRenderNotify(audioUnit: audioUnitForOutput)
        var value = UInt32(1)
        // NOTE(review): kAudioOutputUnitProperty_EnableIO is being set on the
        // time-pitch unit here rather than the output unit — confirm this is
        // intentional.
        AudioUnitSetProperty(audioUnitForTimePitch,
                             kAudioOutputUnitProperty_EnableIO,
                             kAudioUnitScope_Output, 0,
                             &value,
                             UInt32(MemoryLayout<UInt32>.size))
        #if !os(macOS)
        outputLatency = AVAudioSession.sharedInstance().outputLatency
        #endif
    }

    /// Applies `audioFormat` (stream format + channel layout) to every unit
    /// in the graph, installs the render callback on the time-pitch unit's
    /// input, and initializes the graph. No-op if the format is unchanged.
    public func prepare(audioFormat: AVAudioFormat) {
        if sourceNodeAudioFormat == audioFormat {
            return
        }
        sourceNodeAudioFormat = audioFormat
        #if !os(macOS)
        try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(audioFormat.channelCount))
        KSLog("[audio] set preferredOutputNumberOfChannels: \(audioFormat.channelCount)")
        #endif
        sampleSize = audioFormat.sampleSize
        var audioStreamBasicDescription = audioFormat.formatDescription.audioStreamBasicDescription
        let audioStreamBasicDescriptionSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
        let channelLayout = audioFormat.channelLayout?.layout
        for unit in [audioUnitForTimePitch, audioUnitForDynamicsProcessor, audioUnitForMixer, audioUnitForOutput] {
            guard let unit else { continue }
            // Input-side stream format + channel layout for every unit.
            AudioUnitSetProperty(unit,
                                 kAudioUnitProperty_StreamFormat,
                                 kAudioUnitScope_Input, 0,
                                 &audioStreamBasicDescription,
                                 audioStreamBasicDescriptionSize)
            AudioUnitSetProperty(unit,
                                 kAudioUnitProperty_AudioChannelLayout,
                                 kAudioUnitScope_Input, 0,
                                 channelLayout,
                                 UInt32(MemoryLayout<AudioChannelLayout>.size))
            if unit != audioUnitForOutput {
                // The output unit's output scope belongs to the hardware;
                // only intermediate units get an output-side format.
                AudioUnitSetProperty(unit,
                                     kAudioUnitProperty_StreamFormat,
                                     kAudioUnitScope_Output, 0,
                                     &audioStreamBasicDescription,
                                     audioStreamBasicDescriptionSize)
                AudioUnitSetProperty(unit,
                                     kAudioUnitProperty_AudioChannelLayout,
                                     kAudioUnitScope_Output, 0,
                                     channelLayout,
                                     UInt32(MemoryLayout<AudioChannelLayout>.size))
            }
            if unit == audioUnitForTimePitch {
                // The time-pitch unit is the head of the chain, so the pull
                // callback is installed on its input.
                var inputCallbackStruct = renderCallbackStruct()
                AudioUnitSetProperty(unit,
                                     kAudioUnitProperty_SetRenderCallback,
                                     kAudioUnitScope_Input, 0,
                                     &inputCallbackStruct,
                                     UInt32(MemoryLayout<AURenderCallbackStruct>.size))
            }
        }
        AUGraphInitialize(graph)
    }

    /// Drops the frame being consumed (e.g. on seek) and refreshes latency.
    public func flush() {
        currentRender = nil
        #if !os(macOS)
        outputLatency = AVAudioSession.sharedInstance().outputLatency
        #endif
    }

    deinit {
        // Full AUGraph teardown: stop, uninitialize, close, dispose.
        AUGraphStop(graph)
        AUGraphUninitialize(graph)
        AUGraphClose(graph)
        DisposeAUGraph(graph)
    }
}
|
||||
|
||||
// MARK: - Render callbacks and data pulling
extension AudioGraphPlayer {
    /// Builds the AURenderCallbackStruct installed on the time-pitch unit's
    /// input. `self` is passed unretained as refCon; the graph must not
    /// outlive this object.
    private func renderCallbackStruct() -> AURenderCallbackStruct {
        var inputCallbackStruct = AURenderCallbackStruct()
        inputCallbackStruct.inputProcRefCon = Unmanaged.passUnretained(self).toOpaque()
        inputCallbackStruct.inputProc = { refCon, _, _, _, inNumberFrames, ioData in
            guard let ioData else {
                return noErr
            }
            let `self` = Unmanaged<AudioGraphPlayer>.fromOpaque(refCon).takeUnretainedValue()
            self.audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer(ioData), numberOfFrames: inNumberFrames)
            return noErr
        }
        return inputCallbackStruct
    }

    /// Registers a post-render notify on `audioUnit` that advances the
    /// playback clock after each render cycle.
    private func addRenderNotify(audioUnit: AudioUnit) {
        AudioUnitAddRenderNotify(audioUnit, { refCon, ioActionFlags, inTimeStamp, _, _, _ in
            let `self` = Unmanaged<AudioGraphPlayer>.fromOpaque(refCon).takeUnretainedValue()
            autoreleasepool {
                // Only react after rendering has completed for this cycle.
                if ioActionFlags.pointee.contains(.unitRenderAction_PostRender) {
                    self.audioPlayerDidRenderSample(sampleTimestamp: inTimeStamp.pointee)
                }
            }
            return noErr
        }, Unmanaged.passUnretained(self).toOpaque())
    }

    /// Fills `ioData` with up to `numberOfFrames` frames pulled from
    /// `renderSource`, spanning multiple `AudioFrame`s when needed.
    /// Runs on the real-time audio thread. Any unfilled tail is zeroed.
    private func audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer, numberOfFrames: UInt32) {
        var ioDataWriteOffset = 0
        var numberOfSamples = numberOfFrames
        while numberOfSamples > 0 {
            if currentRender == nil {
                currentRender = renderSource?.getAudioOutputRender()
            }
            guard let currentRender else {
                // No data available; the remainder is zero-filled below.
                break
            }
            let residueLinesize = currentRender.numberOfSamples - currentRenderReadOffset
            guard residueLinesize > 0 else {
                // Frame fully consumed; drop it and fetch the next one.
                self.currentRender = nil
                continue
            }
            if sourceNodeAudioFormat != currentRender.audioFormat {
                // Format changed mid-stream: reconfigure on the main thread
                // and bail out of this render cycle.
                runOnMainThread { [weak self] in
                    guard let self else {
                        return
                    }
                    self.prepare(audioFormat: currentRender.audioFormat)
                }
                return
            }
            let framesToCopy = min(numberOfSamples, residueLinesize)
            let bytesToCopy = Int(framesToCopy * sampleSize)
            let offset = Int(currentRenderReadOffset * sampleSize)
            for i in 0 ..< min(ioData.count, currentRender.data.count) {
                if let source = currentRender.data[i], let destination = ioData[i].mData {
                    (destination + ioDataWriteOffset).copyMemory(from: source + offset, byteCount: bytesToCopy)
                }
            }
            numberOfSamples -= framesToCopy
            ioDataWriteOffset += bytesToCopy
            currentRenderReadOffset += framesToCopy
        }
        // Zero any portion we could not fill so stale memory is never played.
        let sizeCopied = (numberOfFrames - numberOfSamples) * sampleSize
        for i in 0 ..< ioData.count {
            let sizeLeft = Int(ioData[i].mDataByteSize - sizeCopied)
            if sizeLeft > 0 {
                memset(ioData[i].mData! + Int(sizeCopied), 0, sizeLeft)
            }
        }
    }

    /// Post-render hook: interpolates the playback position within the
    /// current frame and reports it (latency-compensated) to `renderSource`.
    private func audioPlayerDidRenderSample(sampleTimestamp _: AudioTimeStamp) {
        if let currentRender {
            // Interpolate within the frame using the consumed-sample ratio.
            let currentPreparePosition = currentRender.timestamp + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples)
            if currentPreparePosition > 0 {
                var time = currentRender.timebase.cmtime(for: currentPreparePosition)
                if outputLatency != 0 {
                    // Compensate for hardware output latency when reporting
                    // the playback clock.
                    time = time - CMTime(seconds: outputLatency, preferredTimescale: time.timescale)
                }
                renderSource?.setAudio(time: time, position: currentRender.position)
            }
        }
    }
}
|
||||
@@ -0,0 +1,143 @@
|
||||
//
|
||||
// AudioRendererPlayer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2022/12/2.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
/// An `AudioOutput` built on `AVSampleBufferAudioRenderer` +
/// `AVSampleBufferRenderSynchronizer`. The renderer pulls `AudioFrame`s
/// from `renderSource`, batches them into `CMSampleBuffer`s, and the
/// synchronizer drives both playback rate and the reported audio clock.
public class AudioRendererPlayer: AudioOutput {
    /// Desired playback rate. Applied to the synchronizer immediately when
    /// playing; when paused it only takes effect on the next `play()`.
    public var playbackRate: Float = 1 {
        didSet {
            if !isPaused {
                synchronizer.rate = playbackRate
            }
        }
    }

    /// Forwarded to the sample-buffer renderer.
    public var volume: Float {
        get {
            renderer.volume
        }
        set {
            renderer.volume = newValue
        }
    }

    /// Forwarded to the sample-buffer renderer.
    public var isMuted: Bool {
        get {
            renderer.isMuted
        }
        set {
            renderer.isMuted = newValue
        }
    }

    public weak var renderSource: OutputRenderSourceDelegate?
    // Token for the periodic clock observer added in `play()`;
    // removed in `pause()`.
    private var periodicTimeObserver: Any?
    private let renderer = AVSampleBufferAudioRenderer()
    private let synchronizer = AVSampleBufferRenderSynchronizer()
    // Queue on which the renderer requests more media data.
    private let serializationQueue = DispatchQueue(label: "ks.player.serialization.queue")
    // Paused is modeled as synchronizer rate 0.
    var isPaused: Bool {
        synchronizer.rate == 0
    }

    public required init() {
        synchronizer.addRenderer(renderer)
        if #available(macOS 11.3, iOS 14.5, tvOS 14.5, *) {
            // Start playback immediately instead of waiting for the renderer
            // to accumulate "sufficient" media data.
            synchronizer.delaysRateChangeUntilHasSufficientMediaData = false
        }
        // if #available(tvOS 15.0, iOS 15.0, macOS 12.0, *) {
        //     renderer.allowedAudioSpatializationFormats = .monoStereoAndMultichannel
        // }
    }

    /// Only configures the session's preferred channel count; the renderer
    /// itself needs no per-format setup.
    public func prepare(audioFormat: AVAudioFormat) {
        #if !os(macOS)
        try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(audioFormat.channelCount))
        KSLog("[audio] set preferredOutputNumberOfChannels: \(audioFormat.channelCount)")
        #endif
    }

    /// Chooses a start time, sets the synchronizer running at `playbackRate`,
    /// begins feeding the renderer, and installs a periodic clock observer.
    /// NOTE(review): calling `play()` twice without an intervening `pause()`
    /// would install a second periodic observer while overwriting the first
    /// token — confirm callers always pair play/pause.
    public func play() {
        let time: CMTime
        if #available(macOS 11.3, iOS 14.5, tvOS 14.5, *) {
            // If enough media is buffered, resume from the synchronizer's
            // current time. After a seek the buffer is flushed, so the start
            // time is taken from the next frame instead.
            if renderer.hasSufficientMediaDataForReliablePlaybackStart {
                time = synchronizer.currentTime()
            } else {
                if let currentRender = renderSource?.getAudioOutputRender() {
                    time = currentRender.cmtime
                } else {
                    time = .zero
                }
            }
        } else {
            if let currentRender = renderSource?.getAudioOutputRender() {
                time = currentRender.cmtime
            } else {
                time = .zero
            }
        }
        synchronizer.setRate(playbackRate, time: time)
        // Report the start time manually so the audio clock updates promptly.
        renderSource?.setAudio(time: time, position: -1)
        renderer.requestMediaDataWhenReady(on: serializationQueue) { [weak self] in
            guard let self else {
                return
            }
            self.request()
        }
        // Periodically publish the synchronizer's clock as the audio time.
        // position -1: only the time is meaningful here, not a frame position.
        periodicTimeObserver = synchronizer.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.01), queue: .main) { [weak self] time in
            guard let self else {
                return
            }
            self.renderSource?.setAudio(time: time, position: -1)
        }
    }

    /// Stops the synchronizer, stops requesting data, and removes the
    /// periodic clock observer installed by `play()`.
    public func pause() {
        synchronizer.rate = 0
        renderer.stopRequestingMediaData()
        if let periodicTimeObserver {
            synchronizer.removeTimeObserver(periodicTimeObserver)
            self.periodicTimeObserver = nil
        }
    }

    /// Discards all enqueued (not yet rendered) sample buffers, e.g. on seek.
    public func flush() {
        renderer.flush()
    }

    /// Feeds the renderer while it is ready and playback is active: frames
    /// are pulled from `renderSource`, batched (the loop-count arithmetic
    /// targets roughly 1/20th of a second of audio per enqueue), converted
    /// to a `CMSampleBuffer`, and enqueued.
    private func request() {
        while renderer.isReadyForMoreMediaData, !isPaused {
            guard var render = renderSource?.getAudioOutputRender() else {
                break
            }
            var array = [render]
            // Number of extra frames to batch so each enqueue carries about
            // sampleRate/20 samples (~50 ms), minus the two already counted.
            let loopCount = Int32(render.audioFormat.sampleRate) / 20 / Int32(render.numberOfSamples) - 2
            if loopCount > 0 {
                for _ in 0 ..< loopCount {
                    if let render = renderSource?.getAudioOutputRender() {
                        array.append(render)
                    }
                }
            }
            if array.count > 1 {
                // Merge the batch into a single frame before conversion.
                render = AudioFrame(array: array)
            }
            if let sampleBuffer = render.toCMSampleBuffer() {
                let channelCount = render.audioFormat.channelCount
                // Spectral pitch algorithm for multichannel, timeDomain for stereo/mono.
                renderer.audioTimePitchAlgorithm = channelCount > 2 ? .spectral : .timeDomain
                renderer.enqueue(sampleBuffer)
                #if !os(macOS)
                // NOTE(review): this compares preferredInputNumberOfChannels
                // but then sets the *output* channel count — looks like it was
                // meant to read preferredOutputNumberOfChannels; confirm.
                if AVAudioSession.sharedInstance().preferredInputNumberOfChannels != channelCount {
                    try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(channelCount))
                }
                #endif
            }
        }
    }
}
|
||||
197
KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioUnitPlayer.swift
Normal file
197
KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioUnitPlayer.swift
Normal file
@@ -0,0 +1,197 @@
|
||||
//
|
||||
// AudioUnitPlayer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/16.
|
||||
//
|
||||
|
||||
import AudioToolbox
|
||||
import AVFAudio
|
||||
import CoreAudio
|
||||
|
||||
/// A minimal `AudioOutput` built directly on a single output AudioUnit
/// (HAL output on macOS, RemoteIO elsewhere). Frames are pulled from
/// `renderSource` via the render callback installed in `prepare(audioFormat:)`.
public final class AudioUnitPlayer: AudioOutput {
    private var audioUnitForOutput: AudioUnit!
    // Read position, in frames, within `currentRender`.
    private var currentRenderReadOffset = UInt32(0)
    // Format the output unit was last configured for; used to detect changes.
    private var sourceNodeAudioFormat: AVAudioFormat?
    // Bytes per sample of the current format; refreshed in `prepare(audioFormat:)`.
    private var sampleSize = UInt32(MemoryLayout<Float>.size)
    public weak var renderSource: OutputRenderSourceDelegate?
    // Frame currently being consumed by the render callback.
    // Clearing it resets the read offset (see didSet).
    private var currentRender: AudioFrame? {
        didSet {
            if currentRender == nil {
                currentRenderReadOffset = 0
            }
        }
    }

    // Guards against redundant AudioOutputUnitStart/Stop calls.
    private var isPlaying = false
    public func play() {
        if !isPlaying {
            isPlaying = true
            AudioOutputUnitStart(audioUnitForOutput)
        }
    }

    public func pause() {
        if isPlaying {
            isPlaying = false
            AudioOutputUnitStop(audioUnitForOutput)
        }
    }

    // NOTE(review): `playbackRate` and `volume` are stored here to satisfy
    // the AudioOutput interface but are never read anywhere in this class —
    // this output appears not to support rate/volume control; confirm.
    public var playbackRate: Float = 1
    public var volume: Float = 1
    // Honored in the render callback: muted output is zero-filled there.
    public var isMuted: Bool = false
    // Hardware output latency subtracted when reporting playback time
    // (non-macOS only; 0 on macOS).
    private var outputLatency = TimeInterval(0)
    /// Instantiates the platform output unit and enables IO on its output scope.
    public init() {
        var descriptionForOutput = AudioComponentDescription()
        descriptionForOutput.componentType = kAudioUnitType_Output
        descriptionForOutput.componentManufacturer = kAudioUnitManufacturer_Apple
        #if os(macOS)
        descriptionForOutput.componentSubType = kAudioUnitSubType_HALOutput
        #else
        descriptionForOutput.componentSubType = kAudioUnitSubType_RemoteIO
        outputLatency = AVAudioSession.sharedInstance().outputLatency
        #endif
        let nodeForOutput = AudioComponentFindNext(nil, &descriptionForOutput)
        AudioComponentInstanceNew(nodeForOutput!, &audioUnitForOutput)
        var value = UInt32(1)
        AudioUnitSetProperty(audioUnitForOutput,
                             kAudioOutputUnitProperty_EnableIO,
                             kAudioUnitScope_Output, 0,
                             &value,
                             UInt32(MemoryLayout<UInt32>.size))
    }

    /// Applies `audioFormat` to the output unit's input scope, installs the
    /// render callback and post-render notify, then initializes the unit.
    /// No-op when the format is unchanged.
    public func prepare(audioFormat: AVAudioFormat) {
        if sourceNodeAudioFormat == audioFormat {
            return
        }
        sourceNodeAudioFormat = audioFormat
        #if !os(macOS)
        try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(audioFormat.channelCount))
        KSLog("[audio] set preferredOutputNumberOfChannels: \(audioFormat.channelCount)")
        #endif
        sampleSize = audioFormat.sampleSize
        var audioStreamBasicDescription = audioFormat.formatDescription.audioStreamBasicDescription
        AudioUnitSetProperty(audioUnitForOutput,
                             kAudioUnitProperty_StreamFormat,
                             kAudioUnitScope_Input, 0,
                             &audioStreamBasicDescription,
                             UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
        let channelLayout = audioFormat.channelLayout?.layout
        AudioUnitSetProperty(audioUnitForOutput,
                             kAudioUnitProperty_AudioChannelLayout,
                             kAudioUnitScope_Input, 0,
                             channelLayout,
                             UInt32(MemoryLayout<AudioChannelLayout>.size))
        var inputCallbackStruct = renderCallbackStruct()
        AudioUnitSetProperty(audioUnitForOutput,
                             kAudioUnitProperty_SetRenderCallback,
                             kAudioUnitScope_Input, 0,
                             &inputCallbackStruct,
                             UInt32(MemoryLayout<AURenderCallbackStruct>.size))
        addRenderNotify(audioUnit: audioUnitForOutput)
        AudioUnitInitialize(audioUnitForOutput)
    }

    /// Drops the frame being consumed (e.g. on seek) and refreshes latency.
    public func flush() {
        currentRender = nil
        #if !os(macOS)
        outputLatency = AVAudioSession.sharedInstance().outputLatency
        #endif
    }

    deinit {
        AudioUnitUninitialize(audioUnitForOutput)
    }
}
|
||||
|
||||
// MARK: - Render callbacks and data pulling
extension AudioUnitPlayer {
    /// Builds the AURenderCallbackStruct installed on the output unit's
    /// input. `self` is passed unretained as refCon; the unit must not
    /// outlive this object.
    private func renderCallbackStruct() -> AURenderCallbackStruct {
        var inputCallbackStruct = AURenderCallbackStruct()
        inputCallbackStruct.inputProcRefCon = Unmanaged.passUnretained(self).toOpaque()
        inputCallbackStruct.inputProc = { refCon, _, _, _, inNumberFrames, ioData in
            guard let ioData else {
                return noErr
            }
            let `self` = Unmanaged<AudioUnitPlayer>.fromOpaque(refCon).takeUnretainedValue()
            self.audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer(ioData), numberOfFrames: inNumberFrames)
            return noErr
        }
        return inputCallbackStruct
    }

    /// Registers a post-render notify on `audioUnit` that advances the
    /// playback clock after each render cycle.
    private func addRenderNotify(audioUnit: AudioUnit) {
        AudioUnitAddRenderNotify(audioUnit, { refCon, ioActionFlags, inTimeStamp, _, _, _ in
            let `self` = Unmanaged<AudioUnitPlayer>.fromOpaque(refCon).takeUnretainedValue()
            autoreleasepool {
                // Only react after rendering has completed for this cycle.
                if ioActionFlags.pointee.contains(.unitRenderAction_PostRender) {
                    self.audioPlayerDidRenderSample(sampleTimestamp: inTimeStamp.pointee)
                }
            }
            return noErr
        }, Unmanaged.passUnretained(self).toOpaque())
    }

    /// Fills `ioData` with up to `numberOfFrames` frames pulled from
    /// `renderSource`, spanning multiple `AudioFrame`s when needed. When
    /// `isMuted`, output bytes are zeroed instead of copied while frames are
    /// still consumed, so the playback clock keeps advancing. Runs on the
    /// real-time audio thread. Any unfilled tail is zeroed.
    private func audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer, numberOfFrames: UInt32) {
        var ioDataWriteOffset = 0
        var numberOfSamples = numberOfFrames
        while numberOfSamples > 0 {
            if currentRender == nil {
                currentRender = renderSource?.getAudioOutputRender()
            }
            guard let currentRender else {
                // No data available; the remainder is zero-filled below.
                break
            }
            let residueLinesize = currentRender.numberOfSamples - currentRenderReadOffset
            guard residueLinesize > 0 else {
                // Frame fully consumed; drop it and fetch the next one.
                self.currentRender = nil
                continue
            }
            if sourceNodeAudioFormat != currentRender.audioFormat {
                // Format changed mid-stream: reconfigure on the main thread
                // and bail out of this render cycle.
                runOnMainThread { [weak self] in
                    guard let self else {
                        return
                    }
                    self.prepare(audioFormat: currentRender.audioFormat)
                }
                return
            }
            let framesToCopy = min(numberOfSamples, residueLinesize)
            let bytesToCopy = Int(framesToCopy * sampleSize)
            let offset = Int(currentRenderReadOffset * sampleSize)
            for i in 0 ..< min(ioData.count, currentRender.data.count) {
                if let source = currentRender.data[i], let destination = ioData[i].mData {
                    if isMuted {
                        // Muted: emit silence but keep consuming the frame.
                        memset(destination + ioDataWriteOffset, 0, bytesToCopy)
                    } else {
                        (destination + ioDataWriteOffset).copyMemory(from: source + offset, byteCount: bytesToCopy)
                    }
                }
            }
            numberOfSamples -= framesToCopy
            ioDataWriteOffset += bytesToCopy
            currentRenderReadOffset += framesToCopy
        }
        // Zero any portion we could not fill so stale memory is never played.
        let sizeCopied = (numberOfFrames - numberOfSamples) * sampleSize
        for i in 0 ..< ioData.count {
            let sizeLeft = Int(ioData[i].mDataByteSize - sizeCopied)
            if sizeLeft > 0 {
                memset(ioData[i].mData! + Int(sizeCopied), 0, sizeLeft)
            }
        }
    }

    /// Post-render hook: interpolates the playback position within the
    /// current frame and reports it (latency-compensated) to `renderSource`.
    private func audioPlayerDidRenderSample(sampleTimestamp _: AudioTimeStamp) {
        if let currentRender {
            // Interpolate within the frame using the consumed-sample ratio.
            let currentPreparePosition = currentRender.timestamp + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples)
            if currentPreparePosition > 0 {
                var time = currentRender.timebase.cmtime(for: currentPreparePosition)
                if outputLatency != 0 {
                    // Compensate for hardware output latency when reporting
                    // the playback clock.
                    time = time - CMTime(seconds: outputLatency, preferredTimescale: time.timescale)
                }
                renderSource?.setAudio(time: time, position: currentRender.position)
            }
        }
    }
}
|
||||
183
KSPlayer-main/Sources/KSPlayer/MEPlayer/CircularBuffer.swift
Normal file
183
KSPlayer-main/Sources/KSPlayer/MEPlayer/CircularBuffer.swift
Normal file
@@ -0,0 +1,183 @@
|
||||
//
|
||||
// CircularBuffer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
|
||||
/// A single-producer, multi-consumer blocking queue / blocking ring buffer.
/// The ring can additionally keep its items ordered by timestamp.
/// NOTE (translated from the original comment): seek has to flush the queue,
/// which is what makes it multi-consumer; it may be possible to change it to
/// single-consumer later.
public class CircularBuffer<Item: ObjectQueueItem> {
    // Backing ring storage. Capacity is always a power of two so physical
    // slots can be computed with `index & mask` instead of `%`.
    private var _buffer = ContiguousArray<Item?>()
    // private let semaphore = DispatchSemaphore(value: 0)
    // Guards all mutation and provides the blocking behavior of push/pop.
    private let condition = NSCondition()
    // Monotonically increasing logical indices; wrapping arithmetic (&-, &+)
    // keeps tail - head correct even across UInt overflow.
    private var headIndex = UInt(0)
    private var tailIndex = UInt(0)
    // true: a full buffer grows; false: push blocks until pop frees a slot.
    private let expanding: Bool
    // true: push keeps items ordered by `timestamp` (single insertion pass).
    private let sorted: Bool
    // Set by shutdown(); push/pop become no-ops afterwards.
    private var destroyed = false
    @inline(__always)
    private var _count: Int { Int(tailIndex &- headIndex) }
    @inline(__always)
    public var count: Int {
        // Deliberately reads without the lock (see commented-out locking):
        // a slightly stale count is acceptable for its callers.
        // condition.lock()
        // defer { condition.unlock() }
        Int(tailIndex &- headIndex)
    }

    public internal(set) var fps: Float = 24
    public private(set) var maxCount: Int
    // maxCount - 1; valid because maxCount is a power of two.
    private var mask: UInt
    /// - Parameters:
    ///   - initialCapacity: Rounded up to the next power of two.
    ///   - sorted: Keep items ordered by timestamp on push.
    ///   - expanding: Grow when full instead of blocking the producer.
    public init(initialCapacity: Int = 256, sorted: Bool = false, expanding: Bool = true) {
        self.expanding = expanding
        self.sorted = sorted
        let capacity = initialCapacity.nextPowerOf2()
        _buffer = ContiguousArray<Item?>(repeating: nil, count: Int(capacity))
        maxCount = Int(capacity)
        mask = UInt(maxCount - 1)
        assert(_buffer.count == capacity)
    }

    /// Appends `value`; keeps timestamp order when `sorted`. If the buffer is
    /// full it either doubles capacity (`expanding`) or blocks the producer
    /// until a consumer frees space.
    public func push(_ value: Item) {
        condition.lock()
        defer { condition.unlock() }
        if destroyed {
            return
        }
        if _buffer[Int(tailIndex & mask)] != nil {
            assertionFailure("value is not nil of headIndex: \(headIndex),tailIndex: \(tailIndex), bufferCount: \(_buffer.count), mask: \(mask)")
        }
        _buffer[Int(tailIndex & mask)] = value
        if sorted {
            // Translated from the original comment: a single insertion pass is
            // more efficient here than calling sort().
            var index = tailIndex
            while index > headIndex {
                guard let item = _buffer[Int((index - 1) & mask)] else {
                    assertionFailure("value is nil of index: \((index - 1) & mask) headIndex: \(headIndex),tailIndex: \(tailIndex), bufferCount: \(_buffer.count), mask: \(mask)")
                    break
                }
                if item.timestamp <= _buffer[Int(index & mask)]!.timestamp {
                    break
                }
                _buffer.swapAt(Int((index - 1) & mask), Int(index & mask))
                index -= 1
            }
        }
        tailIndex &+= 1
        if _count >= maxCount {
            if expanding {
                // No more room left for another append so grow the buffer now.
                _doubleCapacity()
            } else {
                // Fixed capacity: block the producer; pop signals when the
                // buffer has drained to half capacity.
                condition.wait()
            }
        } else {
            // Translated from the original comment: signal as soon as there is
            // any data, because this could be the last item.
            if _count == 1 {
                condition.signal()
            }
        }
    }

    /// Removes and returns the head item.
    /// - Parameters:
    ///   - wait: Block until an item arrives (or the buffer is destroyed).
    ///   - predicate: When provided and it rejects (item, count), the item is
    ///     left in place and nil is returned.
    public func pop(wait: Bool = false, where predicate: ((Item, Int) -> Bool)? = nil) -> Item? {
        condition.lock()
        defer { condition.unlock() }
        if destroyed {
            return nil
        }
        if headIndex == tailIndex {
            if wait {
                condition.wait()
                // Re-check after waking: flush/shutdown broadcasts with no data.
                if destroyed || headIndex == tailIndex {
                    return nil
                }
            } else {
                return nil
            }
        }
        let index = Int(headIndex & mask)
        guard let item = _buffer[index] else {
            assertionFailure("value is nil of index: \(index) headIndex: \(headIndex),tailIndex: \(tailIndex), bufferCount: \(_buffer.count), mask: \(mask)")
            return nil
        }
        if let predicate, !predicate(item, _count) {
            return nil
        } else {
            headIndex &+= 1
            _buffer[index] = nil
            // Wake a producer blocked in push() once half the capacity is free.
            if _count == maxCount >> 1 {
                condition.signal()
            }
            return item
        }
    }

    /// Scans from head to tail, removing and returning every item matching
    /// `predicate`. NOTE(review): when a match follows a non-match, headIndex
    /// jumps past the unmatched item while it is still stored — presumably the
    /// callers' predicates match a contiguous prefix; verify before reuse.
    public func search(where predicate: (Item) -> Bool) -> [Item] {
        condition.lock()
        defer { condition.unlock() }
        var i = headIndex
        var result = [Item]()
        while i < tailIndex {
            if let item = _buffer[Int(i & mask)] {
                if predicate(item) {
                    result.append(item)
                    _buffer[Int(i & mask)] = nil
                    headIndex = i + 1
                }
            } else {
                assertionFailure("value is nil of index: \(i) headIndex: \(headIndex), tailIndex: \(tailIndex), bufferCount: \(_buffer.count), mask: \(mask)")
                return result
            }
            i += 1
        }
        return result
    }

    /// Discards all items and wakes every blocked producer/consumer.
    /// After shutdown() the storage collapses to a single slot.
    public func flush() {
        condition.lock()
        defer { condition.unlock() }
        headIndex = 0
        tailIndex = 0
        _buffer.removeAll(keepingCapacity: !destroyed)
        _buffer.append(contentsOf: ContiguousArray<Item?>(repeating: nil, count: destroyed ? 1 : maxCount))
        condition.broadcast()
    }

    /// Permanently disables the buffer and releases its contents.
    public func shutdown() {
        destroyed = true
        flush()
    }

    /// Doubles the ring capacity, linearizing the current contents to the
    /// front of the new storage. Caller must hold `condition`.
    private func _doubleCapacity() {
        var newBacking: ContiguousArray<Item?> = []
        let newCapacity = maxCount << 1 // Double the storage.
        precondition(newCapacity > 0, "Can't double capacity of \(_buffer.count)")
        assert(newCapacity % 2 == 0)
        newBacking.reserveCapacity(newCapacity)
        // Copy the wrapped ring in order: [head..end) then [0..head).
        let head = Int(headIndex & mask)
        newBacking.append(contentsOf: _buffer[head ..< maxCount])
        if head > 0 {
            newBacking.append(contentsOf: _buffer[0 ..< head])
        }
        let repeatitionCount = newCapacity &- newBacking.count
        newBacking.append(contentsOf: repeatElement(nil, count: repeatitionCount))
        headIndex = 0
        tailIndex = UInt(newBacking.count &- repeatitionCount)
        _buffer = newBacking
        maxCount = newCapacity
        mask = UInt(maxCount - 1)
    }
}
|
||||
|
||||
extension FixedWidthInteger {
    /// Returns the smallest power of two greater than or equal to `self`
    /// (0 maps to 1). Intended for positive values small enough that the
    /// result is representable; behavior for other inputs follows Swift's
    /// non-masking shift semantics.
    @inline(__always)
    func nextPowerOf2() -> Self {
        // Zero has no predecessor to bit-twiddle on; 2^0 is the answer.
        if self == 0 {
            return 1
        }
        // The highest set bit of (self - 1) determines the exponent; exact
        // powers of two therefore map to themselves.
        let shift = Self.bitWidth - (self - 1).leadingZeroBitCount
        return 1 << shift
    }
}
|
||||
29
KSPlayer-main/Sources/KSPlayer/MEPlayer/EmbedDataSouce.swift
Normal file
29
KSPlayer-main/Sources/KSPlayer/MEPlayer/EmbedDataSouce.swift
Normal file
@@ -0,0 +1,29 @@
|
||||
//
|
||||
// EmbedDataSouce.swift
|
||||
// KSPlayer-7de52535
|
||||
//
|
||||
// Created by kintan on 2018/8/7.
|
||||
//
|
||||
import Foundation
|
||||
import Libavcodec
|
||||
import Libavutil
|
||||
|
||||
extension FFmpegAssetTrack: SubtitleInfo {
    /// Identifier for this subtitle stream, derived from the FFmpeg stream index.
    public var subtitleID: String {
        "\(trackID)"
    }
}
|
||||
|
||||
extension FFmpegAssetTrack: KSSubtitleProtocol {
    /// Returns the subtitle parts matching `time`, pulled (and removed) from
    /// this track's decoded-frame queue. Empty when no decoder is attached.
    public func search(for time: TimeInterval) -> [SubtitlePart] {
        guard let queue = subtitle?.outputRenderQueue else {
            return []
        }
        let frames = queue.search { $0.part == time }
        return frames.map(\.part)
    }
}
|
||||
|
||||
extension KSMEPlayer: SubtitleDataSouce {
    /// All subtitle tracks of the current item that can serve as subtitle
    /// sources (FFmpegAssetTrack conforms to SubtitleInfo, so embedded
    /// streams qualify).
    public var infos: [any SubtitleInfo] {
        tracks(mediaType: .subtitle).compactMap { $0 as? (any SubtitleInfo) }
    }
}
|
||||
277
KSPlayer-main/Sources/KSPlayer/MEPlayer/FFmpegAssetTrack.swift
Normal file
277
KSPlayer-main/Sources/KSPlayer/MEPlayer/FFmpegAssetTrack.swift
Normal file
@@ -0,0 +1,277 @@
|
||||
//
|
||||
// FFmpegAssetTrack.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/2/12.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import FFmpegKit
|
||||
import Libavformat
|
||||
|
||||
/// Wraps a single FFmpeg stream (audio, video, or subtitle) and exposes it as
/// a `MediaPlayerTrack`, building the matching CoreMedia format description.
public class FFmpegAssetTrack: MediaPlayerTrack {
    // FFmpeg stream index; 0 for synthetic tracks created from bare codecpar.
    public private(set) var trackID: Int32 = 0
    public let codecName: String
    public var name: String = ""
    // ISO language tag from stream metadata; nil when absent or "und".
    public private(set) var languageCode: String?
    public var nominalFrameRate: Float = 0
    public private(set) var avgFrameRate = Timebase.defaultValue
    public private(set) var realFrameRate = Timebase.defaultValue
    public private(set) var bitRate: Int64 = 0
    public let mediaType: AVFoundation.AVMediaType
    // Pixel-format / sample-format name reported by FFmpeg, when known.
    public let formatName: String?
    public let bitDepth: Int32
    // Backing AVStream; nil for tracks built from codecpar only.
    private var stream: UnsafeMutablePointer<AVStream>?
    var startTime = CMTime.zero
    var codecpar: AVCodecParameters
    var timebase: Timebase = .defaultValue
    let bitsPerRawSample: Int32
    // audio
    public let audioDescriptor: AudioDescriptor?
    // subtitle
    public let isImageSubtitle: Bool
    public var delay: TimeInterval = 0
    var subtitle: SyncPlayerItemTrack<SubtitleFrame>?
    // video
    public private(set) var rotation: Int16 = 0
    public var dovi: DOVIDecoderConfigurationRecord?
    public let fieldOrder: FFmpegFieldOrder
    public let formatDescription: CMFormatDescription?
    // Synthetic EIA-608 track discovered while decoding this video track.
    var closedCaptionsTrack: FFmpegAssetTrack?
    // true when extradata was patched from 0xFE to 0xFF NAL-size convention.
    let isConvertNALSize: Bool
    var seekByBytes = false
    /// Human-readable summary: codec, format, sample rate/channels or
    /// resolution/fps, bitrate, and language.
    public var description: String {
        var description = codecName
        if let formatName {
            description += ", \(formatName)"
        }
        if bitsPerRawSample > 0 {
            description += "(\(bitsPerRawSample.kmFormatted) bit)"
        }
        if let audioDescriptor {
            description += ", \(audioDescriptor.sampleRate)Hz"
            description += ", \(audioDescriptor.channel.description)"
        }
        if let formatDescription {
            if mediaType == .video {
                let naturalSize = formatDescription.naturalSize
                description += ", \(Int(naturalSize.width))x\(Int(naturalSize.height))"
                description += String(format: ", %.2f fps", nominalFrameRate)
            }
        }
        if bitRate > 0 {
            description += ", \(bitRate.kmFormatted)bps"
        }
        if let language {
            description += "(\(language))"
        }
        return description
    }

    /// Builds a track from a demuxed AVStream, deriving timing, frame rate,
    /// metadata (language/title), and subtitle dispositions. Fails when the
    /// codec parameters describe an unsupported media type.
    convenience init?(stream: UnsafeMutablePointer<AVStream>) {
        let codecpar = stream.pointee.codecpar.pointee
        self.init(codecpar: codecpar)
        self.stream = stream
        let metadata = toDictionary(stream.pointee.metadata)
        if let value = metadata["variant_bitrate"] ?? metadata["BPS"], let bitRate = Int64(value) {
            self.bitRate = bitRate
        }
        trackID = stream.pointee.index
        var timebase = Timebase(stream.pointee.time_base)
        if timebase.num <= 0 || timebase.den <= 0 {
            // Fall back to a millisecond timebase when the stream's is invalid.
            timebase = Timebase(num: 1, den: 1000)
        }
        if stream.pointee.start_time != Int64.min {
            startTime = timebase.cmtime(for: stream.pointee.start_time)
        }
        self.timebase = timebase
        avgFrameRate = Timebase(stream.pointee.avg_frame_rate)
        realFrameRate = Timebase(stream.pointee.r_frame_rate)
        if mediaType == .audio {
            // Audio "frame rate" = packets per second; clamped to >= 48.
            var frameSize = codecpar.frame_size
            if frameSize < 1 {
                frameSize = timebase.den / timebase.num
            }
            nominalFrameRate = max(Float(codecpar.sample_rate / frameSize), 48)
        } else {
            // Prefer nb_frames/duration, then avg_frame_rate, then a 24fps default.
            if stream.pointee.duration > 0, stream.pointee.nb_frames > 0, stream.pointee.nb_frames != stream.pointee.duration {
                nominalFrameRate = Float(stream.pointee.nb_frames) * Float(timebase.den) / Float(stream.pointee.duration) * Float(timebase.num)
            } else if avgFrameRate.den > 0, avgFrameRate.num > 0 {
                nominalFrameRate = Float(avgFrameRate.num) / Float(avgFrameRate.den)
            } else {
                nominalFrameRate = 24
            }
        }

        if let value = metadata["language"], value != "und" {
            languageCode = value
        } else {
            languageCode = nil
        }
        if let value = metadata["title"] {
            name = value
        } else {
            name = languageCode ?? codecName
        }
        // AV_DISPOSITION_DEFAULT
        if mediaType == .subtitle {
            // Text subtitles default to enabled; image subtitles only when forced.
            isEnabled = !isImageSubtitle || stream.pointee.disposition & AV_DISPOSITION_FORCED == AV_DISPOSITION_FORCED
            if stream.pointee.disposition & AV_DISPOSITION_HEARING_IMPAIRED == AV_DISPOSITION_HEARING_IMPAIRED {
                name += "(hearing impaired)"
            }
        }
        // var buf = [Int8](repeating: 0, count: 256)
        // avcodec_string(&buf, buf.count, codecpar, 0)
    }

    /// Builds a track from bare codec parameters, creating the CoreMedia
    /// `CMFormatDescription` for audio/video/subtitle. Returns nil for
    /// unsupported media types or when VP9 extradata synthesis fails.
    init?(codecpar: AVCodecParameters) {
        self.codecpar = codecpar
        bitRate = codecpar.bit_rate
        // codec_tag byte order is LSB first CMFormatDescription.MediaSubType(rawValue: codecpar.codec_tag.bigEndian)
        let codecType = codecpar.codec_id.mediaSubType
        var codecName = ""
        if let descriptor = avcodec_descriptor_get(codecpar.codec_id) {
            codecName += String(cString: descriptor.pointee.name)
            if let profile = descriptor.pointee.profiles {
                codecName += " (\(String(cString: profile.pointee.name)))"
            }
        } else {
            codecName = ""
        }
        self.codecName = codecName
        fieldOrder = FFmpegFieldOrder(rawValue: UInt8(codecpar.field_order.rawValue)) ?? .unknown
        var formatDescriptionOut: CMFormatDescription?
        if codecpar.codec_type == AVMEDIA_TYPE_AUDIO {
            mediaType = .audio
            audioDescriptor = AudioDescriptor(codecpar: codecpar)
            isConvertNALSize = false
            bitDepth = 0
            let layout = codecpar.ch_layout
            let channelsPerFrame = UInt32(layout.nb_channels)
            let sampleFormat = AVSampleFormat(codecpar.format)
            let bytesPerSample = UInt32(av_get_bytes_per_sample(sampleFormat))
            // Map FFmpeg sample format onto Core Audio flags: float for
            // FLT/DBL, unsigned for U8, signed integer otherwise; always packed.
            let formatFlags = ((sampleFormat == AV_SAMPLE_FMT_FLT || sampleFormat == AV_SAMPLE_FMT_DBL) ? kAudioFormatFlagIsFloat : sampleFormat == AV_SAMPLE_FMT_U8 ? 0 : kAudioFormatFlagIsSignedInteger) | kAudioFormatFlagIsPacked
            var audioStreamBasicDescription = AudioStreamBasicDescription(mSampleRate: Float64(codecpar.sample_rate), mFormatID: codecType.rawValue, mFormatFlags: formatFlags, mBytesPerPacket: bytesPerSample * channelsPerFrame, mFramesPerPacket: 1, mBytesPerFrame: bytesPerSample * channelsPerFrame, mChannelsPerFrame: channelsPerFrame, mBitsPerChannel: bytesPerSample * 8, mReserved: 0)
            _ = CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault, asbd: &audioStreamBasicDescription, layoutSize: 0, layout: nil, magicCookieSize: 0, magicCookie: nil, extensions: nil, formatDescriptionOut: &formatDescriptionOut)
            if let name = av_get_sample_fmt_name(sampleFormat) {
                formatName = String(cString: name)
            } else {
                formatName = nil
            }
        } else if codecpar.codec_type == AVMEDIA_TYPE_VIDEO {
            audioDescriptor = nil
            mediaType = .video
            // Pull Dolby Vision config and display rotation out of coded side data.
            if codecpar.nb_coded_side_data > 0, let sideDatas = codecpar.coded_side_data {
                for i in 0 ..< codecpar.nb_coded_side_data {
                    let sideData = sideDatas[Int(i)]
                    if sideData.type == AV_PKT_DATA_DOVI_CONF {
                        dovi = sideData.data.withMemoryRebound(to: DOVIDecoderConfigurationRecord.self, capacity: 1) { $0 }.pointee
                    } else if sideData.type == AV_PKT_DATA_DISPLAYMATRIX {
                        let matrix = sideData.data.withMemoryRebound(to: Int32.self, capacity: 1) { $0 }
                        rotation = Int16(Int(-av_display_rotation_get(matrix)) % 360)
                    }
                }
            }
            let sar = codecpar.sample_aspect_ratio.size
            var extradataSize = Int32(0)
            var extradata = codecpar.extradata
            let atomsData: Data?
            if let extradata {
                extradataSize = codecpar.extradata_size
                // NOTE(review): 0xFE at byte 4 presumably marks a non-standard
                // NAL length-size the decoder must convert — confirm upstream.
                if extradataSize >= 5, extradata[4] == 0xFE {
                    extradata[4] = 0xFF
                    isConvertNALSize = true
                } else {
                    isConvertNALSize = false
                }
                atomsData = Data(bytes: extradata, count: Int(extradataSize))
            } else {
                if codecType.rawValue == kCMVideoCodecType_VP9 {
                    // ff_videotoolbox_vpcc_extradata_create
                    // VP9 has no extradata in-container: synthesize a vpcC box
                    // via FFmpeg so VideoToolbox can be configured.
                    var ioContext: UnsafeMutablePointer<AVIOContext>?
                    guard avio_open_dyn_buf(&ioContext) == 0 else {
                        return nil
                    }
                    ff_isom_write_vpcc(nil, ioContext, nil, 0, &self.codecpar)
                    extradataSize = avio_close_dyn_buf(ioContext, &extradata)
                    guard let extradata else {
                        return nil
                    }
                    var data = Data()
                    var array: [UInt8] = [1, 0, 0, 0]
                    data.append(&array, count: 4)
                    data.append(extradata, count: Int(extradataSize))
                    atomsData = data
                } else {
                    atomsData = nil
                }
                isConvertNALSize = false
            }
            let format = AVPixelFormat(rawValue: codecpar.format)
            bitDepth = format.bitDepth
            let fullRange = codecpar.color_range == AVCOL_RANGE_JPEG
            let dic: NSMutableDictionary = [
                kCVImageBufferChromaLocationBottomFieldKey: kCVImageBufferChromaLocation_Left,
                kCVImageBufferChromaLocationTopFieldKey: kCVImageBufferChromaLocation_Left,
                kCMFormatDescriptionExtension_Depth: format.bitDepth * Int32(format.planeCount),
                kCMFormatDescriptionExtension_FullRangeVideo: fullRange,
                codecType.rawValue == kCMVideoCodecType_HEVC ? "EnableHardwareAcceleratedVideoDecoder" : "RequireHardwareAcceleratedVideoDecoder": true,
            ]
            // kCMFormatDescriptionExtension_BitsPerComponent
            if let atomsData {
                dic[kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms] = [codecType.rawValue.avc: atomsData]
            }
            dic[kCVPixelBufferPixelFormatTypeKey] = format.osType(fullRange: fullRange)
            dic[kCVImageBufferPixelAspectRatioKey] = sar.aspectRatio
            dic[kCVImageBufferColorPrimariesKey] = codecpar.color_primaries.colorPrimaries as String?
            dic[kCVImageBufferTransferFunctionKey] = codecpar.color_trc.transferFunction as String?
            dic[kCVImageBufferYCbCrMatrixKey] = codecpar.color_space.ycbcrMatrix as String?
            // swiftlint:disable line_length
            _ = CMVideoFormatDescriptionCreate(allocator: kCFAllocatorDefault, codecType: codecType.rawValue, width: codecpar.width, height: codecpar.height, extensions: dic, formatDescriptionOut: &formatDescriptionOut)
            // swiftlint:enable line_length
            if let name = av_get_pix_fmt_name(format) {
                formatName = String(cString: name)
            } else {
                formatName = nil
            }
        } else if codecpar.codec_type == AVMEDIA_TYPE_SUBTITLE {
            mediaType = .subtitle
            audioDescriptor = nil
            formatName = nil
            bitDepth = 0
            isConvertNALSize = false
            _ = CMFormatDescriptionCreate(allocator: kCFAllocatorDefault, mediaType: kCMMediaType_Subtitle, mediaSubType: codecType.rawValue, extensions: nil, formatDescriptionOut: &formatDescriptionOut)
        } else {
            // Attachments/data streams are not representable as player tracks.
            bitDepth = 0
            return nil
        }
        formatDescription = formatDescriptionOut
        bitsPerRawSample = codecpar.bits_per_raw_sample
        isImageSubtitle = [AV_CODEC_ID_DVD_SUBTITLE, AV_CODEC_ID_DVB_SUBTITLE, AV_CODEC_ID_DVB_TELETEXT, AV_CODEC_ID_HDMV_PGS_SUBTITLE].contains(codecpar.codec_id)
        trackID = 0
    }

    /// Creates an AVCodecContext configured for this track's parameters.
    func createContext(options: KSOptions) throws -> UnsafeMutablePointer<AVCodecContext> {
        try codecpar.createContext(options: options)
    }

    /// Enabled state is stored as the stream's discard flag. Text subtitles
    /// are never fully discarded so their packets stay available.
    public var isEnabled: Bool {
        get {
            stream?.pointee.discard == AVDISCARD_DEFAULT
        }
        set {
            var discard = newValue ? AVDISCARD_DEFAULT : AVDISCARD_ALL
            if mediaType == .subtitle, !isImageSubtitle {
                discard = AVDISCARD_DEFAULT
            }
            stream?.pointee.discard = discard
        }
    }
}
|
||||
|
||||
extension FFmpegAssetTrack {
    /// CoreVideo pixel-format code for this track's FFmpeg pixel format,
    /// honoring the full-range flag from the format description when present.
    var pixelFormatType: OSType? {
        AVPixelFormat(codecpar.format).osType(fullRange: formatDescription?.fullRangeVideo ?? false)
    }
}
|
||||
206
KSPlayer-main/Sources/KSPlayer/MEPlayer/FFmpegDecode.swift
Normal file
206
KSPlayer-main/Sources/KSPlayer/MEPlayer/FFmpegDecode.swift
Normal file
@@ -0,0 +1,206 @@
|
||||
//
|
||||
// FFmpegDecode.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Foundation
|
||||
import Libavcodec
|
||||
|
||||
/// Software decoder backed by libavcodec: sends packets to an AVCodecContext,
/// drains decoded frames through an optional filter graph, and resamples or
/// rescales them via `frameChange`. Also extracts HDR / closed-caption side data.
class FFmpegDecode: DecodeProtocol {
    private let options: KSOptions
    private var coreFrame: UnsafeMutablePointer<AVFrame>? = av_frame_alloc()
    private var codecContext: UnsafeMutablePointer<AVCodecContext>?
    // Fallback presentation clock when frames carry no usable timestamp;
    // advanced by each decoded frame's duration.
    private var bestEffortTimestamp = Int64(0)
    private let frameChange: FrameChange
    private let filter: MEFilter
    private let seekByBytes: Bool
    /// Creates the codec context and the per-media post-processing pipeline.
    /// A context-creation failure is logged; decodeFrame then no-ops.
    required init(assetTrack: FFmpegAssetTrack, options: KSOptions) {
        self.options = options
        seekByBytes = assetTrack.seekByBytes
        do {
            codecContext = try assetTrack.createContext(options: options)
        } catch {
            KSLog(error as CustomStringConvertible)
        }
        codecContext?.pointee.time_base = assetTrack.timebase.rational
        filter = MEFilter(timebase: assetTrack.timebase, isAudio: assetTrack.mediaType == .audio, nominalFrameRate: assetTrack.nominalFrameRate, options: options)
        if assetTrack.mediaType == .video {
            frameChange = VideoSwresample(fps: assetTrack.nominalFrameRate, isDovi: assetTrack.dovi != nil)
        } else {
            frameChange = AudioSwresample(audioDescriptor: assetTrack.audioDescriptor!)
        }
    }

    /// Decodes `packet` and delivers zero or more frames (or an error) through
    /// `completionHandler`. Side data on each frame is mined for closed
    /// captions, SEI strings, and HDR mastering/light-level metadata.
    func decodeFrame(from packet: Packet, completionHandler: @escaping (Result<MEFrame, Error>) -> Void) {
        guard let codecContext, avcodec_send_packet(codecContext, packet.corePacket) == 0 else {
            return
        }
        // `properties` only reflects FF_CODEC_PROPERTY_CLOSED_CAPTIONS after
        // avcodec_send_packet, so the CC track is created lazily here.
        if packet.assetTrack.mediaType == .video {
            if Int32(codecContext.pointee.properties) & FF_CODEC_PROPERTY_CLOSED_CAPTIONS != 0, packet.assetTrack.closedCaptionsTrack == nil {
                var codecpar = AVCodecParameters()
                codecpar.codec_type = AVMEDIA_TYPE_SUBTITLE
                codecpar.codec_id = AV_CODEC_ID_EIA_608
                if let subtitleAssetTrack = FFmpegAssetTrack(codecpar: codecpar) {
                    subtitleAssetTrack.name = "Closed Captions"
                    subtitleAssetTrack.startTime = packet.assetTrack.startTime
                    subtitleAssetTrack.timebase = packet.assetTrack.timebase
                    let subtitle = SyncPlayerItemTrack<SubtitleFrame>(mediaType: .subtitle, frameCapacity: 255, options: options)
                    subtitleAssetTrack.subtitle = subtitle
                    packet.assetTrack.closedCaptionsTrack = subtitleAssetTrack
                    subtitle.decode()
                }
            }
        }
        // Drain every frame the decoder produced for this packet.
        while true {
            let result = avcodec_receive_frame(codecContext, coreFrame)
            if result == 0, let inputFrame = coreFrame {
                var displayData: MasteringDisplayMetadata?
                var contentData: ContentLightMetadata?
                var ambientViewingEnvironment: AmbientViewingEnvironment?
                // Side data is lost after filtering, so harvest it here first.
                if inputFrame.pointee.nb_side_data > 0 {
                    for i in 0 ..< inputFrame.pointee.nb_side_data {
                        if let sideData = inputFrame.pointee.side_data[Int(i)]?.pointee {
                            if sideData.type == AV_FRAME_DATA_A53_CC {
                                // Re-wrap the A53 caption bytes as a packet for
                                // the synthetic EIA-608 subtitle track.
                                if let closedCaptionsTrack = packet.assetTrack.closedCaptionsTrack,
                                   let subtitle = closedCaptionsTrack.subtitle
                                {
                                    let closedCaptionsPacket = Packet()
                                    if let corePacket = packet.corePacket {
                                        closedCaptionsPacket.corePacket?.pointee.pts = corePacket.pointee.pts
                                        closedCaptionsPacket.corePacket?.pointee.dts = corePacket.pointee.dts
                                        closedCaptionsPacket.corePacket?.pointee.pos = corePacket.pointee.pos
                                        closedCaptionsPacket.corePacket?.pointee.time_base = corePacket.pointee.time_base
                                        closedCaptionsPacket.corePacket?.pointee.stream_index = corePacket.pointee.stream_index
                                    }
                                    closedCaptionsPacket.corePacket?.pointee.flags |= AV_PKT_FLAG_KEY
                                    closedCaptionsPacket.corePacket?.pointee.size = Int32(sideData.size)
                                    let buffer = av_buffer_ref(sideData.buf)
                                    closedCaptionsPacket.corePacket?.pointee.data = buffer?.pointee.data
                                    closedCaptionsPacket.corePacket?.pointee.buf = buffer
                                    closedCaptionsPacket.assetTrack = closedCaptionsTrack
                                    subtitle.putPacket(packet: closedCaptionsPacket)
                                }
                            } else if sideData.type == AV_FRAME_DATA_SEI_UNREGISTERED {
                                let size = sideData.size
                                if size > AV_UUID_LEN {
                                    // Payload after the 16-byte UUID is treated as text.
                                    let str = String(cString: sideData.data.advanced(by: Int(AV_UUID_LEN)))
                                    options.sei(string: str)
                                }
                            } else if sideData.type == AV_FRAME_DATA_DOVI_RPU_BUFFER {
                                // Placeholder: Dolby Vision RPU not consumed yet.
                                let data = sideData.data.withMemoryRebound(to: [UInt8].self, capacity: 1) { $0 }
                            } else if sideData.type == AV_FRAME_DATA_DOVI_METADATA { // AVDOVIMetadata
                                // Placeholder: DOVI metadata parsed but unused so far.
                                let data = sideData.data.withMemoryRebound(to: AVDOVIMetadata.self, capacity: 1) { $0 }
                                let header = av_dovi_get_header(data)
                                let mapping = av_dovi_get_mapping(data)
                                let color = av_dovi_get_color(data)
                                // frame.corePixelBuffer?.transferFunction = kCVImageBufferTransferFunction_ITU_R_2020
                            } else if sideData.type == AV_FRAME_DATA_DYNAMIC_HDR_PLUS { // AVDynamicHDRPlus
                                let data = sideData.data.withMemoryRebound(to: AVDynamicHDRPlus.self, capacity: 1) { $0 }.pointee
                            } else if sideData.type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID { // AVDynamicHDRVivid
                                let data = sideData.data.withMemoryRebound(to: AVDynamicHDRVivid.self, capacity: 1) { $0 }.pointee
                            } else if sideData.type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA {
                                let data = sideData.data.withMemoryRebound(to: AVMasteringDisplayMetadata.self, capacity: 1) { $0 }.pointee
                                displayData = MasteringDisplayMetadata(
                                    display_primaries_r_x: UInt16(data.display_primaries.0.0.num).bigEndian,
                                    display_primaries_r_y: UInt16(data.display_primaries.0.1.num).bigEndian,
                                    display_primaries_g_x: UInt16(data.display_primaries.1.0.num).bigEndian,
                                    display_primaries_g_y: UInt16(data.display_primaries.1.1.num).bigEndian,
                                    // FIX: b_x previously read display_primaries.2.1 (the blue *y*
                                    // coordinate). display_primaries is [3][2] = [rgb][x,y], so the
                                    // x coordinate is index 0, matching the r/g rows above.
                                    display_primaries_b_x: UInt16(data.display_primaries.2.0.num).bigEndian,
                                    display_primaries_b_y: UInt16(data.display_primaries.2.1.num).bigEndian,
                                    white_point_x: UInt16(data.white_point.0.num).bigEndian,
                                    white_point_y: UInt16(data.white_point.1.num).bigEndian,
                                    minLuminance: UInt32(data.min_luminance.num).bigEndian,
                                    maxLuminance: UInt32(data.max_luminance.num).bigEndian
                                )
                            } else if sideData.type == AV_FRAME_DATA_CONTENT_LIGHT_LEVEL {
                                let data = sideData.data.withMemoryRebound(to: AVContentLightMetadata.self, capacity: 1) { $0 }.pointee
                                contentData = ContentLightMetadata(
                                    MaxCLL: UInt16(data.MaxCLL).bigEndian,
                                    MaxFALL: UInt16(data.MaxFALL).bigEndian
                                )
                            } else if sideData.type == AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT {
                                let data = sideData.data.withMemoryRebound(to: AVAmbientViewingEnvironment.self, capacity: 1) { $0 }.pointee
                                ambientViewingEnvironment = AmbientViewingEnvironment(
                                    ambient_illuminance: UInt32(data.ambient_illuminance.num).bigEndian,
                                    ambient_light_x: UInt16(data.ambient_light_x.num).bigEndian,
                                    ambient_light_y: UInt16(data.ambient_light_y.num).bigEndian
                                )
                            }
                        }
                    }
                }
                filter.filter(options: options, inputFrame: inputFrame) { avframe in
                    do {
                        var frame = try frameChange.change(avframe: avframe)
                        if let videoFrame = frame as? VideoVTBFrame, let pixelBuffer = videoFrame.corePixelBuffer {
                            if let pixelBuffer = pixelBuffer as? PixelBuffer {
                                pixelBuffer.formatDescription = packet.assetTrack.formatDescription
                            }
                            // Attach the harvested HDR metadata for EDR rendering.
                            if displayData != nil || contentData != nil || ambientViewingEnvironment != nil {
                                videoFrame.edrMetaData = EDRMetaData(displayData: displayData, contentData: contentData, ambientViewingEnvironment: ambientViewingEnvironment)
                            }
                        }
                        frame.timebase = filter.timebase
                        // frame.timebase = Timebase(avframe.pointee.time_base)
                        frame.size = packet.size
                        frame.position = packet.position
                        frame.duration = avframe.pointee.duration
                        if frame.duration == 0, avframe.pointee.sample_rate != 0, frame.timebase.num != 0 {
                            // Derive audio duration from the sample count when absent.
                            frame.duration = Int64(avframe.pointee.nb_samples) * Int64(frame.timebase.den) / (Int64(avframe.pointee.sample_rate) * Int64(frame.timebase.num))
                        }
                        // Timestamp fallback chain: best_effort → pts → pkt_dts →
                        // locally-extrapolated clock.
                        var timestamp = avframe.pointee.best_effort_timestamp
                        if timestamp < 0 {
                            timestamp = avframe.pointee.pts
                        }
                        if timestamp < 0 {
                            timestamp = avframe.pointee.pkt_dts
                        }
                        if timestamp < 0 {
                            timestamp = bestEffortTimestamp
                        }
                        frame.timestamp = timestamp
                        bestEffortTimestamp = timestamp &+ frame.duration
                        completionHandler(.success(frame))
                    } catch {
                        completionHandler(.failure(error))
                    }
                }
            } else {
                if result == AVError.eof.code {
                    avcodec_flush_buffers(codecContext)
                    break
                } else if result == AVError.tryAgain.code {
                    break
                } else {
                    let error = NSError(errorCode: packet.assetTrack.mediaType == .audio ? .codecAudioReceiveFrame : .codecVideoReceiveFrame, avErrorCode: result)
                    KSLog(error)
                    completionHandler(.failure(error))
                    // FIX: exit the drain loop on a hard decode error. Errors other
                    // than EAGAIN/EOF are persistent, so calling
                    // avcodec_receive_frame again would spin forever re-reporting
                    // the same failure.
                    break
                }
            }
        }
    }

    /// Resets decoder state after a seek so stale buffered frames are not
    /// returned for the new position.
    func doFlushCodec() {
        bestEffortTimestamp = Int64(0)
        // Translated from the original comment: the decoder may still hold
        // cached frames after seek; flush so it doesn't emit pre-seek data.
        avcodec_flush_buffers(codecContext)
    }

    /// Releases the AVFrame, codec context, and post-processing resources.
    func shutdown() {
        av_frame_free(&coreFrame)
        avcodec_free_context(&codecContext)
        frameChange.shutdown()
    }

    /// Prepares for a fresh decode pass (e.g. after seek): resets the local
    /// clock and flushes any buffered decoder state.
    func decode() {
        bestEffortTimestamp = Int64(0)
        if codecContext != nil {
            avcodec_flush_buffers(codecContext)
        }
    }
}
|
||||
150
KSPlayer-main/Sources/KSPlayer/MEPlayer/Filter.swift
Normal file
150
KSPlayer-main/Sources/KSPlayer/MEPlayer/Filter.swift
Normal file
@@ -0,0 +1,150 @@
|
||||
//
|
||||
// Filter.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2021/8/7.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import Libavfilter
|
||||
import Libavutil
|
||||
|
||||
class MEFilter {
|
||||
private var graph: UnsafeMutablePointer<AVFilterGraph>?
|
||||
private var bufferSrcContext: UnsafeMutablePointer<AVFilterContext>?
|
||||
private var bufferSinkContext: UnsafeMutablePointer<AVFilterContext>?
|
||||
private var filters: String?
|
||||
let timebase: Timebase
|
||||
private let isAudio: Bool
|
||||
private var params = AVBufferSrcParameters()
|
||||
private let nominalFrameRate: Float
|
||||
    deinit {
        // Clear the back-pointer to KSOptions before freeing the graph so
        // nothing can dereference the (unretained) opaque pointer during teardown.
        graph?.pointee.opaque = nil
        avfilter_graph_free(&graph)
    }
|
||||
|
||||
    /// Allocates the libavfilter graph and stores the stream parameters used
    /// when the graph is later built by setup.
    ///
    /// - Parameters:
    ///   - timebase: Timebase of the frames fed through the filter.
    ///   - isAudio: Selects abuffer/abuffersink vs buffer/buffersink filters.
    ///   - nominalFrameRate: Source frame rate hint.
    ///   - options: Player options; stored unretained in the graph's opaque
    ///     pointer, so the options object must outlive this filter.
    public init(timebase: Timebase, isAudio: Bool, nominalFrameRate: Float, options: KSOptions) {
        graph = avfilter_graph_alloc()
        graph?.pointee.opaque = Unmanaged.passUnretained(options).toOpaque()
        self.timebase = timebase
        self.isAudio = isAudio
        self.nominalFrameRate = nominalFrameRate
    }
|
||||
|
||||
    /// Builds the filter graph from a libavfilter description string: parses
    /// the chain (avfilter_graph_parse2), creates and links the buffer source
    /// and sink, propagates any hardware frames context, and configures the
    /// graph. Returns false on any libavfilter error; allocated contexts are
    /// reclaimed when the graph itself is freed in deinit.
    private func setup(filters: String) -> Bool {
        var inputs = avfilter_inout_alloc()
        var outputs = avfilter_inout_alloc()
        var ret = avfilter_graph_parse2(graph, filters, &inputs, &outputs)
        guard ret >= 0, let graph, let inputs, let outputs else {
            avfilter_inout_free(&inputs)
            avfilter_inout_free(&outputs)
            return false
        }
        // Sink: terminal filter from which processed frames are pulled.
        let bufferSink = avfilter_get_by_name(isAudio ? "abuffersink" : "buffersink")
        ret = avfilter_graph_create_filter(&bufferSinkContext, bufferSink, "out", nil, nil, graph)
        guard ret >= 0 else { return false }
        ret = avfilter_link(outputs.pointee.filter_ctx, UInt32(outputs.pointee.pad_idx), bufferSinkContext, 0)
        guard ret >= 0 else { return false }
        // Source: entry point for incoming frames; its format/timebase (and
        // hw_frames_ctx) come from `params`, applied before init.
        let buffer = avfilter_get_by_name(isAudio ? "abuffer" : "buffer")
        bufferSrcContext = avfilter_graph_alloc_filter(graph, buffer, "in")
        guard bufferSrcContext != nil else { return false }
        av_buffersrc_parameters_set(bufferSrcContext, &params)
        ret = avfilter_init_str(bufferSrcContext, nil)
        guard ret >= 0 else { return false }
        ret = avfilter_link(bufferSrcContext, 0, inputs.pointee.filter_ctx, UInt32(inputs.pointee.pad_idx))
        guard ret >= 0 else { return false }
        if let ctx = params.hw_frames_ctx {
            // Hardware frames: hand the device context to the first parsed
            // filter so hw-accelerated filtering runs on the same device.
            let framesCtxData = UnsafeMutableRawPointer(ctx.pointee.data).bindMemory(to: AVHWFramesContext.self, capacity: 1)
            inputs.pointee.filter_ctx.pointee.hw_device_ctx = framesCtxData.pointee.device_ref
            // outputs.pointee.filter_ctx.pointee.hw_device_ctx = framesCtxData.pointee.device_ref
            // bufferSrcContext?.pointee.hw_device_ctx = framesCtxData.pointee.device_ref
            // bufferSinkContext?.pointee.hw_device_ctx = framesCtxData.pointee.device_ref
        }
        ret = avfilter_graph_config(graph, nil)
        guard ret >= 0 else { return false }
        return true
    }
|
||||
|
||||
private func setup2(filters: String) -> Bool {
|
||||
guard let graph else {
|
||||
return false
|
||||
}
|
||||
let bufferName = isAudio ? "abuffer" : "buffer"
|
||||
let bufferSrc = avfilter_get_by_name(bufferName)
|
||||
var ret = avfilter_graph_create_filter(&bufferSrcContext, bufferSrc, "ksplayer_\(bufferName)", params.arg, nil, graph)
|
||||
av_buffersrc_parameters_set(bufferSrcContext, ¶ms)
|
||||
let bufferSink = avfilter_get_by_name(bufferName + "sink")
|
||||
ret = avfilter_graph_create_filter(&bufferSinkContext, bufferSink, "ksplayer_\(bufferName)sink", nil, nil, graph)
|
||||
guard ret >= 0 else { return false }
|
||||
// av_opt_set_int_list(bufferSinkContext, "pix_fmts", [AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE] AV_PIX_FMT_NONE,AV_OPT_SEARCH_CHILDREN)
|
||||
var inputs = avfilter_inout_alloc()
|
||||
var outputs = avfilter_inout_alloc()
|
||||
outputs?.pointee.name = strdup("in")
|
||||
outputs?.pointee.filter_ctx = bufferSrcContext
|
||||
outputs?.pointee.pad_idx = 0
|
||||
outputs?.pointee.next = nil
|
||||
inputs?.pointee.name = strdup("out")
|
||||
inputs?.pointee.filter_ctx = bufferSinkContext
|
||||
inputs?.pointee.pad_idx = 0
|
||||
inputs?.pointee.next = nil
|
||||
let filterNb = Int(graph.pointee.nb_filters)
|
||||
ret = avfilter_graph_parse_ptr(graph, filters, &inputs, &outputs, nil)
|
||||
guard ret >= 0 else {
|
||||
avfilter_inout_free(&inputs)
|
||||
avfilter_inout_free(&outputs)
|
||||
return false
|
||||
}
|
||||
for i in 0 ..< Int(graph.pointee.nb_filters) - filterNb {
|
||||
swap(&graph.pointee.filters[i], &graph.pointee.filters[i + filterNb])
|
||||
}
|
||||
ret = avfilter_graph_config(graph, nil)
|
||||
guard ret >= 0 else { return false }
|
||||
return true
|
||||
}
|
||||
|
||||
public func filter(options: KSOptions, inputFrame: UnsafeMutablePointer<AVFrame>, completionHandler: (UnsafeMutablePointer<AVFrame>) -> Void) {
|
||||
let filters: String
|
||||
if isAudio {
|
||||
filters = options.audioFilters.joined(separator: ",")
|
||||
} else {
|
||||
if options.autoDeInterlace, !options.videoFilters.contains("idet") {
|
||||
options.videoFilters.append("idet")
|
||||
}
|
||||
filters = options.videoFilters.joined(separator: ",")
|
||||
}
|
||||
guard !filters.isEmpty else {
|
||||
completionHandler(inputFrame)
|
||||
return
|
||||
}
|
||||
var params = AVBufferSrcParameters()
|
||||
params.format = inputFrame.pointee.format
|
||||
params.time_base = timebase.rational
|
||||
params.width = inputFrame.pointee.width
|
||||
params.height = inputFrame.pointee.height
|
||||
params.sample_aspect_ratio = inputFrame.pointee.sample_aspect_ratio
|
||||
params.frame_rate = AVRational(num: 1, den: Int32(nominalFrameRate))
|
||||
if let ctx = inputFrame.pointee.hw_frames_ctx {
|
||||
params.hw_frames_ctx = av_buffer_ref(ctx)
|
||||
}
|
||||
params.sample_rate = inputFrame.pointee.sample_rate
|
||||
params.ch_layout = inputFrame.pointee.ch_layout
|
||||
if self.params != params || self.filters != filters {
|
||||
self.params = params
|
||||
self.filters = filters
|
||||
if !setup(filters: filters) {
|
||||
completionHandler(inputFrame)
|
||||
return
|
||||
}
|
||||
}
|
||||
let ret = av_buffersrc_add_frame_flags(bufferSrcContext, inputFrame, 0)
|
||||
if ret < 0 {
|
||||
return
|
||||
}
|
||||
while av_buffersink_get_frame_flags(bufferSinkContext, inputFrame, 0) >= 0 {
|
||||
// timebase = Timebase(av_buffersink_get_time_base(bufferSinkContext))
|
||||
completionHandler(inputFrame)
|
||||
// 一定要加av_frame_unref,不然会内存泄漏。
|
||||
av_frame_unref(inputFrame)
|
||||
}
|
||||
}
|
||||
}
|
||||
588
KSPlayer-main/Sources/KSPlayer/MEPlayer/KSMEPlayer.swift
Normal file
588
KSPlayer-main/Sources/KSPlayer/MEPlayer/KSMEPlayer.swift
Normal file
@@ -0,0 +1,588 @@
|
||||
//
|
||||
// KSMEPlayer.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import AVKit
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
|
||||
/// FFmpeg-backed media player: owns a `MEPlayerItem` demuxer/decoder, an audio
/// output, and an optional video output, and bridges them to `MediaPlayerDelegate`,
/// Picture-in-Picture and AVPlaybackCoordinator.
public class KSMEPlayer: NSObject {
    // Number of completed loops when `isLoopPlay` is on. Starts at 1; reset to 0 by shutdown().
    private var loopCount = 1
    // Current demux/decode pipeline; replaced wholesale by replace(url:options:).
    private var playerItem: MEPlayerItem
    public let audioOutput: AudioOutput
    private var options: KSOptions
    // One-shot timer used by the playback-coordinator buffering command.
    private var bufferingCountDownTimer: Timer?
    // Video renderer (nil when options.videoDisable or the stream has no video).
    // Swapping it invalidates and detaches the previous output on the main thread.
    public private(set) var videoOutput: (VideoOutput & UIView)? {
        didSet {
            oldValue?.invalidate()
            runOnMainThread {
                oldValue?.removeFromSuperview()
            }
        }
    }

    // 0...100; forwarded to the delegate on the main thread whenever it changes.
    public private(set) var bufferingProgress = 0 {
        willSet {
            runOnMainThread { [weak self] in
                guard let self else { return }
                delegate?.changeBuffering(player: self, progress: newValue)
            }
        }
    }

    // Stored as Any? so the class loads on OS versions without the PiP API;
    // see the typed accessor `pipController` below.
    private lazy var _pipController: Any? = {
        if #available(iOS 15.0, tvOS 15.0, macOS 12.0, *), let videoOutput {
            let contentSource = AVPictureInPictureController.ContentSource(sampleBufferDisplayLayer: videoOutput.displayLayer, playbackDelegate: self)
            let pip = KSPictureInPictureController(contentSource: contentSource)
            return pip
        } else {
            return nil
        }
    }()

    @available(tvOS 14.0, *)
    public var pipController: KSPictureInPictureController? {
        _pipController as? KSPictureInPictureController
    }

    // Same Any?-boxing trick as _pipController, for AVDelegatingPlaybackCoordinator.
    private lazy var _playbackCoordinator: Any? = {
        if #available(macOS 12.0, iOS 15.0, tvOS 15.0, *) {
            let coordinator = AVDelegatingPlaybackCoordinator(playbackControlDelegate: self)
            coordinator.suspensionReasonsThatTriggerWaiting = [.stallRecovery]
            return coordinator
        } else {
            return nil
        }
    }()

    @available(macOS 12.0, iOS 15.0, tvOS 15.0, *)
    public var playbackCoordinator: AVPlaybackCoordinator {
        // swiftlint:disable force_cast
        // Safe under the availability check: the lazy initializer above always
        // produces a coordinator on these OS versions.
        _playbackCoordinator as! AVPlaybackCoordinator
        // swiftlint:enable force_cast
    }

    // Furthest buffered position, in seconds; maintained by sourceDidChange(loadingState:).
    public private(set) var playableTime = TimeInterval(0)
    public weak var delegate: MediaPlayerDelegate?
    public private(set) var isReadyToPlay = false
    public var allowsExternalPlayback: Bool = false
    public var usesExternalPlaybackWhileExternalScreenIsActive: Bool = false

    // Forwarded to the audio output; for AudioUnitPlayer the rate is applied by
    // rewriting the "atempo" entry in options.audioFilters instead.
    public var playbackRate: Float = 1 {
        didSet {
            if playbackRate != audioOutput.playbackRate {
                audioOutput.playbackRate = playbackRate
                if audioOutput is AudioUnitPlayer {
                    var audioFilters = options.audioFilters.filter {
                        !$0.hasPrefix("atempo=")
                    }
                    if playbackRate != 1 {
                        audioFilters.append("atempo=\(playbackRate)")
                    }
                    options.audioFilters = audioFilters
                }
            }
        }
    }

    // Buffering state; any change re-evaluates whether the outputs should run.
    public private(set) var loadState = MediaLoadState.idle {
        didSet {
            if loadState != oldValue {
                playOrPause()
            }
        }
    }

    // User-facing playback state; .finished additionally notifies the delegate.
    public private(set) var playbackState = MediaPlaybackState.idle {
        didSet {
            if playbackState != oldValue {
                playOrPause()
                if playbackState == .finished {
                    runOnMainThread { [weak self] in
                        guard let self else { return }
                        delegate?.finish(player: self, error: nil)
                    }
                }
            }
        }
    }

    /// Creates the pipeline for `url` but does not start I/O; call prepareToPlay().
    public required init(url: URL, options: KSOptions) {
        KSOptions.setAudioSession()
        audioOutput = KSOptions.audioPlayerType.init()
        playerItem = MEPlayerItem(url: url, options: options)
        if options.videoDisable {
            videoOutput = nil
        } else {
            videoOutput = KSOptions.videoPlayerType.init(options: options)
        }
        self.options = options
        super.init()
        playerItem.delegate = self
        audioOutput.renderSource = playerItem
        videoOutput?.renderSource = playerItem
        videoOutput?.displayLayerDelegate = self
        #if !os(macOS)
        // React to output-route and spatial-audio changes so the audio format
        // can be renegotiated (see the handlers in the private extension).
        NotificationCenter.default.addObserver(self, selector: #selector(audioRouteChange), name: AVAudioSession.routeChangeNotification, object: AVAudioSession.sharedInstance())
        if #available(tvOS 15.0, iOS 15.0, *) {
            NotificationCenter.default.addObserver(self, selector: #selector(spatialCapabilityChange), name: AVAudioSession.spatialPlaybackCapabilitiesChangedNotification, object: nil)
        }
        #endif
    }

    deinit {
        #if !os(macOS)
        // Best effort: restore the session to stereo output.
        try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(2)
        #endif
        NotificationCenter.default.removeObserver(self)
        videoOutput?.invalidate()
        playerItem.shutdown()
    }
}
|
||||
|
||||
// MARK: - private functions
|
||||
|
||||
// MARK: - private functions

private extension KSMEPlayer {
    /// Re-evaluates on the main thread whether the outputs should be running:
    /// they run only while the state is .playing AND the buffer is .playable.
    /// Also notifies the delegate of the load-state change.
    func playOrPause() {
        runOnMainThread { [weak self] in
            guard let self else { return }
            let shouldRun = self.playbackState == .playing && self.loadState == .playable
            if shouldRun {
                self.audioOutput.play()
                self.videoOutput?.play()
            } else {
                self.audioOutput.pause()
                self.videoOutput?.pause()
            }
            self.delegate?.changeLoadState(player: self)
        }
    }

    /// Spatial-audio capability changed: renegotiate the audio format of every audio track.
    @objc private func spatialCapabilityChange(notification _: Notification) {
        KSLog("[audio] spatialCapabilityChange")
        tracks(mediaType: .audio)
            .compactMap { $0 as? FFmpegAssetTrack }
            .forEach { $0.audioDescriptor?.updateAudioFormat() }
    }

    #if !os(macOS)
    /// Output route changed (headphones, AirPlay, ...): renegotiate the audio
    /// format for each track and flush the audio output.
    @objc private func audioRouteChange(notification: Notification) {
        KSLog("[audio] audioRouteChange")
        // Only proceed when the notification carries a change reason.
        guard notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt != nil else {
            return
        }
        // let routeChangeReason = AVAudioSession.RouteChangeReason(rawValue: reason)
        // guard [AVAudioSession.RouteChangeReason.newDeviceAvailable, .oldDeviceUnavailable, .routeConfigurationChange].contains(routeChangeReason) else {
        //     return
        // }
        tracks(mediaType: .audio)
            .compactMap { $0 as? FFmpegAssetTrack }
            .forEach { $0.audioDescriptor?.updateAudioFormat() }
        audioOutput.flush()
    }
    #endif
}
|
||||
|
||||
// MARK: - MEPlayerDelegate: callbacks from the demux/decode pipeline.
extension KSMEPlayer: MEPlayerDelegate {
    /// Stream headers are parsed: drop the video output for audio-only media,
    /// prepare the audio output for the active track's format, seed the display
    /// timebase when starting mid-stream, and tell the delegate we're ready.
    func sourceDidOpened() {
        isReadyToPlay = true
        options.readyTime = CACurrentMediaTime()
        let vidoeTracks = tracks(mediaType: .video)
        if vidoeTracks.isEmpty {
            videoOutput = nil
        }
        let audioDescriptor = tracks(mediaType: .audio).first { $0.isEnabled }.flatMap {
            $0 as? FFmpegAssetTrack
        }?.audioDescriptor
        runOnMainThread { [weak self] in
            guard let self else { return }
            if let audioDescriptor {
                KSLog("[audio] audio type: \(audioOutput) prepare audioFormat )")
                audioOutput.prepare(audioFormat: audioDescriptor.audioFormat)
            }
            // When resuming from a saved position, align the display clock so the
            // first rendered frames carry the right presentation time.
            if let controlTimebase = videoOutput?.displayLayer.controlTimebase, options.startPlayTime > 1 {
                CMTimebaseSetTime(controlTimebase, time: CMTimeMake(value: Int64(options.startPlayTime), timescale: 1))
            }
            delegate?.readyToPlay(player: self)
        }
    }

    /// Pipeline failure: forward the error to the delegate on the main thread.
    func sourceDidFailed(error: NSError?) {
        runOnMainThread { [weak self] in
            guard let self else { return }
            self.delegate?.finish(player: self, error: error)
        }
    }

    /// End of stream: either loop (bump loopCount and keep the outputs running)
    /// or transition to .finished, which notifies the delegate via didSet.
    func sourceDidFinished() {
        runOnMainThread { [weak self] in
            guard let self else { return }
            if self.options.isLoopPlay {
                self.loopCount += 1
                self.delegate?.playBack(player: self, loopCount: self.loopCount)
                self.audioOutput.play()
                self.videoOutput?.play()
            } else {
                self.playbackState = .finished
            }
        }
    }

    /// Buffer occupancy changed: update playableTime, drive the
    /// playable <-> loading state machine, publish buffering progress, and for
    /// live streams (duration == 0) let the options adapt the playback rate.
    func sourceDidChange(loadingState: LoadingState) {
        if loadingState.isEndOfFile {
            playableTime = duration
        } else {
            playableTime = currentPlaybackTime + loadingState.loadedTime
        }
        if loadState == .playable {
            // Buffer ran dry (no frames, no packets) and the user asked for a
            // non-zero forward buffer: fall back to .loading.
            if !loadingState.isEndOfFile, loadingState.frameCount == 0, loadingState.packetCount == 0, options.preferredForwardBufferDuration != 0 {
                loadState = .loading
                if playbackState == .playing {
                    runOnMainThread { [weak self] in
                        // Update progress on the main thread.
                        self?.bufferingProgress = 0
                    }
                }
            }
        } else {
            if loadingState.isFirst {
                // Render the first frame eagerly so the view isn't blank while buffering.
                if videoOutput?.pixelBuffer == nil {
                    videoOutput?.readNextFrame()
                }
            }
            var progress = 100
            if loadingState.isPlayable {
                loadState = .playable
            } else {
                // Guard against non-finite progress values before converting to Int.
                if loadingState.progress.isInfinite {
                    progress = 100
                } else if loadingState.progress.isNaN {
                    progress = 0
                } else {
                    progress = min(100, Int(loadingState.progress))
                }
            }
            if playbackState == .playing {
                runOnMainThread { [weak self] in
                    // Update progress on the main thread.
                    self?.bufferingProgress = progress
                }
            }
        }
        // Live stream while playing and playable: allow adaptive rate control.
        if duration == 0, playbackState == .playing, loadState == .playable {
            if let rate = options.liveAdaptivePlaybackRate(loadingState: loadingState) {
                playbackRate = rate
            }
        }
    }

    /// Bitrate renegotiated by the source; currently log-only.
    func sourceDidChange(oldBitRate: Int64, newBitrate: Int64) {
        KSLog("oldBitRate \(oldBitRate) change to newBitrate \(newBitrate)")
    }
}
|
||||
|
||||
// MARK: - MediaPlayerProtocol
extension KSMEPlayer: MediaPlayerProtocol {
    public var chapters: [Chapter] {
        playerItem.chapters
    }

    // Note: "Souce" spelling comes from the protocol requirement.
    public var subtitleDataSouce: SubtitleDataSouce? { self }
    /// Volume of the audio output (independent of system volume).
    public var playbackVolume: Float {
        get {
            audioOutput.volume
        }
        set {
            audioOutput.volume = newValue
        }
    }

    public var isPlaying: Bool { playbackState == .playing }

    @MainActor
    public var naturalSize: CGSize {
        // Non-plane (e.g. VR/360) display modes render full-scene.
        options.display == .plane ? playerItem.naturalSize : KSOptions.sceneSize
    }

    public var isExternalPlaybackActive: Bool { false }

    public var view: UIView? { videoOutput }

    /// Reuses this player for a new URL: shuts down the old pipeline, builds a
    /// fresh MEPlayerItem, and rewires the audio/video outputs to it.
    /// Does not start playback; call prepareToPlay() afterwards.
    public func replace(url: URL, options: KSOptions) {
        KSLog("replaceUrl \(self)")
        shutdown()
        // Detach before swapping so the dying item can't call back into us.
        playerItem.delegate = nil
        playerItem = MEPlayerItem(url: url, options: options)
        if options.videoDisable {
            videoOutput = nil
        } else if videoOutput == nil {
            // Video was previously disabled or dropped: recreate the output.
            videoOutput = KSOptions.videoPlayerType.init(options: options)
            videoOutput?.displayLayerDelegate = self
        }
        self.options = options
        playerItem.delegate = self
        audioOutput.flush()
        audioOutput.renderSource = playerItem
        videoOutput?.renderSource = playerItem
        videoOutput?.options = options
    }

    public var currentPlaybackTime: TimeInterval {
        get {
            playerItem.currentPlaybackTime
        }
        set {
            // Fire-and-forget seek; use seek(time:completion:) to observe the result.
            seek(time: newValue) { _ in }
        }
    }

    public var duration: TimeInterval { playerItem.duration }

    public var fileSize: Double { playerItem.fileSize }

    public var seekable: Bool { playerItem.seekable }

    public var dynamicInfo: DynamicInfo? {
        playerItem.dynamicInfo
    }

    /// Seeks to `time` (clamped to >= 0; wraps to 0 at end-of-file when looping).
    /// On success flushes audio and realigns the video display clock; `completion`
    /// reports whether the underlying seek succeeded.
    public func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void)) {
        let time = max(time, 0)
        playbackState = .seeking
        runOnMainThread { [weak self] in
            self?.bufferingProgress = 0
        }
        let seekTime: TimeInterval
        if time >= duration, options.isLoopPlay {
            seekTime = 0
        } else {
            seekTime = time
        }
        playerItem.seek(time: seekTime) { [weak self] result in
            guard let self else { return }
            if result {
                self.audioOutput.flush()
                runOnMainThread { [weak self] in
                    guard let self else { return }
                    // Keep the sample-buffer display layer's clock in sync with
                    // the new playback position.
                    if let controlTimebase = self.videoOutput?.displayLayer.controlTimebase {
                        CMTimebaseSetTime(controlTimebase, time: CMTimeMake(value: Int64(self.currentPlaybackTime), timescale: 1))
                    }
                }
            }
            completion(result)
        }
    }

    /// Starts demuxing/decoding; delegate's readyToPlay fires when headers are parsed.
    public func prepareToPlay() {
        KSLog("prepareToPlay \(self)")
        options.prepareTime = CACurrentMediaTime()
        playerItem.prepareToPlay()
        bufferingProgress = 0
    }

    public func play() {
        KSLog("play \(self)")
        playbackState = .playing
        // Keep PiP's play/pause UI in sync with our state.
        if #available(iOS 15.0, tvOS 15.0, macOS 12.0, *) {
            pipController?.invalidatePlaybackState()
        }
    }

    public func pause() {
        KSLog("pause \(self)")
        playbackState = .paused
        if #available(iOS 15.0, tvOS 15.0, macOS 12.0, *) {
            pipController?.invalidatePlaybackState()
        }
    }

    /// Stops the pipeline and resets all state and timing metrics so the player
    /// can be reused via replace(url:options:).
    public func shutdown() {
        KSLog("shutdown \(self)")
        playbackState = .stopped
        loadState = .idle
        isReadyToPlay = false
        loopCount = 0
        playerItem.shutdown()
        // Zero the per-session timing metrics.
        options.prepareTime = 0
        options.dnsStartTime = 0
        options.tcpStartTime = 0
        options.tcpConnectedTime = 0
        options.openTime = 0
        options.findTime = 0
        options.readyTime = 0
        options.readAudioTime = 0
        options.readVideoTime = 0
        options.decodeAudioTime = 0
        options.decodeVideoTime = 0
        if KSOptions.isClearVideoWhereReplace {
            videoOutput?.flush()
        }
    }

    @MainActor
    public var contentMode: UIViewContentMode {
        get {
            view?.contentMode ?? .center
        }
        set {
            view?.contentMode = newValue
        }
    }

    /// Snapshot of the most recently rendered video frame, if any.
    public func thumbnailImageAtCurrentTime() async -> CGImage? {
        videoOutput?.pixelBuffer?.cgImage()
    }

    public func enterBackground() {}

    public func enterForeground() {}

    public var isMuted: Bool {
        get {
            audioOutput.isMuted
        }
        set {
            audioOutput.isMuted = newValue
        }
    }

    /// All tracks of `mediaType`. For .subtitle this also surfaces closed-caption
    /// tracks embedded in other streams.
    public func tracks(mediaType: AVFoundation.AVMediaType) -> [MediaPlayerTrack] {
        playerItem.assetTracks.compactMap { track -> MediaPlayerTrack? in
            if track.mediaType == mediaType {
                return track
            } else if mediaType == .subtitle {
                return track.closedCaptionsTrack
            }
            return nil
        }
    }

    /// Activates `track`; flushes audio when the switch required an internal seek.
    public func select(track: some MediaPlayerTrack) {
        let isSeek = playerItem.select(track: track)
        if isSeek {
            audioOutput.flush()
        }
    }
}
|
||||
|
||||
// MARK: - AVPictureInPictureSampleBufferPlaybackDelegate
// Drives playback from the Picture-in-Picture window's controls.
@available(tvOS 14.0, *)
extension KSMEPlayer: AVPictureInPictureSampleBufferPlaybackDelegate {
    public func pictureInPictureController(_: AVPictureInPictureController, setPlaying playing: Bool) {
        if playing {
            play()
        } else {
            pause()
        }
    }

    public func pictureInPictureControllerTimeRangeForPlayback(_: AVPictureInPictureController) -> CMTimeRange {
        // A zero duration means a live stream: report an unbounded range so PiP
        // hides the scrubber.
        guard duration != 0 else {
            return CMTimeRange(start: .negativeInfinity, duration: .positiveInfinity)
        }
        return CMTimeRange(start: 0, end: duration)
    }

    public func pictureInPictureControllerIsPlaybackPaused(_: AVPictureInPictureController) -> Bool {
        isPlaying == false
    }

    public func pictureInPictureController(_: AVPictureInPictureController, didTransitionToRenderSize _: CMVideoDimensions) {}
    public func pictureInPictureController(_: AVPictureInPictureController, skipByInterval skipInterval: CMTime) async {
        // Relative skip from the current position; result is ignored.
        let target = currentPlaybackTime + skipInterval.seconds
        seek(time: target) { _ in }
    }

    public func pictureInPictureControllerShouldProhibitBackgroundAudioPlayback(_: AVPictureInPictureController) -> Bool {
        false
    }
}
|
||||
|
||||
// MARK: - AVPlaybackCoordinatorPlaybackControlDelegate
// Applies shared-playback (SharePlay) commands issued by the coordinator.
// Every command is ignored unless it targets the coordinator's current item.
@available(macOS 12.0, iOS 15.0, tvOS 15.0, *)
extension KSMEPlayer: AVPlaybackCoordinatorPlaybackControlDelegate {
    public func playbackCoordinator(_: AVDelegatingPlaybackCoordinator, didIssue playCommand: AVDelegatingPlaybackCoordinatorPlayCommand, completionHandler: @escaping () -> Void) {
        guard playCommand.expectedCurrentItemIdentifier == (playbackCoordinator as? AVDelegatingPlaybackCoordinator)?.currentItemIdentifier else {
            completionHandler()
            return
        }
        DispatchQueue.main.async { [weak self] in
            guard let self else {
                return
            }
            if self.playbackState != .playing {
                self.play()
            }
            completionHandler()
        }
    }

    public func playbackCoordinator(_: AVDelegatingPlaybackCoordinator, didIssue pauseCommand: AVDelegatingPlaybackCoordinatorPauseCommand, completionHandler: @escaping () -> Void) {
        guard pauseCommand.expectedCurrentItemIdentifier == (playbackCoordinator as? AVDelegatingPlaybackCoordinator)?.currentItemIdentifier else {
            completionHandler()
            return
        }
        DispatchQueue.main.async { [weak self] in
            guard let self else {
                return
            }
            if self.playbackState != .paused {
                self.pause()
            }
            completionHandler()
        }
    }

    public func playbackCoordinator(_: AVDelegatingPlaybackCoordinator, didIssue seekCommand: AVDelegatingPlaybackCoordinatorSeekCommand) async {
        guard seekCommand.expectedCurrentItemIdentifier == (playbackCoordinator as? AVDelegatingPlaybackCoordinator)?.currentItemIdentifier else {
            return
        }
        // Wrap the requested time into the item's duration and skip no-op seeks.
        let seekTime = fmod(seekCommand.itemTime.seconds, duration)
        if abs(currentPlaybackTime - seekTime) < CGFLOAT_EPSILON {
            return
        }
        seek(time: seekTime) { _ in }
    }

    public func playbackCoordinator(_: AVDelegatingPlaybackCoordinator, didIssue bufferingCommand: AVDelegatingPlaybackCoordinatorBufferingCommand, completionHandler: @escaping () -> Void) {
        guard bufferingCommand.expectedCurrentItemIdentifier == (playbackCoordinator as? AVDelegatingPlaybackCoordinator)?.currentItemIdentifier else {
            completionHandler()
            return
        }
        DispatchQueue.main.async { [weak self] in
            guard let self else {
                return
            }
            // Already playable, or no deadline supplied: acknowledge immediately.
            guard self.loadState != .playable, let countDown = bufferingCommand.completionDueDate?.timeIntervalSinceNow else {
                completionHandler()
                return
            }
            self.bufferingCountDownTimer?.invalidate()
            // BUG FIX: Timer(timeInterval:repeats:block:) only creates a timer —
            // it is never attached to a run loop, so it would never fire and the
            // coordinator's completion handler would never be called, stalling the
            // shared session. scheduledTimer registers it on the main run loop
            // (we are on the main queue here).
            self.bufferingCountDownTimer = Timer.scheduledTimer(withTimeInterval: countDown, repeats: false) { _ in
                completionHandler()
            }
        }
    }
}
|
||||
|
||||
// MARK: - DisplayLayerDelegate
extension KSMEPlayer: DisplayLayerDelegate {
    /// The video output swapped its sample-buffer display layer: rebuild the
    /// Picture-in-Picture controller around the new layer.
    public func change(displayLayer: AVSampleBufferDisplayLayer) {
        guard #available(iOS 15.0, tvOS 15.0, macOS 12.0, *) else { return }
        let source = AVPictureInPictureController.ContentSource(sampleBufferDisplayLayer: displayLayer, playbackDelegate: self)
        // Replace the controller wholesale — mutating an existing controller's
        // contentSource crashes.
        _pipController = KSPictureInPictureController(contentSource: source)
        // pipController?.contentSource = source
    }
}
|
||||
|
||||
// MARK: - Recording
public extension KSMEPlayer {
    /// Starts writing the current streams to `url` (delegated to the player item).
    func startRecord(url: URL) {
        playerItem.startRecord(url: url)
    }

    /// Stops an in-progress recording.
    func stopRecord() {
        playerItem.stopRecord()
    }

    /// Misspelled legacy name kept for source compatibility; forwards to stopRecord().
    @available(*, deprecated, renamed: "stopRecord")
    func stoptRecord() {
        stopRecord()
    }
}
|
||||
881
KSPlayer-main/Sources/KSPlayer/MEPlayer/MEPlayerItem.swift
Normal file
881
KSPlayer-main/Sources/KSPlayer/MEPlayer/MEPlayerItem.swift
Normal file
@@ -0,0 +1,881 @@
|
||||
//
|
||||
// MEPlayerItem.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import FFmpegKit
|
||||
import Libavcodec
|
||||
import Libavfilter
|
||||
import Libavformat
|
||||
|
||||
/// Owns the FFmpeg demux/decode pipeline for a single URL: format context,
/// per-stream tracks, clocks, and the read/seek state machine. Reports events
/// to its `MEPlayerDelegate` (typically KSMEPlayer).
public final class MEPlayerItem: Sendable {
    private let url: URL
    private let options: KSOptions
    // Serial queue running the open/read/close operations.
    private let operationQueue = OperationQueue()
    private let condition = NSCondition()
    // FFmpeg input context; valid between open and close.
    private var formatCtx: UnsafeMutablePointer<AVFormatContext>?
    // Output context/packet used while recording to a file.
    private var outputFormatCtx: UnsafeMutablePointer<AVFormatContext>?
    private var outputPacket: UnsafeMutablePointer<AVPacket>?
    // Maps input stream index -> output stream index when recording.
    private var streamMapping = [Int: Int]()
    private var openOperation: BlockOperation?
    private var readOperation: BlockOperation?
    private var closeOperation: BlockOperation?
    private var seekingCompletionHandler: ((Bool) -> Void)?
    // No audio data available to render; when true the video clock drives playback.
    private var isAudioStalled = true
    private var audioClock = KSClock()
    private var videoClock = KSClock()
    private var isFirst = true
    private var isSeek = false
    // Every decode track (used for lifecycle: shutdown/flush of all tracks).
    private var allPlayerItemTracks = [PlayerItemTrackProtocol]()
    // Upper bound on a plausible frame gap; set from the container's flags on open.
    private var maxFrameDuration = 10.0
    private var videoAudioTracks = [CapacityProtocol]()
    private var videoTrack: SyncPlayerItemTrack<VideoVTBFrame>?
    private var audioTrack: SyncPlayerItemTrack<AudioFrame>?
    private(set) var assetTracks = [FFmpegAssetTrack]()
    private var videoAdaptation: VideoAdaptationState?
    private var videoDisplayCount = UInt8(0)
    // Whether seeks should be byte-based (set for discontinuous-timestamp formats).
    private var seekByBytes = false
    private var lastVideoDisplayTime = CACurrentMediaTime()
    public private(set) var chapters: [Chapter] = []
    // Current position relative to the stream's start time; while seeking,
    // reports the requested target instead of the clock.
    public var currentPlaybackTime: TimeInterval {
        state == .seeking ? seekTime : (mainClock().time - startTime).seconds
    }

    private var seekTime = TimeInterval(0)
    // Container start offset (AV_TIME_BASE units converted to CMTime).
    private var startTime = CMTime.zero
    public private(set) var duration: TimeInterval = 0
    // Estimated from bit_rate * duration on open; bytes (approximate).
    public private(set) var fileSize: Double = 0
    public private(set) var naturalSize = CGSize.zero
    // Setting a non-nil error transitions the state machine to .failed.
    private var error: NSError? {
        didSet {
            if error != nil {
                state = .failed
            }
        }
    }

    // Pipeline state machine; side effects (delegate callbacks, capacity timer)
    // are driven from this didSet.
    private var state = MESourceState.idle {
        didSet {
            switch state {
            case .opened:
                delegate?.sourceDidOpened()
            case .reading:
                // Start the periodic capacity check.
                timer.fireDate = Date.distantPast
            case .closed:
                timer.invalidate()
            case .failed:
                delegate?.sourceDidFailed(error: error)
                timer.fireDate = Date.distantFuture
            case .idle, .opening, .seeking, .paused, .finished:
                break
            }
        }
    }

    // 50 ms tick that re-evaluates buffer capacity while reading.
    private lazy var timer: Timer = .scheduledTimer(withTimeInterval: 0.05, repeats: true) { [weak self] _ in
        self?.codecDidChangeCapacity()
    }

    // Metadata can change in real time, so it is exposed through DynamicInfo's
    // closures rather than captured once.
    lazy var dynamicInfo = DynamicInfo { [weak self] in
        toDictionary(self?.formatCtx?.pointee.metadata)
    } bytesRead: { [weak self] in
        self?.formatCtx?.pointee.pb?.pointee.bytes_read ?? 0
    } audioBitrate: { [weak self] in
        // bitrate fields are bytes/s; convert to bits/s.
        Int(8 * (self?.audioTrack?.bitrate ?? 0))
    } videoBitrate: { [weak self] in
        Int(8 * (self?.videoTrack?.bitrate ?? 0))
    }

    // Process-wide FFmpeg initialization: network init plus a log callback that
    // routes filter-graph log lines back to the owning KSOptions (via the graph's
    // opaque pointer) and forwards everything to KSLog. Runs exactly once.
    private static var onceInitial: Void = {
        var result = avformat_network_init()
        av_log_set_callback { ptr, level, format, args in
            guard let format else {
                return
            }
            var log = String(cString: format)
            let arguments: CVaListPointer? = args
            if let arguments {
                log = NSString(format: log, arguments: arguments) as String
            }
            if let ptr {
                let avclass = ptr.assumingMemoryBound(to: UnsafePointer<AVClass>.self).pointee
                if avclass == avfilter_get_class() {
                    let context = ptr.assumingMemoryBound(to: AVFilterContext.self).pointee
                    if let opaque = context.graph?.pointee.opaque {
                        let options = Unmanaged<KSOptions>.fromOpaque(opaque).takeUnretainedValue()
                        options.filter(log: log)
                    }
                }
            }
            // Decoder not found.
            if log.hasPrefix("parser not found for codec") {
                KSLog(level: .error, log)
            }
            KSLog(level: LogLevel(rawValue: level) ?? .warning, log)
        }
    }()

    weak var delegate: MEPlayerDelegate?
    /// Creates the item but performs no I/O; opening starts with prepareToPlay().
    public init(url: URL, options: KSOptions) {
        self.url = url
        self.options = options
        // Timer stays dormant until the state machine enters .reading.
        timer.fireDate = Date.distantFuture
        operationQueue.name = "KSPlayer_" + String(describing: self).components(separatedBy: ".").last!
        operationQueue.maxConcurrentOperationCount = 1
        operationQueue.qualityOfService = .userInteractive
        _ = MEPlayerItem.onceInitial
    }

    /// Enables `track` (disabling its same-type siblings) and re-seeks to the
    /// current position so the new selection takes effect.
    /// Returns true when a seek was issued (callers then flush audio).
    func select(track: some MediaPlayerTrack) -> Bool {
        if track.isEnabled {
            return false
        }
        assetTracks.filter { $0.mediaType == track.mediaType }.forEach {
            $0.isEnabled = track === $0
        }
        guard let assetTrack = track as? FFmpegAssetTrack else {
            return false
        }
        if assetTrack.mediaType == .video {
            // Switching video may require a better-matched audio track too.
            findBestAudio(videoTrack: assetTrack)
        } else if assetTrack.mediaType == .subtitle {
            if assetTrack.isImageSubtitle {
                // Image subtitles are only re-synced by seeking when allowed.
                if !options.isSeekImageSubtitle {
                    return false
                }
            } else {
                // Text subtitles take effect without a seek.
                return false
            }
        }
        seek(time: currentPlaybackTime) { _ in
        }
        return true
    }
}
|
||||
|
||||
// MARK: private functions
|
||||
|
||||
extension MEPlayerItem {
|
||||
/// Opens the media via FFmpeg (`AVFormatContext`), probes the streams, and
/// either transitions to `.opened` (kicking off the read loop) or records the
/// failure in `error`/`state`. Runs on the dedicated "open" operation.
private func openThread() {
    // Start from a clean slate; closing a nil context is a no-op.
    avformat_close_input(&self.formatCtx)
    formatCtx = avformat_alloc_context()
    guard let formatCtx else {
        error = NSError(errorCode: .formatCreate)
        return
    }
    // The interrupt callback lets FFmpeg's blocking I/O bail out once playback
    // is finished/closed/failed. `passUnretained` is safe because shutdown()
    // clears the callback before the context is freed.
    var interruptCB = AVIOInterruptCB()
    interruptCB.opaque = Unmanaged.passUnretained(self).toOpaque()
    interruptCB.callback = { ctx -> Int32 in
        guard let ctx else {
            return 0
        }
        let formatContext = Unmanaged<MEPlayerItem>.fromOpaque(ctx).takeUnretainedValue()
        switch formatContext.state {
        case .finished, .closed, .failed:
            return 1 // non-zero tells FFmpeg to abort the blocking call
        default:
            return 0
        }
    }
    formatCtx.pointee.interrupt_callback = interruptCB
    // avformat_close_input invokes io_close2, but a custom protocol never has
    // io_close2 called for it (custom IO is closed manually in shutdown()).
    // formatCtx.pointee.io_close2 = { _, _ -> Int32 in
    //     0
    // }
    setHttpProxy()
    var avOptions = options.formatContextOptions.avOptions
    if let pb = options.process(url: url) {
        // Custom protocol: assign an avio_alloc_context-backed pb to the context.
        formatCtx.pointee.pb = pb.getContext()
    }
    let urlString: String
    if url.isFileURL {
        urlString = url.path
    } else {
        urlString = url.absoluteString
    }
    var result = avformat_open_input(&self.formatCtx, urlString, nil, &avOptions)
    av_dict_free(&avOptions)
    if result == AVError.eof.code {
        // Immediate EOF on open: treat as an already-finished source.
        state = .finished
        delegate?.sourceDidFinished()
        return
    }
    guard result == 0 else {
        error = .init(errorCode: .formatOpenInput, avErrorCode: result)
        avformat_close_input(&self.formatCtx)
        return
    }
    options.openTime = CACurrentMediaTime()
    formatCtx.pointee.flags |= AVFMT_FLAG_GENPTS
    if options.nobuffer {
        formatCtx.pointee.flags |= AVFMT_FLAG_NOBUFFER
    }
    if let probesize = options.probesize {
        formatCtx.pointee.probesize = probesize
    }
    if let maxAnalyzeDuration = options.maxAnalyzeDuration {
        formatCtx.pointee.max_analyze_duration = maxAnalyzeDuration
    }
    result = avformat_find_stream_info(formatCtx, nil)
    guard result == 0 else {
        error = .init(errorCode: .formatFindStreamInfo, avErrorCode: result)
        avformat_close_input(&self.formatCtx)
        return
    }
    // FIXME: hack, ffplay maybe should not use avio_feof() to test for the end
    formatCtx.pointee.pb?.pointee.eof_reached = 0
    let flags = formatCtx.pointee.iformat.pointee.flags
    // Containers with discontinuous timestamps (e.g. MPEG-TS) tolerate much
    // smaller frame-duration gaps before clamping.
    maxFrameDuration = flags & AVFMT_TS_DISCONT == AVFMT_TS_DISCONT ? 10.0 : 3600.0
    options.findTime = CACurrentMediaTime()
    options.formatName = String(cString: formatCtx.pointee.iformat.pointee.name)
    seekByBytes = (flags & AVFMT_NO_BYTE_SEEK == 0) && (flags & AVFMT_TS_DISCONT != 0) && options.formatName != "ogg"
    if formatCtx.pointee.start_time != Int64.min {
        startTime = CMTime(value: formatCtx.pointee.start_time, timescale: AV_TIME_BASE)
        videoClock.time = startTime
        audioClock.time = startTime
    }
    // Fix: divide in floating point so sub-second precision is preserved.
    // The previous integer division (`/ Int64(AV_TIME_BASE)`) truncated the
    // duration to whole seconds and made media shorter than one second report
    // a duration of 0 (which downstream is interpreted as a live stream).
    duration = TimeInterval(max(formatCtx.pointee.duration, 0)) / TimeInterval(AV_TIME_BASE)
    // Estimated size in bytes from the container-level bitrate.
    fileSize = Double(formatCtx.pointee.bit_rate) * duration / 8
    createCodec(formatCtx: formatCtx)
    if formatCtx.pointee.nb_chapters > 0 {
        chapters.removeAll()
        for i in 0 ..< formatCtx.pointee.nb_chapters {
            if let chapter = formatCtx.pointee.chapters[Int(i)]?.pointee {
                let timeBase = Timebase(chapter.time_base)
                let start = timeBase.cmtime(for: chapter.start).seconds
                let end = timeBase.cmtime(for: chapter.end).seconds
                let metadata = toDictionary(chapter.metadata)
                let title = metadata["title"] ?? ""
                chapters.append(Chapter(start: start, end: end, title: title))
            }
        }
    }
    if let outputURL = options.outputURL {
        startRecord(url: outputURL)
    }
    if videoTrack == nil, audioTrack == nil {
        // No decodable stream was selected.
        state = .failed
    } else {
        state = .opened
        read()
    }
}
|
||||
|
||||
/// Starts mirroring demuxed packets into a recording file at `url`.
/// Builds an output `AVFormatContext` whose streams mirror the input's first
/// audio stream, first video stream, and all subtitle streams, then writes the
/// container header. `streamMapping` translates input stream indices to output
/// stream indices for `reading()`.
/// NOTE(review): the return value of `avio_open` is not checked; a failed open
/// only surfaces later when `avformat_write_header` fails.
func startRecord(url: URL) {
    // Finish any recording already in flight before starting a new one.
    stopRecord()
    let filename = url.isFileURL ? url.path : url.absoluteString
    var ret = avformat_alloc_output_context2(&outputFormatCtx, nil, nil, filename)
    guard let outputFormatCtx, let formatCtx else {
        KSLog(NSError(errorCode: .formatOutputCreate, avErrorCode: ret))
        return
    }
    var index = 0
    // Only one audio and one video output stream are created; any additional
    // input streams of the same type are mapped onto that first one.
    var audioIndex: Int?
    var videoIndex: Int?
    let formatName = outputFormatCtx.pointee.oformat.pointee.name.flatMap { String(cString: $0) }
    for i in 0 ..< Int(formatCtx.pointee.nb_streams) {
        if let inputStream = formatCtx.pointee.streams[i] {
            let codecType = inputStream.pointee.codecpar.pointee.codec_type
            if [AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO, AVMEDIA_TYPE_SUBTITLE].contains(codecType) {
                if codecType == AVMEDIA_TYPE_AUDIO {
                    if let audioIndex {
                        streamMapping[i] = audioIndex
                        continue
                    } else {
                        audioIndex = index
                    }
                } else if codecType == AVMEDIA_TYPE_VIDEO {
                    if let videoIndex {
                        streamMapping[i] = videoIndex
                        continue
                    } else {
                        videoIndex = index
                    }
                }
                if let outStream = avformat_new_stream(outputFormatCtx, nil) {
                    streamMapping[i] = index
                    index += 1
                    // Copy codec parameters verbatim: this is a remux, not a re-encode.
                    avcodec_parameters_copy(outStream.pointee.codecpar, inputStream.pointee.codecpar)
                    // MP4/MOV cannot carry arbitrary subtitle codecs; store as mov_text.
                    if codecType == AVMEDIA_TYPE_SUBTITLE, formatName == "mp4" || formatName == "mov" {
                        outStream.pointee.codecpar.pointee.codec_id = AV_CODEC_ID_MOV_TEXT
                    }
                    // For HEVC, force the CoreMedia hevc FourCC as the codec tag;
                    // otherwise clear the tag and let the muxer choose one.
                    if inputStream.pointee.codecpar.pointee.codec_id == AV_CODEC_ID_HEVC {
                        outStream.pointee.codecpar.pointee.codec_tag = CMFormatDescription.MediaSubType.hevc.rawValue.bigEndian
                    } else {
                        outStream.pointee.codecpar.pointee.codec_tag = 0
                    }
                }
            }
        }
    }
    // NOTE(review): result ignored — see doc comment above.
    avio_open(&(outputFormatCtx.pointee.pb), filename, AVIO_FLAG_WRITE)
    ret = avformat_write_header(outputFormatCtx, nil)
    guard ret >= 0 else {
        KSLog(NSError(errorCode: .formatWriteHeader, avErrorCode: ret))
        // Frees the output context and closes its pb, and nils the property.
        avformat_close_input(&self.outputFormatCtx)
        return
    }
    // Scratch packet reused by reading() for every mirrored packet.
    outputPacket = av_packet_alloc()
}
|
||||
|
||||
/// Builds an `FFmpegAssetTrack` for every stream, selects the initial video
/// and audio streams (honoring the `wantedVideo`/`wantedAudio` callbacks and
/// `av_find_best_stream`), and creates the decode tracks that feed the
/// renderers. Also sets up bitrate adaptation when multiple video variants exist.
private func createCodec(formatCtx: UnsafeMutablePointer<AVFormatContext>) {
    allPlayerItemTracks.removeAll()
    assetTracks.removeAll()
    videoAdaptation = nil
    videoTrack = nil
    audioTrack = nil
    videoAudioTracks.removeAll()
    assetTracks = (0 ..< Int(formatCtx.pointee.nb_streams)).compactMap { i in
        if let coreStream = formatCtx.pointee.streams[i] {
            // Discard every stream by default; selected tracks are re-enabled below.
            coreStream.pointee.discard = AVDISCARD_ALL
            if let assetTrack = FFmpegAssetTrack(stream: coreStream) {
                if assetTrack.mediaType == .subtitle {
                    // Subtitles always decode synchronously with a large queue.
                    let subtitle = SyncPlayerItemTrack<SubtitleFrame>(mediaType: .subtitle, frameCapacity: 255, options: options)
                    assetTrack.subtitle = subtitle
                    allPlayerItemTracks.append(subtitle)
                }
                assetTrack.seekByBytes = seekByBytes
                return assetTrack
            }
        }
        return nil
    }
    var videoIndex: Int32 = -1
    if !options.videoDisable {
        let videos = assetTracks.filter { $0.mediaType == .video }
        let wantedStreamNb: Int32
        if !videos.isEmpty, let index = options.wantedVideo(tracks: videos) {
            wantedStreamNb = videos[index].trackID
        } else {
            wantedStreamNb = -1
        }
        videoIndex = av_find_best_stream(formatCtx, AVMEDIA_TYPE_VIDEO, wantedStreamNb, -1, nil, 0)
        if let first = videos.first(where: { $0.trackID == videoIndex }) {
            first.isEnabled = true
            let rotation = first.rotation
            if rotation > 0, options.autoRotate {
                // Rotation is applied through FFmpeg video filters, which
                // requires software decoding.
                options.hardwareDecode = false
                if abs(rotation - 90) <= 1 {
                    options.videoFilters.append("transpose=clock")
                } else if abs(rotation - 180) <= 1 {
                    options.videoFilters.append("hflip")
                    options.videoFilters.append("vflip")
                } else if abs(rotation - 270) <= 1 {
                    options.videoFilters.append("transpose=cclock")
                } else if abs(rotation) > 1 {
                    options.videoFilters.append("rotate=\(rotation)*PI/180")
                }
            }
            // 90°/270° rotations swap width and height.
            naturalSize = abs(rotation - 90) <= 1 || abs(rotation - 270) <= 1 ? first.naturalSize.reverse : first.naturalSize
            options.process(assetTrack: first)
            let frameCapacity = options.videoFrameMaxCount(fps: first.nominalFrameRate, naturalSize: naturalSize, isLive: duration == 0)
            let track = options.syncDecodeVideo ? SyncPlayerItemTrack<VideoVTBFrame>(mediaType: .video, frameCapacity: frameCapacity, options: options) : AsyncPlayerItemTrack<VideoVTBFrame>(mediaType: .video, frameCapacity: frameCapacity, options: options)
            track.delegate = self
            allPlayerItemTracks.append(track)
            videoTrack = track
            // MJPEG streams are excluded from the buffering/loading-state
            // tracks (presumably embedded cover art — unconfirmed).
            if first.codecpar.codec_id != AV_CODEC_ID_MJPEG {
                videoAudioTracks.append(track)
            }
            let bitRates = videos.map(\.bitRate).filter {
                $0 > 0
            }
            if bitRates.count > 1, options.videoAdaptable {
                let bitRateState = VideoAdaptationState.BitRateState(bitRate: first.bitRate, time: CACurrentMediaTime())
                videoAdaptation = VideoAdaptationState(bitRates: bitRates.sorted(by: <), duration: duration, fps: first.nominalFrameRate, bitRateStates: [bitRateState])
            }
        }
    }
    let audios = assetTracks.filter { $0.mediaType == .audio }
    let wantedStreamNb: Int32
    if !audios.isEmpty, let index = options.wantedAudio(tracks: audios) {
        wantedStreamNb = audios[index].trackID
    } else {
        wantedStreamNb = -1
    }
    let index = av_find_best_stream(formatCtx, AVMEDIA_TYPE_AUDIO, wantedStreamNb, videoIndex, nil, 0)
    // NOTE(review): `index > 0` (not >= 0) means a best-stream result of 0
    // falls through to the first audio track — confirm this is intentional.
    if let first = audios.first(where: {
        index > 0 ? $0.trackID == index : true
    }), first.codecpar.codec_id != AV_CODEC_ID_NONE {
        first.isEnabled = true
        options.process(assetTrack: first)
        // Compare fps across all audio tracks, because TrueHD reports an fps
        // of 1200 — far away from every other track.
        let fps = audios.map(\.nominalFrameRate).max() ?? 44
        let frameCapacity = options.audioFrameMaxCount(fps: fps, channelCount: Int(first.audioDescriptor?.audioFormat.channelCount ?? 2))
        let track = options.syncDecodeAudio ? SyncPlayerItemTrack<AudioFrame>(mediaType: .audio, frameCapacity: frameCapacity, options: options) : AsyncPlayerItemTrack<AudioFrame>(mediaType: .audio, frameCapacity: frameCapacity, options: options)
        track.delegate = self
        allPlayerItemTracks.append(track)
        audioTrack = track
        videoAudioTracks.append(track)
        isAudioStalled = false
    }
}
|
||||
|
||||
/// Enqueues the high-priority read operation that runs `readThread()` on the
/// item's operation queue.
private func read() {
    let operation = BlockOperation { [weak self] in
        guard let self else { return }
        // Name the worker thread after the queue for easier debugging, and
        // give it the configured stack size.
        Thread.current.name = (self.operationQueue.name ?? "") + "_read"
        Thread.current.stackSize = KSOptions.stackSize
        self.readThread()
    }
    operation.queuePriority = .veryHigh
    operation.qualityOfService = .userInteractive
    readOperation = operation
    operationQueue.addOperation(operation)
}
|
||||
|
||||
/// Body of the read operation: applies the optional initial start-time seek,
/// starts every track's decoder, then drives the paused/seeking/reading state
/// machine until the source leaves those states.
private func readThread() {
    if state == .opened {
        if options.startPlayTime > 0 {
            // Jump to the requested start position before the first read.
            let timestamp = startTime + CMTime(seconds: options.startPlayTime)
            let flags = seekByBytes ? AVSEEK_FLAG_BYTE : 0
            let seekStartTime = CACurrentMediaTime()
            // NOTE(review): the result of this initial seek is never checked.
            let result = avformat_seek_file(formatCtx, -1, Int64.min, timestamp.value, Int64.max, flags)
            audioClock.time = timestamp
            videoClock.time = timestamp
            KSLog("start PlayTime: \(timestamp.seconds) spend Time: \(CACurrentMediaTime() - seekStartTime)")
        }
        state = .reading
    }
    allPlayerItemTracks.forEach { $0.decode() }
    while [MESourceState.paused, .seeking, .reading].contains(state) {
        if state == .paused {
            // Parked until resume()/shutdown()/seek() signals the condition.
            condition.wait()
        }
        if state == .seeking {
            let seekToTime = seekTime
            let time = mainClock().time
            // Distance to move, in whole seconds (converted to bytes below
            // when seeking by byte position).
            var increase = Int64(seekTime + startTime.seconds - time.seconds)
            var seekFlags = options.seekFlags
            let timeStamp: Int64
            if seekByBytes {
                seekFlags |= AVSEEK_FLAG_BYTE
                if let bitRate = formatCtx?.pointee.bit_rate {
                    increase = increase * bitRate / 8
                } else {
                    // Fallback byte rate when the container bitrate is unknown.
                    increase *= 180_000
                }
                // Prefer the clocks' byte positions; fall back to the IO cursor.
                // NOTE(review): position starts at -1, so the first branch is
                // always taken — equivalent to `var position = videoClock.position`.
                var position = Int64(-1)
                if position < 0 {
                    position = videoClock.position
                }
                if position < 0 {
                    position = audioClock.position
                }
                if position < 0 {
                    position = avio_tell(formatCtx?.pointee.pb)
                }
                timeStamp = position + increase
            } else {
                // NOTE(review): Int64(time.seconds) truncates the fractional
                // part of the current clock before scaling to AV_TIME_BASE.
                increase *= Int64(AV_TIME_BASE)
                timeStamp = Int64(time.seconds) * Int64(AV_TIME_BASE) + increase
            }
            // Constrain the seek to the direction requested (±2 slack).
            let seekMin = increase > 0 ? timeStamp - increase + 2 : Int64.min
            let seekMax = increase < 0 ? timeStamp - increase - 2 : Int64.max
            // can not seek to key frame
            let seekStartTime = CACurrentMediaTime()
            var result = avformat_seek_file(formatCtx, -1, seekMin, timeStamp, seekMax, seekFlags)
            // var result = av_seek_frame(formatCtx, -1, timeStamp, seekFlags)
            // When seeking before the beginning of the file, and seeking fails,
            // try again without the backwards flag to make it seek to the
            // beginning.
            if result < 0, seekFlags & AVSEEK_FLAG_BACKWARD == AVSEEK_FLAG_BACKWARD {
                KSLog("seek to \(seekToTime) failed. seekFlags remove BACKWARD")
                options.seekFlags &= ~AVSEEK_FLAG_BACKWARD
                seekFlags &= ~AVSEEK_FLAG_BACKWARD
                result = avformat_seek_file(formatCtx, -1, seekMin, timeStamp, seekMax, seekFlags)
            }
            KSLog("seek to \(seekToTime) spend Time: \(CACurrentMediaTime() - seekStartTime)")
            if state == .closed {
                break
            }
            if seekToTime != seekTime {
                // A newer seek request arrived while this one ran; redo.
                continue
            }
            isSeek = true
            allPlayerItemTracks.forEach { $0.seek(time: seekToTime) }
            // Completion handler is delivered on the main queue.
            DispatchQueue.main.async { [weak self] in
                guard let self else { return }
                self.seekingCompletionHandler?(result >= 0)
                self.seekingCompletionHandler = nil
            }
            audioClock.time = CMTime(seconds: seekToTime, preferredTimescale: time.timescale) + startTime
            videoClock.time = CMTime(seconds: seekToTime, preferredTimescale: time.timescale) + startTime
            state = .reading
        } else if state == .reading {
            // Drain autoreleased ObjC temporaries created per packet.
            autoreleasepool {
                _ = reading()
            }
        }
    }
}
|
||||
|
||||
/// Reads one packet from the demuxer and routes it to the matching track's
/// queue, mirroring it into the recording muxer when one is active.
/// Handles EOF (finish or gapless loop) and read errors.
/// - Returns: the raw `av_read_frame` result (0 on success), or 0 when the
///   packet was consumed without needing further handling.
private func reading() -> Int32 {
    let packet = Packet()
    guard let corePacket = packet.corePacket else {
        return 0
    }
    let readResult = av_read_frame(formatCtx, corePacket)
    if state == .closed {
        return 0
    }
    if readResult == 0 {
        // While recording, duplicate the packet into the output muxer with
        // timestamps rescaled from the input to the output time base.
        if let outputFormatCtx, let formatCtx {
            let index = Int(corePacket.pointee.stream_index)
            if let outputIndex = streamMapping[index],
               let inputTb = formatCtx.pointee.streams[index]?.pointee.time_base,
               let outputTb = outputFormatCtx.pointee.streams[outputIndex]?.pointee.time_base,
               let outputPacket
            {
                av_packet_ref(outputPacket, corePacket)
                outputPacket.pointee.stream_index = Int32(outputIndex)
                av_packet_rescale_ts(outputPacket, inputTb, outputTb)
                // Let the muxer recompute the byte position.
                outputPacket.pointee.pos = -1
                let ret = av_interleaved_write_frame(outputFormatCtx, outputPacket)
                if ret < 0 {
                    KSLog("can not av_interleaved_write_frame")
                }
            }
        }
        if corePacket.pointee.size <= 0 {
            return 0
        }
        let first = assetTracks.first { $0.trackID == corePacket.pointee.stream_index }
        if let first, first.isEnabled {
            packet.assetTrack = first
            if first.mediaType == .video {
                // Record when the first video packet arrives (startup metrics).
                if options.readVideoTime == 0 {
                    options.readVideoTime = CACurrentMediaTime()
                }
                videoTrack?.putPacket(packet: packet)
            } else if first.mediaType == .audio {
                if options.readAudioTime == 0 {
                    options.readAudioTime = CACurrentMediaTime()
                }
                audioTrack?.putPacket(packet: packet)
            } else {
                first.subtitle?.putPacket(packet: packet)
            }
        }
    } else {
        if readResult == AVError.eof.code || avio_feof(formatCtx?.pointee.pb) > 0 {
            if options.isLoopPlay, allPlayerItemTracks.allSatisfy({ !$0.isLoopModel }) {
                // Gapless loop: flip every track into loop mode and rewind.
                allPlayerItemTracks.forEach { $0.isLoopModel = true }
                _ = av_seek_frame(formatCtx, -1, startTime.value, AVSEEK_FLAG_BACKWARD)
            } else {
                allPlayerItemTracks.forEach { $0.isEndOfFile = true }
                state = .finished
            }
        } else {
            // if IS_AVERROR_INVALIDDATA(readResult)
            error = .init(errorCode: .readFrame, avErrorCode: readResult)
        }
    }
    return readResult
}
|
||||
|
||||
/// Pauses the read loop; only meaningful while actively reading.
private func pause() {
    guard state == .reading else { return }
    state = .paused
}
|
||||
|
||||
/// Resumes a paused read loop and wakes the thread waiting on `condition`.
private func resume() {
    guard state == .paused else { return }
    state = .reading
    condition.signal()
}
|
||||
}
|
||||
|
||||
// MARK: MediaPlayback
|
||||
|
||||
extension MEPlayerItem: MediaPlayback {
|
||||
/// Whether the underlying IO supports seeking. False without an open format
/// context; true when the context has no IO context at all.
var seekable: Bool {
    guard let formatCtx else {
        return false
    }
    guard let ioContext = formatCtx.pointee.pb else {
        return true
    }
    return ioContext.pointee.seekable > 0
}
|
||||
|
||||
/// Kicks off asynchronous opening of the media: enqueues a high-priority
/// operation that runs `openThread()` on the item's operation queue.
public func prepareToPlay() {
    state = .opening
    let operation = BlockOperation { [weak self] in
        guard let self else { return }
        // Name the worker thread after the queue for easier debugging, and
        // give it the configured stack size.
        Thread.current.name = (self.operationQueue.name ?? "") + "_open"
        Thread.current.stackSize = KSOptions.stackSize
        self.openThread()
    }
    operation.queuePriority = .veryHigh
    operation.qualityOfService = .userInteractive
    openOperation = operation
    operationQueue.addOperation(operation)
}
|
||||
|
||||
/// Tears the item down: frees the recording packet, finalizes any recording,
/// then enqueues a close operation that shuts down the tracks and frees the
/// FFmpeg contexts after the in-flight read/open operation is cancelled.
/// Safe to call repeatedly; only the first call past `.closed` does work.
public func shutdown() {
    guard state != .closed else { return }
    state = .closed
    av_packet_free(&outputPacket)
    stopRecord()
    // Deliberate strong capture of self: keep the item alive until the close
    // operation has finished, and only release it then.
    let closeOperation = BlockOperation {
        Thread.current.name = (self.operationQueue.name ?? "") + "_close"
        self.allPlayerItemTracks.forEach { $0.shutdown() }
        KSLog("清空formatCtx")
        // Only custom-IO contexts need the Swift wrapper closed and released
        // by hand here (it was retained in AbstractAVIOContext.getContext()).
        if let formatCtx = self.formatCtx, (formatCtx.pointee.flags & AVFMT_FLAG_CUSTOM_IO) != 0, let opaque = formatCtx.pointee.pb.pointee.opaque {
            let value = Unmanaged<AbstractAVIOContext>.fromOpaque(opaque).takeRetainedValue()
            value.close()
        }
        // Do not free pb ourselves, or playing the same URL a second time breaks.
        // self.formatCtx?.pointee.pb = nil
        // Detach the interrupt callback before freeing so FFmpeg cannot call
        // back into a dying object.
        self.formatCtx?.pointee.interrupt_callback.opaque = nil
        self.formatCtx?.pointee.interrupt_callback.callback = nil
        avformat_close_input(&self.formatCtx)
        avformat_close_input(&self.outputFormatCtx)
        self.duration = 0
        self.closeOperation = nil
        self.operationQueue.cancelAllOperations()
    }
    closeOperation.queuePriority = .veryHigh
    closeOperation.qualityOfService = .userInteractive
    // Run the close only after the read (or open) operation has ended.
    if let readOperation {
        readOperation.cancel()
        closeOperation.addDependency(readOperation)
    } else if let openOperation {
        openOperation.cancel()
        closeOperation.addDependency(openOperation)
    }
    operationQueue.addOperation(closeOperation)
    // Wake the read thread if it is parked in the paused wait.
    condition.signal()
    if options.syncDecodeVideo || options.syncDecodeAudio {
        // Sync decoding runs on the read thread itself; shut the tracks down
        // from another queue so that thread can unblock.
        DispatchQueue.global().async { [weak self] in
            self?.allPlayerItemTracks.forEach { $0.shutdown() }
        }
    }
    self.closeOperation = closeOperation
}
|
||||
|
||||
/// Finalizes an in-progress recording by writing the container trailer.
/// NOTE(review): the output context is not freed or nil-ed here (that happens
/// in shutdown()), so calling startRecord(url:) again leaks the previous
/// context when avformat_alloc_output_context2 overwrites the pointer, and
/// reading() may keep writing packets after the trailer — confirm intended.
func stopRecord() {
    if let outputFormatCtx {
        av_write_trailer(outputFormatCtx)
    }
}
|
||||
|
||||
/// Requests a seek to `time` (seconds, relative to the start of the media).
/// The completion fires on the main queue once the demuxer-level seek lands;
/// a request made while another seek is in flight replaces its handler.
public func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void)) {
    switch state {
    case .reading, .paused:
        seekTime = time
        state = .seeking
        seekingCompletionHandler = completion
        // Wake the read thread so it notices the state change, and flush
        // every track immediately.
        condition.broadcast()
        allPlayerItemTracks.forEach { $0.seek(time: time) }
    case .finished:
        seekTime = time
        state = .seeking
        seekingCompletionHandler = completion
        // The read loop has exited; restart it to perform the seek.
        read()
    case .seeking:
        // Coalesce with the in-flight seek: just retarget it.
        seekTime = time
        seekingCompletionHandler = completion
    default:
        break
    }
    isAudioStalled = audioTrack == nil
}
|
||||
}
|
||||
|
||||
extension MEPlayerItem: CodecCapacityDelegate {
|
||||
/// Called whenever a track's buffer level changes. Publishes the new loading
/// state to the delegate and throttles the demuxer: pause reading when the
/// buffer exceeds `maxBufferDuration`, resume when it drops below half.
func codecDidChangeCapacity() {
    let loadingState = options.playable(capacitys: videoAudioTracks, isFirst: isFirst, isSeek: isSeek)
    delegate?.sourceDidChange(loadingState: loadingState)
    guard loadingState.isPlayable else {
        // Not enough buffered yet: keep reading and let adaptation react.
        resume()
        adaptableVideo(loadingState: loadingState)
        return
    }
    isFirst = false
    isSeek = false
    let buffered = loadingState.loadedTime
    if buffered > options.maxBufferDuration {
        adaptableVideo(loadingState: loadingState)
        pause()
    } else if buffered < options.maxBufferDuration / 2 {
        resume()
    }
}
|
||||
|
||||
/// Called when a track has decoded and rendered its last frame. Once every
/// audio/video track is fully drained, notifies the delegate and, in loop
/// mode, rewinds for the next pass.
func codecDidFinished(track: some CapacityProtocol) {
    if track.mediaType == .audio {
        isAudioStalled = true
    }
    let drained = videoAudioTracks.allSatisfy {
        $0.isEndOfFile && $0.frameCount == 0 && $0.packetCount == 0
    }
    guard drained else { return }
    delegate?.sourceDidFinished()
    timer.fireDate = Date.distantFuture
    guard options.isLoopPlay else { return }
    isAudioStalled = audioTrack == nil
    audioTrack?.isLoopModel = false
    videoTrack?.isLoopModel = false
    if state == .finished {
        seek(time: 0) { _ in }
    }
}
|
||||
|
||||
/// Feeds buffering statistics into the bitrate-adaptation policy and, when
/// the policy asks for a switch, enables the video track variant with the new
/// bitrate (and re-picks the matching audio track).
private func adaptableVideo(loadingState: LoadingState) {
    // Adaptation is meaningless while disabled, unavailable, at EOF, or seeking.
    if options.videoDisable || videoAdaptation == nil || loadingState.isEndOfFile || loadingState.isSeek || state == .seeking {
        return
    }
    guard let track = videoTrack else {
        return
    }
    videoAdaptation?.loadedCount = track.packetCount + track.frameCount
    videoAdaptation?.currentPlaybackTime = currentPlaybackTime
    videoAdaptation?.isPlayable = loadingState.isPlayable
    guard let (oldBitRate, newBitrate) = options.adaptable(state: videoAdaptation), oldBitRate != newBitrate,
          let newFFmpegAssetTrack = assetTracks.first(where: { $0.mediaType == .video && $0.bitRate == newBitrate })
    else {
        return
    }
    assetTracks.first { $0.mediaType == .video && $0.bitRate == oldBitRate }?.isEnabled = false
    newFFmpegAssetTrack.isEnabled = true
    // Keep the audio selection consistent with the new video stream.
    findBestAudio(videoTrack: newFFmpegAssetTrack)
    let bitRateState = VideoAdaptationState.BitRateState(bitRate: newBitrate, time: CACurrentMediaTime())
    videoAdaptation?.bitRateStates.append(bitRateState)
    delegate?.sourceDidChange(oldBitRate: oldBitRate, newBitrate: newBitrate)
}
|
||||
|
||||
/// Re-picks the audio stream that FFmpeg considers the best match for the
/// given video track, switching the enabled flags when it differs from the
/// currently enabled one. Only runs while bitrate adaptation is active.
private func findBestAudio(videoTrack: FFmpegAssetTrack) {
    guard videoAdaptation != nil,
          let current = assetTracks.first(where: { $0.mediaType == .audio && $0.isEnabled })
    else { return }
    let best = av_find_best_stream(formatCtx, AVMEDIA_TYPE_AUDIO, -1, videoTrack.trackID, nil, 0)
    guard best != current.trackID else { return }
    current.isEnabled = false
    assetTracks.first { $0.mediaType == .audio && $0.trackID == best }?.isEnabled = true
}
|
||||
}
|
||||
|
||||
extension MEPlayerItem: OutputRenderSourceDelegate {
|
||||
/// The master clock for A/V sync: audio when it is flowing, otherwise video.
func mainClock() -> KSClock {
    if isAudioStalled {
        return videoClock
    }
    return audioClock
}
|
||||
|
||||
/// Advances the video clock from the renderer and maintains the measured
/// display-FPS statistic over one-second windows.
public func setVideo(time: CMTime, position: Int64) {
    videoClock.time = time
    videoClock.position = position
    videoDisplayCount += 1
    let elapsed = videoClock.lastMediaTime - lastVideoDisplayTime
    if elapsed > 1 {
        // A second has passed: publish frames-per-elapsed and reset the window.
        dynamicInfo.displayFPS = Double(videoDisplayCount) / elapsed
        videoDisplayCount = 0
        lastVideoDisplayTime = videoClock.lastMediaTime
    }
}
|
||||
|
||||
/// Advances the audio clock from the audio renderer.
/// - Parameters:
///   - time: presentation time of the most recently rendered audio frame.
///   - position: byte position of that frame in the source (used by byte seeks).
public func setAudio(time: CMTime, position: Int64) {
    // print("[audio] setAudio: \(time.seconds)")
    // Hopping to the main thread makes playback smoother.
    runOnMainThread {
        self.audioClock.time = time
        self.audioClock.position = position
    }
}
|
||||
|
||||
/// Pops the next video frame for display, applying the clock-sync policy.
/// - Parameter force: when true, return the next frame unconditionally
///   (no sync decision, nothing dropped).
/// - Returns: the frame to display, or nil when the current frame should remain.
public func getVideoOutputRender(force: Bool) -> VideoVTBFrame? {
    guard let videoTrack else {
        return nil
    }
    var type: ClockProcessType = force ? .next : .remain
    // The predicate runs inside the queue's pop and, as a side effect, writes
    // the sync decision into the captured `type` for the switch below.
    let predicate: ((VideoVTBFrame, Int) -> Bool)? = force ? nil : { [weak self] frame, count -> Bool in
        guard let self else { return true }
        (self.dynamicInfo.audioVideoSyncDiff, type) = self.options.videoClockSync(main: self.mainClock(), nextVideoTime: frame.seconds, fps: Double(frame.fps), frameCount: count)
        return type != .remain
    }
    let frame = videoTrack.getOutputRender(where: predicate)
    switch type {
    case .remain:
        break
    case .next:
        break
    case .dropNextFrame:
        // Behind the clock: discard one decoded frame.
        if videoTrack.getOutputRender(where: nil) != nil {
            dynamicInfo.droppedVideoFrameCount += 1
        }
    case .flush:
        // Drop everything currently decoded.
        let count = videoTrack.outputRenderQueue.count
        videoTrack.outputRenderQueue.flush()
        dynamicInfo.droppedVideoFrameCount += UInt32(count)
    case .seek:
        // Resynchronize the track to the master clock.
        videoTrack.outputRenderQueue.flush()
        videoTrack.seekTime = mainClock().time.seconds
    case .dropNextPacket:
        // Drop one undecoded non-key packet (async decoding only).
        if let videoTrack = videoTrack as? AsyncPlayerItemTrack {
            let packet = videoTrack.packetQueue.pop { item, _ -> Bool in
                !item.isKeyFrame
            }
            if packet != nil {
                dynamicInfo.droppedVideoPacketCount += 1
            }
        }
    case .dropGOPPacket:
        // Drop queued non-key packets until a keyframe (or emptiness) is hit.
        if let videoTrack = videoTrack as? AsyncPlayerItemTrack {
            var packet: Packet? = nil
            repeat {
                packet = videoTrack.packetQueue.pop { item, _ -> Bool in
                    !item.isKeyFrame
                }
                if packet != nil {
                    dynamicInfo.droppedVideoPacketCount += 1
                }
            } while packet != nil
        }
    }
    return frame
}
|
||||
|
||||
/// Pops the next decoded audio frame for rendering, feeding it to the active
/// audio recognizer (if any) on the way out.
public func getAudioOutputRender() -> AudioFrame? {
    guard let frame = audioTrack?.getOutputRender(where: nil) else {
        return nil
    }
    SubtitleModel.audioRecognizes.first(where: \.isEnabled)?.append(frame: frame)
    return frame
}
|
||||
}
|
||||
|
||||
extension AbstractAVIOContext {
    /// Wraps this Swift IO object in an FFmpeg `AVIOContext` whose read/write/
    /// seek callbacks trampoline back into the object.
    func getContext() -> UnsafeMutablePointer<AVIOContext> {
        // We must retain the ioContext (passRetained) or it would be released;
        // shutdown() balances this with takeRetainedValue().
        avio_alloc_context(av_malloc(Int(bufferSize)), bufferSize, writable ? 1 : 0, Unmanaged.passRetained(self).toOpaque()) { opaque, buffer, size -> Int32 in
            // Read trampoline.
            let value = Unmanaged<AbstractAVIOContext>.fromOpaque(opaque!).takeUnretainedValue()
            let ret = value.read(buffer: buffer, size: size)
            return Int32(ret)
        } _: { opaque, buffer, size -> Int32 in
            // Write trampoline.
            let value = Unmanaged<AbstractAVIOContext>.fromOpaque(opaque!).takeUnretainedValue()
            let ret = value.write(buffer: buffer, size: size)
            return Int32(ret)
        } _: { opaque, offset, whence -> Int64 in
            // Seek trampoline; AVSEEK_SIZE asks for the total stream size
            // instead of moving the cursor.
            let value = Unmanaged<AbstractAVIOContext>.fromOpaque(opaque!).takeUnretainedValue()
            if whence == AVSEEK_SIZE {
                return value.fileSize()
            }
            return value.seek(offset: offset, whence: whence)
        }
    }
}
|
||||
316
KSPlayer-main/Sources/KSPlayer/MEPlayer/MEPlayerItemTrack.swift
Normal file
316
KSPlayer-main/Sources/KSPlayer/MEPlayer/MEPlayerItemTrack.swift
Normal file
@@ -0,0 +1,316 @@
|
||||
//
|
||||
// Decoder.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
import AVFoundation
|
||||
import CoreMedia
|
||||
import Libavformat
|
||||
|
||||
/// Contract for a demuxed track that buffers packets, decodes them, and hands
/// frames to a renderer.
protocol PlayerItemTrackProtocol: CapacityProtocol, AnyObject {
    init(mediaType: AVFoundation.AVMediaType, frameCapacity: UInt8, options: KSOptions)
    // Whether gapless loop playback is active for this track.
    var isLoopModel: Bool { get set }
    // Set once the demuxer has delivered the last packet for this track.
    var isEndOfFile: Bool { get set }
    var delegate: CodecCapacityDelegate? { get set }
    // Starts (or restarts) decoding.
    func decode()
    // Prepares the track for playback from `time` (flushes internal queues).
    func seek(time: TimeInterval)
    // Enqueues one demuxed packet for decoding.
    func putPacket(packet: Packet)
    // func getOutputRender<Frame: ObjectQueueItem>(where predicate: ((Frame) -> Bool)?) -> Frame?
    // Stops decoding and releases queue resources.
    func shutdown()
}
|
||||
|
||||
/// A track that decodes packets synchronously on the caller's (read) thread
/// and buffers the resulting frames in `outputRenderQueue` for the renderer.
/// `AsyncPlayerItemTrack` subclasses this to move decoding onto its own queue.
class SyncPlayerItemTrack<Frame: MEFrame>: PlayerItemTrackProtocol, CustomStringConvertible {
    // Target time of an accurate seek; frames before it are discarded.
    var seekTime = 0.0
    fileprivate let options: KSOptions
    // One decoder per FFmpeg stream ID feeding this track.
    fileprivate var decoderMap = [Int32: DecodeProtocol]()
    fileprivate var state = MECodecState.idle {
        didSet {
            if state == .finished {
                seekTime = 0
            }
        }
    }

    var isEndOfFile: Bool = false
    // Sync decoding holds no packet backlog.
    var packetCount: Int { 0 }
    let description: String
    weak var delegate: CodecCapacityDelegate?
    let mediaType: AVFoundation.AVMediaType
    let outputRenderQueue: CircularBuffer<Frame>
    var isLoopModel = false
    var frameCount: Int { outputRenderQueue.count }
    var frameMaxCount: Int {
        outputRenderQueue.maxCount
    }

    var fps: Float {
        outputRenderQueue.fps
    }

    required init(mediaType: AVFoundation.AVMediaType, frameCapacity: UInt8, options: KSOptions) {
        self.options = options
        self.mediaType = mediaType
        description = mediaType.rawValue
        // The default queue size tracks the frame rate; dividing by 4 proved
        // optimal in testing.
        if mediaType == .audio {
            outputRenderQueue = CircularBuffer(initialCapacity: Int(frameCapacity), expanding: false)
        } else if mediaType == .video {
            outputRenderQueue = CircularBuffer(initialCapacity: Int(frameCapacity), sorted: true, expanding: false)
        } else {
            // Some image subtitles arrive out of order, so keep them sorted.
            outputRenderQueue = CircularBuffer(initialCapacity: Int(frameCapacity), sorted: true)
        }
    }

    // Starts (or restarts) accepting packets for decode.
    func decode() {
        isEndOfFile = false
        state = .decoding
    }

    // Flushes buffered frames and arms the accurate-seek frame filter.
    func seek(time: TimeInterval) {
        if options.isAccurateSeek {
            seekTime = time
        } else {
            seekTime = 0
        }
        isEndOfFile = false
        state = .flush
        outputRenderQueue.flush()
        isLoopModel = false
    }

    // Decodes one packet; a pending flush is applied to the codecs first.
    func putPacket(packet: Packet) {
        if state == .flush {
            decoderMap.values.forEach { $0.doFlushCodec() }
            state = .decoding
        }
        if state == .decoding {
            doDecode(packet: packet)
        }
    }

    /// Pops the next frame matching `predicate` (or simply the next one when
    /// nil). When the queue is empty after the track finished, notifies the
    /// delegate that this track is fully drained.
    func getOutputRender(where predicate: ((Frame, Int) -> Bool)?) -> Frame? {
        let outputFecthRender = outputRenderQueue.pop(where: predicate)
        if outputFecthRender == nil {
            if state == .finished, frameCount == 0 {
                delegate?.codecDidFinished(track: self)
            }
        }
        return outputFecthRender
    }

    func shutdown() {
        if state == .idle {
            return
        }
        state = .closed
        outputRenderQueue.shutdown()
    }

    // Rolling byte counter and window start for the bitrate estimate below.
    private var lastPacketBytes = Int32(0)
    private var lastPacketSeconds = Double(-1)
    // Estimated incoming bitrate (bytes per second over ~1s keyframe windows).
    var bitrate = Double(0)
    /// Updates the bitrate estimate, then hands the packet to the per-stream
    /// decoder; decoded frames that pass the seek filter are pushed to the
    /// render queue. On hardware (VideoToolbox) failure, falls back to FFmpeg
    /// software decoding and retries the same packet.
    fileprivate func doDecode(packet: Packet) {
        if packet.isKeyFrame, packet.assetTrack.mediaType != .subtitle {
            let seconds = packet.seconds
            let diff = seconds - lastPacketSeconds
            if lastPacketSeconds < 0 || diff < 0 {
                // First packet or timestamp went backwards: restart the window.
                bitrate = 0
                lastPacketBytes = 0
                lastPacketSeconds = seconds
            } else if diff > 1 {
                bitrate = Double(lastPacketBytes) / diff
                lastPacketBytes = 0
                lastPacketSeconds = seconds
            }
        }
        lastPacketBytes += packet.size
        let decoder = decoderMap.value(for: packet.assetTrack.trackID, default: makeDecode(assetTrack: packet.assetTrack))
        decoder.decodeFrame(from: packet) { [weak self] result in
            guard let self else {
                return
            }
            do {
                let frame = try result.get()
                if self.state == .flush || self.state == .closed {
                    return
                }
                if self.seekTime > 0 {
                    // Accurate seek: drop frames that end before the target.
                    let timestamp = frame.timestamp + frame.duration
                    if timestamp <= 0 || frame.timebase.cmtime(for: timestamp).seconds < self.seekTime {
                        return
                    } else {
                        self.seekTime = 0.0
                    }
                }
                if let frame = frame as? Frame {
                    self.outputRenderQueue.push(frame)
                    self.outputRenderQueue.fps = packet.assetTrack.nominalFrameRate
                }
            } catch {
                KSLog("Decoder did Failed : \(error)")
                if decoder is VideoToolboxDecode {
                    // Hardware decode failed: switch to software and retry.
                    decoder.shutdown()
                    self.decoderMap[packet.assetTrack.trackID] = FFmpegDecode(assetTrack: packet.assetTrack, options: self.options)
                    KSLog("VideoCodec switch to software decompression")
                    self.doDecode(packet: packet)
                } else {
                    self.state = .failed
                }
            }
        }
        // Startup metrics: record when the first packet of each type decodes.
        if options.decodeAudioTime == 0, mediaType == .audio {
            options.decodeAudioTime = CACurrentMediaTime()
        }
        if options.decodeVideoTime == 0, mediaType == .video {
            options.decodeVideoTime = CACurrentMediaTime()
        }
    }
}
|
||||
|
||||
/// A player-item track that decodes on its own serial background thread,
/// pulling packets from a blocking `packetQueue` instead of decoding inline.
final class AsyncPlayerItemTrack<Frame: MEFrame>: SyncPlayerItemTrack<Frame> {
    // Serial queue hosting the single long-running decode operation.
    private let operationQueue = OperationQueue()
    private var decodeOperation: BlockOperation!
    // Packet queue used for gapless (seamless) loop playback: while looping,
    // new packets accumulate here and are swapped in when the loop restarts.
    private var loopPacketQueue: CircularBuffer<Packet>?
    var packetQueue = CircularBuffer<Packet>()
    override var packetCount: Int { packetQueue.count }
    override var isLoopModel: Bool {
        didSet {
            if isLoopModel {
                // Entering loop mode: buffer upcoming packets separately and let
                // the current pass drain to its end.
                loopPacketQueue = CircularBuffer<Packet>()
                isEndOfFile = true
            } else {
                if let loopPacketQueue {
                    // Leaving loop mode: swap the buffered queue in and restart
                    // decoding if the decode thread already finished.
                    packetQueue.shutdown()
                    packetQueue = loopPacketQueue
                    self.loopPacketQueue = nil
                    if decodeOperation.isFinished {
                        decode()
                    }
                }
            }
        }
    }

    required init(mediaType: AVFoundation.AVMediaType, frameCapacity: UInt8, options: KSOptions) {
        super.init(mediaType: mediaType, frameCapacity: frameCapacity, options: options)
        operationQueue.name = "KSPlayer_" + mediaType.rawValue
        operationQueue.maxConcurrentOperationCount = 1
        operationQueue.qualityOfService = .userInteractive
    }

    /// Routes incoming packets to the loop buffer while looping, otherwise to the live queue.
    override func putPacket(packet: Packet) {
        if isLoopModel {
            loopPacketQueue?.push(packet)
        } else {
            packetQueue.push(packet)
        }
    }

    /// Starts the background decode thread (no-op if one is already queued/running).
    override func decode() {
        isEndOfFile = false
        guard operationQueue.operationCount == 0 else { return }
        decodeOperation = BlockOperation { [weak self] in
            guard let self else { return }
            Thread.current.name = self.operationQueue.name
            Thread.current.stackSize = KSOptions.stackSize
            self.decodeThread()
        }
        decodeOperation.queuePriority = .veryHigh
        decodeOperation.qualityOfService = .userInteractive
        operationQueue.addOperation(decodeOperation)
    }

    /// Decode loop: blocks on `packetQueue.pop(wait: true)` and reacts to state
    /// transitions (flush, finish, close, fail) until cancelled or terminal.
    private func decodeThread() {
        state = .decoding
        isEndOfFile = false
        decoderMap.values.forEach { $0.decode() }
        outerLoop: while !decodeOperation.isCancelled {
            switch state {
            case .idle:
                break outerLoop
            case .finished, .closed, .failed:
                // Terminal states: release all decoders and exit the loop.
                decoderMap.values.forEach { $0.shutdown() }
                decoderMap.removeAll()
                break outerLoop
            case .flush:
                decoderMap.values.forEach { $0.doFlushCodec() }
                state = .decoding
            case .decoding:
                if isEndOfFile, packetQueue.count == 0 {
                    state = .finished
                } else {
                    // pop(wait:) blocks until a packet arrives or the queue shuts down;
                    // re-check state afterwards because it may have changed while blocked.
                    guard let packet = packetQueue.pop(wait: true), state != .flush, state != .closed else {
                        continue
                    }
                    autoreleasepool {
                        doDecode(packet: packet)
                    }
                }
            }
        }
    }

    override func seek(time: TimeInterval) {
        // Restart the decode thread if it already ran to completion.
        if decodeOperation.isFinished {
            decode()
        }
        packetQueue.flush()
        super.seek(time: time)
        loopPacketQueue = nil
    }

    override func shutdown() {
        if state == .idle {
            return
        }
        super.shutdown()
        // Shutting the queue also unblocks the decode thread waiting in pop(wait:).
        packetQueue.shutdown()
    }
}
|
||||
|
||||
public extension Dictionary {
    /// Returns the value stored under `key`; when absent, evaluates
    /// `defaultValue`, inserts it under `key`, and returns it.
    /// Unlike the `subscript(_:default:)` getter, this always persists the default.
    mutating func value(for key: Key, default defaultValue: @autoclosure () -> Value) -> Value {
        guard let existing = self[key] else {
            let fresh = defaultValue()
            self[key] = fresh
            return fresh
        }
        return existing
    }
}
|
||||
|
||||
/// Common interface for packet decoders (FFmpeg software, VideoToolbox
/// hardware, and subtitle decoders).
protocol DecodeProtocol {
    /// Prepares the decoder to start (or resume) decoding.
    func decode()
    /// Decodes one packet; `completionHandler` may be invoked multiple times,
    /// once per produced frame, with either a frame or an error.
    func decodeFrame(from packet: Packet, completionHandler: @escaping (Result<MEFrame, Error>) -> Void)
    /// Flushes internal codec buffers (e.g. after a seek).
    func doFlushCodec()
    /// Releases all decoder resources.
    func shutdown()
}
|
||||
|
||||
extension SyncPlayerItemTrack {
    /// Builds the appropriate decoder for `assetTrack`: subtitle decoding for
    /// subtitle tracks; VideoToolbox hardware decoding for video when both
    /// asynchronous decompression and hardware decode are enabled and a
    /// decompression session can be created; otherwise FFmpeg software decoding.
    func makeDecode(assetTrack: FFmpegAssetTrack) -> DecodeProtocol {
        autoreleasepool {
            guard mediaType != .subtitle else {
                return SubtitleDecode(assetTrack: assetTrack, options: options)
            }
            if mediaType == .video, options.asynchronousDecompression, options.hardwareDecode,
               let session = DecompressionSession(assetTrack: assetTrack, options: options)
            {
                return VideoToolboxDecode(options: options, session: session)
            }
            return FFmpegDecode(assetTrack: assetTrack, options: options)
        }
    }
}
|
||||
446
KSPlayer-main/Sources/KSPlayer/MEPlayer/MetalPlayView.swift
Normal file
446
KSPlayer-main/Sources/KSPlayer/MEPlayer/MetalPlayView.swift
Normal file
@@ -0,0 +1,446 @@
|
||||
//
|
||||
// MetalPlayView.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/11.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Combine
|
||||
import CoreMedia
|
||||
#if canImport(MetalKit)
|
||||
import MetalKit
|
||||
#endif
|
||||
/// Notified when the video output replaces its AVSampleBufferDisplayLayer
/// (which happens when the stream's format description changes).
public protocol DisplayLayerDelegate: NSObjectProtocol {
    func change(displayLayer: AVSampleBufferDisplayLayer)
}

/// A frame output that renders video, either via an AVSampleBufferDisplayLayer
/// or a Metal layer, driven by an external render source.
public protocol VideoOutput: FrameOutput {
    var displayLayerDelegate: DisplayLayerDelegate? { get set }
    var options: KSOptions { get set }
    var displayLayer: AVSampleBufferDisplayLayer { get }
    /// The most recently rendered pixel buffer, if any.
    var pixelBuffer: PixelBufferProtocol? { get }
    init(options: KSOptions)
    /// Stops the render timer permanently.
    func invalidate()
    /// Forces rendering of the next available frame (e.g. while paused).
    func readNextFrame()
}
|
||||
|
||||
/// The default video output view: hosts both an AVSampleBufferDisplayLayer
/// (for direct CVPixelBuffer display) and a Metal view (for shader-based
/// rendering, e.g. VR/plane transforms and non-BGRA formats), switching
/// between them per frame. Rendering is paced by a CADisplayLink.
public final class MetalPlayView: UIView, VideoOutput {
    public var displayLayer: AVSampleBufferDisplayLayer {
        displayView.displayLayer
    }

    // Whether the current stream is Dolby Vision (propagated to options).
    private var isDovi: Bool = false
    private var formatDescription: CMFormatDescription? {
        didSet {
            options.updateVideo(refreshRate: fps, isDovi: isDovi, formatDescription: formatDescription)
        }
    }

    // Source frame rate; changes retune the display link's preferred rate.
    private var fps = Float(60) {
        didSet {
            if fps != oldValue {
                if KSOptions.preferredFrame {
                    let preferredFramesPerSecond = ceil(fps)
                    if #available(iOS 15.0, tvOS 15.0, macOS 14.0, *) {
                        displayLink.preferredFrameRateRange = CAFrameRateRange(minimum: preferredFramesPerSecond, maximum: 2 * preferredFramesPerSecond, __preferred: preferredFramesPerSecond)
                    } else {
                        // Tick at 2x the source rate on older OSes.
                        displayLink.preferredFramesPerSecond = Int(preferredFramesPerSecond) << 1
                    }
                }
                options.updateVideo(refreshRate: fps, isDovi: isDovi, formatDescription: formatDescription)
            }
        }
    }

    public private(set) var pixelBuffer: PixelBufferProtocol?
    /// Why a display link: a timer alone can't draw while the screen is locked;
    /// DispatchSourceTimer's repeat interval drifts longer during 4K playback;
    /// MTKView's draw(in:) stutters as well.
    private var displayLink: CADisplayLink!
    // private let timer = DispatchSource.makeTimerSource(queue: DispatchQueue.main)
    public var options: KSOptions
    public weak var renderSource: OutputRenderSourceDelegate?
    // AVSampleBufferAudioRenderer AVSampleBufferRenderSynchronizer AVSampleBufferDisplayLayer
    var displayView = AVSampleBufferDisplayView() {
        didSet {
            displayLayerDelegate?.change(displayLayer: displayView.displayLayer)
        }
    }

    private let metalView = MetalView()
    public weak var displayLayerDelegate: DisplayLayerDelegate?
    public init(options: KSOptions) {
        self.options = options
        super.init(frame: .zero)
        addSubview(displayView)
        addSubview(metalView)
        metalView.isHidden = true
        // displayLink = CADisplayLink(block: renderFrame)
        displayLink = CADisplayLink(target: self, selector: #selector(renderFrame))
        // Must use .common mode, otherwise interacting with views layered over
        // the video would stall rendering.
        displayLink.add(to: .main, forMode: .common)
        pause()
    }

    public func play() {
        displayLink.isPaused = false
    }

    public func pause() {
        displayLink.isPaused = true
    }

    @available(*, unavailable)
    required init(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Pins every added subview edge-to-edge via Auto Layout.
    override public func didAddSubview(_ subview: UIView) {
        super.didAddSubview(subview)
        subview.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            subview.leftAnchor.constraint(equalTo: leftAnchor),
            subview.topAnchor.constraint(equalTo: topAnchor),
            subview.bottomAnchor.constraint(equalTo: bottomAnchor),
            subview.rightAnchor.constraint(equalTo: rightAnchor),
        ])
    }

    /// Maps UIView content modes onto the display layer's video gravity.
    override public var contentMode: UIViewContentMode {
        didSet {
            metalView.contentMode = contentMode
            switch contentMode {
            case .scaleToFill:
                displayView.displayLayer.videoGravity = .resize
            case .scaleAspectFit, .center:
                displayView.displayLayer.videoGravity = .resizeAspect
            case .scaleAspectFill:
                displayView.displayLayer.videoGravity = .resizeAspectFill
            default:
                break
            }
        }
    }

    #if canImport(UIKit)
    // In non-plane (VR/panorama) display modes, drags rotate the camera
    // instead of being forwarded to the responder chain.
    override public func touchesMoved(_ touches: Set<UITouch>, with: UIEvent?) {
        if options.display == .plane {
            super.touchesMoved(touches, with: with)
        } else {
            options.display.touchesMoved(touch: touches.first!)
        }
    }
    #else
    override public func touchesMoved(with event: NSEvent) {
        if options.display == .plane {
            super.touchesMoved(with: event)
        } else {
            options.display.touchesMoved(touch: event.allTouches().first!)
        }
    }
    #endif

    /// Clears whichever renderer is currently visible and drops the cached buffer.
    public func flush() {
        pixelBuffer = nil
        if displayView.isHidden {
            metalView.clear()
        } else {
            displayView.displayLayer.flushAndRemoveImage()
        }
    }

    public func invalidate() {
        displayLink.invalidate()
    }

    public func readNextFrame() {
        draw(force: true)
    }

    // deinit {
    //     print()
    // }
}
|
||||
|
||||
extension MetalPlayView {
    // Display-link callback: render at most one frame per tick.
    @objc private func renderFrame() {
        draw(force: false)
    }

    /// Pulls the next frame from the render source and displays it, choosing
    /// between the AVSampleBufferDisplayLayer path (plain CVPixelBuffer + the
    /// layer handles presentation) and the Metal path (shader rendering, used
    /// for non-plane display modes or buffers the layer can't take directly).
    /// - Parameter force: when true, render even if timing says to wait (used by `readNextFrame`).
    private func draw(force: Bool) {
        autoreleasepool {
            guard let frame = renderSource?.getVideoOutputRender(force: force) else {
                return
            }
            pixelBuffer = frame.corePixelBuffer
            guard let pixelBuffer else {
                return
            }
            isDovi = frame.isDovi
            fps = frame.fps
            let cmtime = frame.cmtime
            let par = pixelBuffer.size
            let sar = pixelBuffer.aspectRatio
            if let pixelBuffer = pixelBuffer.cvPixelBuffer, options.isUseDisplayLayer() {
                // Display-layer path: unhide it and retire the Metal view.
                if displayView.isHidden {
                    displayView.isHidden = false
                    metalView.isHidden = true
                    metalView.clear()
                }
                // Apply a caller-customized display aspect ratio, if any.
                if let dar = options.customizeDar(sar: sar, par: par) {
                    pixelBuffer.aspectRatio = CGSize(width: dar.width, height: dar.height * par.width / par.height)
                }
                checkFormatDescription(pixelBuffer: pixelBuffer)
                set(pixelBuffer: pixelBuffer, time: cmtime)
            } else {
                // Metal path: unhide the Metal view and flush the display layer.
                if !displayView.isHidden {
                    displayView.isHidden = true
                    metalView.isHidden = false
                    displayView.displayLayer.flushAndRemoveImage()
                }
                let size: CGSize
                if options.display == .plane {
                    // Drawable size from pixel size adjusted by DAR or SAR.
                    if let dar = options.customizeDar(sar: sar, par: par) {
                        size = CGSize(width: par.width, height: par.width * dar.height / dar.width)
                    } else {
                        size = CGSize(width: par.width, height: par.height * sar.height / sar.width)
                    }
                } else {
                    // VR/panorama modes render at scene resolution.
                    size = KSOptions.sceneSize
                }
                checkFormatDescription(pixelBuffer: pixelBuffer)
                #if !os(tvOS)
                if #available(iOS 16, *) {
                    metalView.metalLayer.edrMetadata = frame.edrMetadata
                }
                #endif
                metalView.draw(pixelBuffer: pixelBuffer, display: options.display, size: size)
            }
            // Report presentation time back so audio/video clocks stay in sync.
            renderSource?.setVideo(time: cmtime, position: frame.position)
        }
    }

    /// Tracks the stream's format description; when it changes, rebuilds the
    /// AVSampleBufferDisplayView (the layer can't switch formats in place).
    private func checkFormatDescription(pixelBuffer: PixelBufferProtocol) {
        if formatDescription == nil || !pixelBuffer.matche(formatDescription: formatDescription!) {
            if formatDescription != nil {
                displayView.removeFromSuperview()
                displayView = AVSampleBufferDisplayView()
                displayView.frame = frame
                addSubview(displayView)
            }
            formatDescription = pixelBuffer.formatDescription
        }
    }

    /// Enqueues a pixel buffer for immediate display on the sample-buffer layer.
    private func set(pixelBuffer: CVPixelBuffer, time: CMTime) {
        guard let formatDescription else { return }
        displayView.enqueue(imageBuffer: pixelBuffer, formatDescription: formatDescription, time: time)
    }
}
|
||||
|
||||
/// A CAMetalLayer-backed view that renders pixel buffers through MetalRender,
/// managing colorspace and extended-dynamic-range (HDR) configuration.
class MetalView: UIView {
    private let render = MetalRender()
    #if canImport(UIKit)
    override public class var layerClass: AnyClass { CAMetalLayer.self }
    #endif
    var metalLayer: CAMetalLayer {
        // swiftlint:disable force_cast
        layer as! CAMetalLayer
        // swiftlint:enable force_cast
    }

    init() {
        super.init(frame: .zero)
        #if !canImport(UIKit)
        // AppKit has no layerClass hook; install the layer manually.
        layer = CAMetalLayer()
        #endif
        metalLayer.device = MetalRender.device
        metalLayer.framebufferOnly = true
        // metalLayer.displaySyncEnabled = false
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Clears the layer's next drawable to blank (used when switching renderers).
    func clear() {
        if let drawable = metalLayer.nextDrawable() {
            render.clear(drawable: drawable)
        }
    }

    /// Renders one pixel buffer: sizes the layer, matches its pixel format and
    /// colorspace to the buffer, toggles EDR for non-sRGB content, then draws.
    func draw(pixelBuffer: PixelBufferProtocol, display: DisplayEnum, size: CGSize) {
        metalLayer.drawableSize = size
        metalLayer.pixelFormat = KSOptions.colorPixelFormat(bitDepth: pixelBuffer.bitDepth)
        let colorspace = pixelBuffer.colorspace
        // Only reconfigure when the colorspace actually changes.
        if colorspace != nil, metalLayer.colorspace != colorspace {
            metalLayer.colorspace = colorspace
            KSLog("[video] CAMetalLayer colorspace \(String(describing: colorspace))")
            #if !os(tvOS)
            if #available(iOS 16.0, *) {
                if let name = colorspace?.name, name != CGColorSpace.sRGB {
                    #if os(macOS)
                    // On macOS, enable EDR only when the screen can actually exceed SDR.
                    metalLayer.wantsExtendedDynamicRangeContent = window?.screen?.maximumPotentialExtendedDynamicRangeColorComponentValue ?? 1.0 > 1.0
                    #else
                    metalLayer.wantsExtendedDynamicRangeContent = true
                    #endif
                } else {
                    metalLayer.wantsExtendedDynamicRangeContent = false
                }
                KSLog("[video] CAMetalLayer wantsExtendedDynamicRangeContent \(metalLayer.wantsExtendedDynamicRangeContent)")
            }
            #endif
        }
        guard let drawable = metalLayer.nextDrawable() else {
            KSLog("[video] CAMetalLayer not readyForMoreMediaData")
            return
        }
        render.draw(pixelBuffer: pixelBuffer, display: display, drawable: drawable)
    }
}
|
||||
|
||||
/// An AVSampleBufferDisplayLayer-backed view that wraps raw CVPixelBuffers in
/// CMSampleBuffers and enqueues them for immediate display.
class AVSampleBufferDisplayView: UIView {
    #if canImport(UIKit)
    override public class var layerClass: AnyClass { AVSampleBufferDisplayLayer.self }
    #endif
    var displayLayer: AVSampleBufferDisplayLayer {
        // swiftlint:disable force_cast
        layer as! AVSampleBufferDisplayLayer
        // swiftlint:enable force_cast
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        #if !canImport(UIKit)
        // AppKit has no layerClass hook; install the layer manually.
        layer = AVSampleBufferDisplayLayer()
        #endif
        // Give the layer a host-clock timebase running at rate 1 so enqueued
        // samples have a reference clock.
        var controlTimebase: CMTimebase?
        CMTimebaseCreateWithSourceClock(allocator: kCFAllocatorDefault, sourceClock: CMClockGetHostTimeClock(), timebaseOut: &controlTimebase)
        if let controlTimebase {
            displayLayer.controlTimebase = controlTimebase
            CMTimebaseSetTime(controlTimebase, time: .zero)
            CMTimebaseSetRate(controlTimebase, rate: 1.0)
        }
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Wraps `imageBuffer` in a CMSampleBuffer marked for immediate display and
    /// enqueues it; pacing is handled upstream by the caller's display link,
    /// not by sample timestamps. Flushes the layer when it reports failure or
    /// requires a flush to resume decoding.
    func enqueue(imageBuffer: CVPixelBuffer, formatDescription: CMVideoFormatDescription, time: CMTime) {
        let timing = CMSampleTimingInfo(duration: .invalid, presentationTimeStamp: .zero, decodeTimeStamp: .invalid)
        // var timing = CMSampleTimingInfo(duration: .invalid, presentationTimeStamp: time, decodeTimeStamp: .invalid)
        var sampleBuffer: CMSampleBuffer?
        CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: imageBuffer, formatDescription: formatDescription, sampleTiming: [timing], sampleBufferOut: &sampleBuffer)
        if let sampleBuffer {
            // Mark the sample to display immediately, bypassing timebase scheduling.
            if let attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true) as? [NSMutableDictionary], let dic = attachmentsArray.first {
                dic[kCMSampleAttachmentKey_DisplayImmediately] = true
            }
            if displayLayer.isReadyForMoreMediaData {
                displayLayer.enqueue(sampleBuffer)
            } else {
                // Enqueue anyway; the layer tolerates this, and dropping would stall video.
                KSLog("[video] AVSampleBufferDisplayLayer not readyForMoreMediaData. video time \(time), controlTime \(displayLayer.timebase.time) ")
                displayLayer.enqueue(sampleBuffer)
            }
            if #available(macOS 11.0, iOS 14, tvOS 14, *) {
                if displayLayer.requiresFlushToResumeDecoding {
                    KSLog("[video] AVSampleBufferDisplayLayer requiresFlushToResumeDecoding so flush")
                    displayLayer.flush()
                }
            }
            if displayLayer.status == .failed {
                KSLog("[video] AVSampleBufferDisplayLayer status failed so flush")
                displayLayer.flush()
                // if let error = displayLayer.error as NSError?, error.code == -11847 {
                //     displayLayer.stopRequestingMediaData()
                // }
            }
        }
    }
}
|
||||
|
||||
#if os(macOS)
import CoreVideo

/// macOS shim that mimics UIKit's CADisplayLink API on top of CVDisplayLink,
/// so the cross-platform rendering code can use one display-link interface.
class CADisplayLink {
    private let displayLink: CVDisplayLink
    private var runloop: RunLoop?
    private var mode = RunLoop.Mode.default
    public var preferredFramesPerSecond = 60
    // CVDisplayLink follows the display's refresh rate; the preferred range is
    // accepted but ignored to keep API parity with UIKit.
    @available(macOS 12.0, *)
    public var preferredFrameRateRange: CAFrameRateRange {
        get {
            CAFrameRateRange()
        }
        set {}
    }

    public var timestamp: TimeInterval {
        var timeStamp = CVTimeStamp()
        if CVDisplayLinkGetCurrentTime(displayLink, &timeStamp) == kCVReturnSuccess, (timeStamp.flags & CVTimeStampFlags.hostTimeValid.rawValue) != 0 {
            // NOTE(review): hostTime is in host-clock ticks; dividing by
            // NSEC_PER_SEC assumes 1 tick == 1 ns and truncates via integer
            // division — confirm against CVGetHostClockFrequency if precise
            // timestamps are ever needed.
            return TimeInterval(timeStamp.hostTime / NSEC_PER_SEC)
        }
        return 0
    }

    // Actual refresh period of the attached display, in seconds.
    public var duration: TimeInterval {
        CVDisplayLinkGetActualOutputVideoRefreshPeriod(displayLink)
    }

    public var targetTimestamp: TimeInterval {
        duration + timestamp
    }

    public var isPaused: Bool {
        get {
            !CVDisplayLinkIsRunning(displayLink)
        }
        set {
            if newValue {
                CVDisplayLinkStop(displayLink)
            } else {
                CVDisplayLinkStart(displayLink)
            }
        }
    }

    /// Creates and starts a link that performs `selector` on `target` via the
    /// run loop registered in `add(to:forMode:)` on each display refresh.
    public init(target: NSObject, selector: Selector) {
        var displayLink: CVDisplayLink?
        CVDisplayLinkCreateWithActiveCGDisplays(&displayLink)
        self.displayLink = displayLink!
        CVDisplayLinkSetOutputHandler(self.displayLink) { [weak self] _, _, _, _, _ in
            guard let self else { return kCVReturnSuccess }
            // Hop from the CVDisplayLink thread onto the registered run loop.
            self.runloop?.perform(selector, target: target, argument: self, order: 0, modes: [self.mode])
            return kCVReturnSuccess
        }
        CVDisplayLinkStart(self.displayLink)
    }

    /// Creates and starts a link invoking `block` directly on the CVDisplayLink thread.
    public init(block: @escaping (() -> Void)) {
        var displayLink: CVDisplayLink?
        CVDisplayLinkCreateWithActiveCGDisplays(&displayLink)
        self.displayLink = displayLink!
        CVDisplayLinkSetOutputHandler(self.displayLink) { _, _, _, _, _ in
            block()
            return kCVReturnSuccess
        }
        CVDisplayLinkStart(self.displayLink)
    }

    /// Records which run loop and mode the selector-based callback should use.
    open func add(to runloop: RunLoop, forMode mode: RunLoop.Mode) {
        self.runloop = runloop
        self.mode = mode
    }

    /// Stops the link and detaches the callback (returns an error from then on).
    public func invalidate() {
        isPaused = true
        runloop = nil
        CVDisplayLinkSetOutputHandler(displayLink) { _, _, _, _, _ in
            kCVReturnError
        }
    }
}
#endif
|
||||
493
KSPlayer-main/Sources/KSPlayer/MEPlayer/Model.swift
Normal file
493
KSPlayer-main/Sources/KSPlayer/MEPlayer/Model.swift
Normal file
@@ -0,0 +1,493 @@
|
||||
//
|
||||
// Model.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/9.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CoreMedia
|
||||
import Libavcodec
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#endif
|
||||
|
||||
// MARK: enum
|
||||
|
||||
/// Lifecycle states of the media source (demuxer/reader).
enum MESourceState {
    case idle
    case opening
    case opened
    case reading
    case seeking
    case paused
    case finished
    case closed
    case failed
}
|
||||
|
||||
// MARK: delegate
|
||||
|
||||
/// Supplies decoded frames to the audio/video outputs and receives the
/// presentation times they actually rendered (for clock synchronization).
public protocol OutputRenderSourceDelegate: AnyObject {
    func getVideoOutputRender(force: Bool) -> VideoVTBFrame?
    func getAudioOutputRender() -> AudioFrame?
    func setAudio(time: CMTime, position: Int64)
    func setVideo(time: CMTime, position: Int64)
}

/// Notified when a track has fully drained its decode pipeline.
protocol CodecCapacityDelegate: AnyObject {
    func codecDidFinished(track: some CapacityProtocol)
}

/// Events emitted by the media engine's source layer to the player.
protocol MEPlayerDelegate: AnyObject {
    func sourceDidChange(loadingState: LoadingState)
    func sourceDidOpened()
    func sourceDidFailed(error: NSError?)
    func sourceDidFinished()
    func sourceDidChange(oldBitRate: Int64, newBitrate: Int64)
}
|
||||
|
||||
// MARK: protocol
|
||||
|
||||
/// An item (packet or frame) that can live in a timing-aware queue: it carries
/// a timebase plus timestamp/duration in that timebase's units.
public protocol ObjectQueueItem {
    var timebase: Timebase { get }
    var timestamp: Int64 { get set }
    var duration: Int64 { get set }
    // byte position
    var position: Int64 { get set }
    var size: Int32 { get set }
}

extension ObjectQueueItem {
    /// Presentation time converted to seconds.
    var seconds: TimeInterval { cmtime.seconds }
    /// Presentation time as a CMTime in this item's timebase.
    var cmtime: CMTime { timebase.cmtime(for: timestamp) }
}
|
||||
|
||||
/// A sink (audio or video) that pulls frames from a render source and can be
/// played, paused, or flushed.
public protocol FrameOutput: AnyObject {
    var renderSource: OutputRenderSourceDelegate? { get set }
    func pause()
    func flush()
    func play()
}

/// A decoded frame produced by the media engine; the timebase is settable so
/// decoders can stamp frames with their track's timebase.
protocol MEFrame: ObjectQueueItem {
    var timebase: Timebase { get set }
}
|
||||
|
||||
// MARK: model
|
||||
|
||||
// for MEPlayer
|
||||
// for MEPlayer
public extension KSOptions {
    /// Enables the gyroscope-driven camera in VR display mode.
    static var enableSensor = true
    // Stack size for the dedicated decode threads.
    static var stackSize = 65536
    static var isClearVideoWhereReplace = true
    // Pluggable audio/video output implementations.
    static var audioPlayerType: AudioOutput.Type = AudioEnginePlayer.self
    static var videoPlayerType: (VideoOutput & UIView).Type = MetalPlayView.self
    // yadif deinterlace filter mode.
    static var yadifMode = 1
    static var deInterlaceAddIdet = false
    /// Maps a buffer's YCbCr matrix + transfer function to the CGColorSpace
    /// used for display, preferring the newest matching named colorspace the
    /// OS provides (ITU-R 2100 PQ/HLG for HDR BT.2020 content).
    static func colorSpace(ycbcrMatrix: CFString?, transferFunction: CFString?) -> CGColorSpace? {
        switch ycbcrMatrix {
        case kCVImageBufferYCbCrMatrix_ITU_R_709_2:
            return CGColorSpace(name: CGColorSpace.itur_709)
        case kCVImageBufferYCbCrMatrix_ITU_R_601_4:
            return CGColorSpace(name: CGColorSpace.sRGB)
        case kCVImageBufferYCbCrMatrix_ITU_R_2020:
            if transferFunction == kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ {
                // HDR10 (PQ) content.
                if #available(macOS 11.0, iOS 14.0, tvOS 14.0, *) {
                    return CGColorSpace(name: CGColorSpace.itur_2100_PQ)
                } else if #available(macOS 10.15.4, iOS 13.4, tvOS 13.4, *) {
                    return CGColorSpace(name: CGColorSpace.itur_2020_PQ)
                } else {
                    return CGColorSpace(name: CGColorSpace.itur_2020_PQ_EOTF)
                }
            } else if transferFunction == kCVImageBufferTransferFunction_ITU_R_2100_HLG {
                // HLG content.
                if #available(macOS 11.0, iOS 14.0, tvOS 14.0, *) {
                    return CGColorSpace(name: CGColorSpace.itur_2100_HLG)
                } else {
                    return CGColorSpace(name: CGColorSpace.itur_2020)
                }
            } else {
                return CGColorSpace(name: CGColorSpace.itur_2020)
            }
        default:
            return CGColorSpace(name: CGColorSpace.sRGB)
        }
    }

    /// Maps a buffer's color primaries to a display CGColorSpace, assuming PQ
    /// transfer for P3/2020 primaries.
    static func colorSpace(colorPrimaries: CFString?) -> CGColorSpace? {
        switch colorPrimaries {
        case kCVImageBufferColorPrimaries_ITU_R_709_2:
            return CGColorSpace(name: CGColorSpace.sRGB)
        case kCVImageBufferColorPrimaries_DCI_P3:
            if #available(macOS 10.15.4, iOS 13.4, tvOS 13.4, *) {
                return CGColorSpace(name: CGColorSpace.displayP3_PQ)
            } else {
                return CGColorSpace(name: CGColorSpace.displayP3_PQ_EOTF)
            }
        case kCVImageBufferColorPrimaries_ITU_R_2020:
            if #available(macOS 11.0, iOS 14.0, tvOS 14.0, *) {
                return CGColorSpace(name: CGColorSpace.itur_2100_PQ)
            } else if #available(macOS 10.15.4, iOS 13.4, tvOS 13.4, *) {
                return CGColorSpace(name: CGColorSpace.itur_2020_PQ)
            } else {
                return CGColorSpace(name: CGColorSpace.itur_2020_PQ_EOTF)
            }
        default:
            return CGColorSpace(name: CGColorSpace.sRGB)
        }
    }

    /// Per-plane Metal pixel formats for a YUV buffer layout:
    /// 3 planes = Y/U/V, 2 planes = Y/UV (biplanar), 1 plane = packed RGBA.
    static func pixelFormat(planeCount: Int, bitDepth: Int32) -> [MTLPixelFormat] {
        if planeCount == 3 {
            if bitDepth > 8 {
                return [.r16Unorm, .r16Unorm, .r16Unorm]
            } else {
                return [.r8Unorm, .r8Unorm, .r8Unorm]
            }
        } else if planeCount == 2 {
            if bitDepth > 8 {
                return [.r16Unorm, .rg16Unorm]
            } else {
                return [.r8Unorm, .rg8Unorm]
            }
        } else {
            return [colorPixelFormat(bitDepth: bitDepth)]
        }
    }

    /// Drawable pixel format: 10-bit content uses bgr10a2Unorm, else bgra8Unorm.
    static func colorPixelFormat(bitDepth: Int32) -> MTLPixelFormat {
        if bitDepth == 10 {
            return .bgr10a2Unorm
        } else {
            return .bgra8Unorm
        }
    }
}
|
||||
|
||||
/// Lifecycle states of a decoding track (see SyncPlayerItemTrack /
/// AsyncPlayerItemTrack state machines).
enum MECodecState {
    case idle
    case decoding
    case flush
    case closed
    case failed
    case finished
}
|
||||
|
||||
/// A rational time unit (num/den seconds per tick), mirroring FFmpeg's AVRational.
public struct Timebase {
    static let defaultValue = Timebase(num: 1, den: 1)
    public let num: Int32
    public let den: Int32
    /// Converts seconds to a tick count in this timebase (truncating).
    func getPosition(from seconds: TimeInterval) -> Int64 { Int64(seconds * TimeInterval(den) / TimeInterval(num)) }

    /// Converts a tick count in this timebase to a CMTime.
    func cmtime(for timestamp: Int64) -> CMTime { CMTime(value: timestamp * Int64(num), timescale: den) }
}

extension Timebase {
    // Bridge to/from FFmpeg's AVRational.
    public var rational: AVRational { AVRational(num: num, den: den) }

    init(_ rational: AVRational) {
        num = rational.num
        den = rational.den
    }
}
|
||||
|
||||
/// A demuxed compressed packet wrapping an FFmpeg AVPacket, with timing fields
/// copied out when the owning asset track is assigned.
final class Packet: ObjectQueueItem {
    var duration: Int64 = 0
    var timestamp: Int64 = 0
    var position: Int64 = 0
    var size: Int32 = 0
    // Owned AVPacket; freed in deinit.
    private(set) var corePacket = av_packet_alloc()
    var timebase: Timebase {
        assetTrack.timebase
    }

    var isKeyFrame: Bool {
        if let corePacket {
            return corePacket.pointee.flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY
        } else {
            return false
        }
    }

    // Assigning the track copies pts/pos/duration/size out of the AVPacket.
    var assetTrack: FFmpegAssetTrack! {
        didSet {
            guard let packet = corePacket?.pointee else {
                return
            }
            // Int64.min is AV_NOPTS_VALUE: fall back to dts when pts is unset.
            timestamp = packet.pts == Int64.min ? packet.dts : packet.pts
            position = packet.pos
            duration = packet.duration
            size = packet.size
        }
    }

    deinit {
        // av_packet_free also unrefs; the explicit unref beforehand is redundant
        // but harmless.
        av_packet_unref(corePacket)
        av_packet_free(&corePacket)
    }
}
|
||||
|
||||
/// A decoded subtitle cue with its timing, carrying the parsed SubtitlePart.
final class SubtitleFrame: MEFrame {
    var timestamp: Int64 = 0
    var timebase: Timebase
    var duration: Int64 = 0
    var position: Int64 = 0
    var size: Int32 = 0
    let part: SubtitlePart
    init(part: SubtitlePart, timebase: Timebase) {
        self.part = part
        self.timebase = timebase
    }
}
|
||||
|
||||
public final class AudioFrame: MEFrame {
|
||||
public let dataSize: Int
|
||||
public let audioFormat: AVAudioFormat
|
||||
public internal(set) var timebase = Timebase.defaultValue
|
||||
public var timestamp: Int64 = 0
|
||||
public var duration: Int64 = 0
|
||||
public var position: Int64 = 0
|
||||
public var size: Int32 = 0
|
||||
public var data: [UnsafeMutablePointer<UInt8>?]
|
||||
public var numberOfSamples: UInt32 = 0
|
||||
public init(dataSize: Int, audioFormat: AVAudioFormat) {
|
||||
self.dataSize = dataSize
|
||||
self.audioFormat = audioFormat
|
||||
let count = audioFormat.isInterleaved ? 1 : audioFormat.channelCount
|
||||
data = (0 ..< count).map { _ in
|
||||
UnsafeMutablePointer<UInt8>.allocate(capacity: dataSize)
|
||||
}
|
||||
}
|
||||
|
||||
init(array: [AudioFrame]) {
|
||||
audioFormat = array[0].audioFormat
|
||||
timebase = array[0].timebase
|
||||
timestamp = array[0].timestamp
|
||||
position = array[0].position
|
||||
var dataSize = 0
|
||||
for frame in array {
|
||||
duration += frame.duration
|
||||
dataSize += frame.dataSize
|
||||
size += frame.size
|
||||
numberOfSamples += frame.numberOfSamples
|
||||
}
|
||||
self.dataSize = dataSize
|
||||
let count = audioFormat.isInterleaved ? 1 : audioFormat.channelCount
|
||||
data = (0 ..< count).map { _ in
|
||||
UnsafeMutablePointer<UInt8>.allocate(capacity: dataSize)
|
||||
}
|
||||
var offset = 0
|
||||
for frame in array {
|
||||
for i in 0 ..< data.count {
|
||||
data[i]?.advanced(by: offset).initialize(from: frame.data[i]!, count: frame.dataSize)
|
||||
}
|
||||
offset += frame.dataSize
|
||||
}
|
||||
}
|
||||
|
||||
deinit {
|
||||
for i in 0 ..< data.count {
|
||||
data[i]?.deinitialize(count: dataSize)
|
||||
data[i]?.deallocate()
|
||||
}
|
||||
data.removeAll()
|
||||
}
|
||||
|
||||
/// Converts the raw PCM bytes of every channel buffer into Float samples.
/// Int16/Int32 sources are scaled and clamped into [-1.0, 1.0]; any other
/// common format is treated as Float32 and copied through unchanged.
/// - Returns: one `ContiguousArray<Float>` per entry in `data`.
public func toFloat() -> [ContiguousArray<Float>] {
    var array = [ContiguousArray<Float>]()
    for i in 0 ..< data.count {
        switch audioFormat.commonFormat {
        case .pcmFormatInt16:
            let capacity = dataSize / MemoryLayout<Int16>.size
            data[i]?.withMemoryRebound(to: Int16.self, capacity: capacity) { src in
                var des = ContiguousArray<Float>(repeating: 0, count: Int(capacity))
                for j in 0 ..< capacity {
                    // Scale by Int16.max; clamp so -32768 cannot map below -1.0.
                    des[j] = max(-1.0, min(Float(src[j]) / 32767.0, 1.0))
                }
                array.append(des)
            }
        case .pcmFormatInt32:
            let capacity = dataSize / MemoryLayout<Int32>.size
            data[i]?.withMemoryRebound(to: Int32.self, capacity: capacity) { src in
                var des = ContiguousArray<Float>(repeating: 0, count: Int(capacity))
                for j in 0 ..< capacity {
                    // Scale by Int32.max; clamp symmetrically as above.
                    des[j] = max(-1.0, min(Float(src[j]) / 2_147_483_647.0, 1.0))
                }
                array.append(des)
            }
        default:
            // Assumed Float32 source — copied without rescaling.
            let capacity = dataSize / MemoryLayout<Float>.size
            data[i]?.withMemoryRebound(to: Float.self, capacity: capacity) { src in
                var des = ContiguousArray<Float>(repeating: 0, count: Int(capacity))
                for j in 0 ..< capacity {
                    des[j] = src[j]
                }
                array.append(ContiguousArray<Float>(des))
            }
        }
    }
    return array
}
|
||||
|
||||
/// Copies the frame's channel buffers into a freshly allocated `AVAudioPCMBuffer`
/// whose format is `audioFormat`.
/// - Returns: the populated buffer, or nil if allocation fails.
public func toPCMBuffer() -> AVAudioPCMBuffer? {
    guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: numberOfSamples) else {
        return nil
    }
    pcmBuffer.frameLength = pcmBuffer.frameCapacity
    // Copy at most min(channelCount, data.count) channels; extra channels on
    // either side are ignored.
    for i in 0 ..< min(Int(pcmBuffer.format.channelCount), data.count) {
        switch audioFormat.commonFormat {
        case .pcmFormatInt16:
            let capacity = dataSize / MemoryLayout<Int16>.size
            data[i]?.withMemoryRebound(to: Int16.self, capacity: capacity) { src in
                pcmBuffer.int16ChannelData?[i].update(from: src, count: capacity)
            }
        case .pcmFormatInt32:
            let capacity = dataSize / MemoryLayout<Int32>.size
            data[i]?.withMemoryRebound(to: Int32.self, capacity: capacity) { src in
                pcmBuffer.int32ChannelData?[i].update(from: src, count: capacity)
            }
        default:
            // Any other common format is treated as Float32.
            let capacity = dataSize / MemoryLayout<Float>.size
            data[i]?.withMemoryRebound(to: Float.self, capacity: capacity) { src in
                pcmBuffer.floatChannelData?[i].update(from: src, count: capacity)
            }
        }
    }
    return pcmBuffer
}
|
||||
|
||||
/// Wraps the raw channel buffers in a `CMSampleBuffer`: one `CMBlockBuffer`
/// per channel, all chained into a block-list buffer (suitable for
/// `AVSampleBufferAudioRenderer`-style playback).
/// - Returns: the sample buffer, or nil if the block-list allocation fails.
public func toCMSampleBuffer() -> CMSampleBuffer? {
    var outBlockListBuffer: CMBlockBuffer?
    CMBlockBufferCreateEmpty(allocator: kCFAllocatorDefault, capacity: UInt32(data.count), flags: 0, blockBufferOut: &outBlockListBuffer)
    guard let outBlockListBuffer else {
        return nil
    }
    let sampleSize = Int(audioFormat.sampleSize)
    let sampleCount = CMItemCount(numberOfSamples)
    let dataByteSize = sampleCount * sampleSize
    if dataByteSize > dataSize {
        // Reading dataByteSize from a smaller buffer below would overrun it.
        assertionFailure("dataByteSize: \(dataByteSize),render.dataSize: \(dataSize)")
    }
    for i in 0 ..< data.count {
        var outBlockBuffer: CMBlockBuffer?
        CMBlockBufferCreateWithMemoryBlock(
            allocator: kCFAllocatorDefault,
            memoryBlock: nil,
            blockLength: dataByteSize,
            blockAllocator: kCFAllocatorDefault,
            customBlockSource: nil,
            offsetToData: 0,
            dataLength: dataByteSize,
            flags: kCMBlockBufferAssureMemoryNowFlag,
            blockBufferOut: &outBlockBuffer
        )
        if let outBlockBuffer {
            CMBlockBufferReplaceDataBytes(
                with: data[i]!,
                blockBuffer: outBlockBuffer,
                offsetIntoDestination: 0,
                dataLength: dataByteSize
            )
            CMBlockBufferAppendBufferReference(
                outBlockListBuffer,
                targetBBuf: outBlockBuffer,
                offsetToData: 0,
                dataLength: CMBlockBufferGetDataLength(outBlockBuffer),
                flags: 0
            )
        }
    }
    var sampleBuffer: CMSampleBuffer?
    // The sampleRate is not aligned with the timescale, which produced audible
    // noise, so the duration is deliberately left invalid.
    // let duration = CMTime(value: CMTimeValue(sampleCount), timescale: CMTimeScale(audioFormat.sampleRate))
    let duration = CMTime.invalid
    let timing = CMSampleTimingInfo(duration: duration, presentationTimeStamp: cmtime, decodeTimeStamp: .invalid)
    let sampleSizeEntryCount: CMItemCount
    let sampleSizeArray: [Int]?
    if audioFormat.isInterleaved {
        sampleSizeEntryCount = 1
        sampleSizeArray = [sampleSize]
    } else {
        // Non-interleaved: per-sample sizes are implied by the format description.
        sampleSizeEntryCount = 0
        sampleSizeArray = nil
    }
    CMSampleBufferCreateReady(allocator: kCFAllocatorDefault, dataBuffer: outBlockListBuffer, formatDescription: audioFormat.formatDescription, sampleCount: sampleCount, sampleTimingEntryCount: 1, sampleTimingArray: [timing], sampleSizeEntryCount: sampleSizeEntryCount, sampleSizeArray: sampleSizeArray, sampleBufferOut: &sampleBuffer)
    return sampleBuffer
}
|
||||
}
|
||||
|
||||
/// A decoded video frame backed by a pixel buffer (VideoToolbox or swscale
/// output), carrying timing metadata plus optional HDR/EDR information.
public final class VideoVTBFrame: MEFrame {
    public var timebase = Timebase.defaultValue
    // For interlaced video the duration is inaccurate (effectively halved).
    public var duration: Int64 = 0
    public var position: Int64 = 0
    public var timestamp: Int64 = 0
    public var size: Int32 = 0
    // Frame rate of the owning stream, fixed at creation.
    public let fps: Float
    // Whether the stream is Dolby Vision.
    public let isDovi: Bool
    // Optional HDR mastering/content-light metadata used to build CAEDRMetadata.
    public var edrMetaData: EDRMetaData? = nil
    // The actual pixel data; nil until assigned by the decoder.
    var corePixelBuffer: PixelBufferProtocol?
    init(fps: Float, isDovi: Bool) {
        self.fps = fps
        self.isDovi = isDovi
    }
}
|
||||
|
||||
extension VideoVTBFrame {
    #if !os(tvOS)
    /// Builds `CAEDRMetadata` for this frame, preferring explicit HDR10
    /// mastering/content-light data, then an ambient-viewing-environment (HLG)
    /// record, and finally falling back to the pixel buffer's transfer function.
    /// Returns nil when no EDR information is available.
    @available(iOS 16, *)
    var edrMetadata: CAEDRMetadata? {
        if var contentData = edrMetaData?.contentData, var displayData = edrMetaData?.displayData {
            // Serialize the structs as raw bytes in the layout CoreAnimation expects
            // (mirrors the FFmpeg side-data structs — see EDRMetaData below).
            let data = Data(bytes: &displayData, count: MemoryLayout<MasteringDisplayMetadata>.stride)
            let data2 = Data(bytes: &contentData, count: MemoryLayout<ContentLightMetadata>.stride)
            return CAEDRMetadata.hdr10(displayInfo: data, contentInfo: data2, opticalOutputScale: 10000)
        }
        if var ambientViewingEnvironment = edrMetaData?.ambientViewingEnvironment {
            let data = Data(bytes: &ambientViewingEnvironment, count: MemoryLayout<AmbientViewingEnvironment>.stride)
            if #available(macOS 14.0, iOS 17.0, *) {
                return CAEDRMetadata.hlg(ambientViewingEnvironment: data)
            } else {
                // Older OS: fall back to generic HLG metadata.
                return CAEDRMetadata.hlg
            }
        }
        // No explicit metadata: infer from the buffer's transfer function.
        if corePixelBuffer?.transferFunction == kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ {
            return CAEDRMetadata.hdr10(minLuminance: 0.1, maxLuminance: 1000, opticalOutputScale: 10000)
        } else if corePixelBuffer?.transferFunction == kCVImageBufferTransferFunction_ITU_R_2100_HLG {
            return CAEDRMetadata.hlg
        }
        return nil
    }
    #endif
}
|
||||
|
||||
/// Container for the optional HDR side data attached to a video frame.
/// Any combination of the three records may be present.
public struct EDRMetaData {
    var displayData: MasteringDisplayMetadata?
    var contentData: ContentLightMetadata?
    var ambientViewingEnvironment: AmbientViewingEnvironment?
}
|
||||
|
||||
/// HDR10 mastering display primaries/luminance record.
/// Field names and order appear to mirror FFmpeg's AVMasteringDisplayMetadata
/// side data; the raw bytes are handed to CAEDRMetadata — TODO confirm layout.
public struct MasteringDisplayMetadata {
    let display_primaries_r_x: UInt16
    let display_primaries_r_y: UInt16
    let display_primaries_g_x: UInt16
    let display_primaries_g_y: UInt16
    let display_primaries_b_x: UInt16
    let display_primaries_b_y: UInt16
    let white_point_x: UInt16
    let white_point_y: UInt16
    let minLuminance: UInt32
    let maxLuminance: UInt32
}
|
||||
|
||||
/// HDR10 content light level record (max content light level / max
/// frame-average light level), serialized raw for CAEDRMetadata.
public struct ContentLightMetadata {
    let MaxCLL: UInt16
    let MaxFALL: UInt16
}
|
||||
|
||||
// https://developer.apple.com/documentation/technotes/tn3145-hdr-video-metadata
|
||||
/// Ambient viewing environment record used for HLG EDR metadata.
/// Layout follows Apple TN3145 (HDR video metadata).
public struct AmbientViewingEnvironment {
    let ambient_illuminance: UInt32
    let ambient_light_x: UInt16
    let ambient_light_y: UInt16
}
|
||||
384
KSPlayer-main/Sources/KSPlayer/MEPlayer/Resample.swift
Normal file
384
KSPlayer-main/Sources/KSPlayer/MEPlayer/Resample.swift
Normal file
@@ -0,0 +1,384 @@
|
||||
//
|
||||
// Resample.swift
|
||||
// KSPlayer-iOS
|
||||
//
|
||||
// Created by kintan on 2020/1/27.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CoreGraphics
|
||||
import CoreMedia
|
||||
import Libavcodec
|
||||
import Libswresample
|
||||
import Libswscale
|
||||
|
||||
/// Converts one raw AVFrame into another AVFrame (e.g. pixel-format
/// conversion); implementations may return the input unchanged.
protocol FrameTransfer {
    func transfer(avframe: UnsafeMutablePointer<AVFrame>) -> UnsafeMutablePointer<AVFrame>
    /// Releases any native conversion contexts held by the transfer.
    func shutdown()
}
|
||||
|
||||
/// Converts a raw AVFrame into the player's own `MEFrame` representation.
protocol FrameChange {
    func change(avframe: UnsafeMutablePointer<AVFrame>) throws -> MEFrame
    /// Releases any native conversion contexts held by the converter.
    func shutdown()
}
|
||||
|
||||
/// AVFrame → AVFrame pixel-format converter built on swscale.
/// Keeps a cached sws context + output frame and rebuilds them only when the
/// input format/size changes. Formats that map directly to an OSType bypass
/// conversion entirely.
class VideoSwscale: FrameTransfer {
    private var imgConvertCtx: OpaquePointer?
    private var format: AVPixelFormat = AV_PIX_FMT_NONE
    private var height: Int32 = 0
    private var width: Int32 = 0
    private var outFrame: UnsafeMutablePointer<AVFrame>?
    /// Rebuilds the conversion context when the input geometry/format changes.
    private func setup(format: AVPixelFormat, width: Int32, height: Int32, linesize _: Int32) {
        if self.format == format, self.width == width, self.height == height {
            return
        }
        self.format = format
        self.height = height
        self.width = width
        if format.osType() != nil {
            // Directly displayable format: drop the conversion machinery.
            sws_freeContext(imgConvertCtx)
            imgConvertCtx = nil
            // Fix: previously `outFrame = nil`, which leaked the allocated AVFrame.
            av_frame_free(&outFrame)
        } else {
            let dstFormat = format.bestPixelFormat
            imgConvertCtx = sws_getCachedContext(imgConvertCtx, width, height, self.format, width, height, dstFormat, SWS_BICUBIC, nil, nil, nil)
            // Fix: free any frame from a previous setup before allocating a new one.
            av_frame_free(&outFrame)
            outFrame = av_frame_alloc()
            outFrame?.pointee.format = dstFormat.rawValue
            outFrame?.pointee.width = width
            outFrame?.pointee.height = height
        }
    }

    /// Converts `avframe` to the best matching pixel format, or returns it
    /// unchanged when no conversion is needed/possible.
    func transfer(avframe: UnsafeMutablePointer<AVFrame>) -> UnsafeMutablePointer<AVFrame> {
        setup(format: AVPixelFormat(rawValue: avframe.pointee.format), width: avframe.pointee.width, height: avframe.pointee.height, linesize: avframe.pointee.linesize.0)
        if let imgConvertCtx, let outFrame {
            // NOTE(review): sws_scale_frame's result is ignored; a negative
            // return would leave outFrame stale — consider checking it.
            sws_scale_frame(imgConvertCtx, outFrame, avframe)
            return outFrame
        }
        return avframe
    }

    /// Releases the sws context and the cached output frame.
    func shutdown() {
        sws_freeContext(imgConvertCtx)
        imgConvertCtx = nil
        // Fix: the output AVFrame was never freed before, leaking on shutdown.
        av_frame_free(&outFrame)
    }
}
|
||||
|
||||
/// AVFrame → CVPixelBuffer converter. Uses swscale when the source pixel
/// format cannot be represented directly as an OSType (or when an explicit
/// destination size/format was requested); otherwise copies planes straight
/// into a pooled CVPixelBuffer.
class VideoSwresample: FrameChange {
    private var imgConvertCtx: OpaquePointer?
    private var format: AVPixelFormat = AV_PIX_FMT_NONE
    private var height: Int32 = 0
    private var width: Int32 = 0
    // Pool recreated whenever the source geometry/format changes.
    private var pool: CVPixelBufferPool?
    private var dstHeight: Int32?
    private var dstWidth: Int32?
    private let dstFormat: AVPixelFormat?
    private let fps: Float
    private let isDovi: Bool
    init(dstWidth: Int32? = nil, dstHeight: Int32? = nil, dstFormat: AVPixelFormat? = nil, fps: Float = 60, isDovi: Bool) {
        self.dstWidth = dstWidth
        self.dstHeight = dstHeight
        self.dstFormat = dstFormat
        self.fps = fps
        self.isDovi = isDovi
    }

    /// Wraps `avframe` in a `VideoVTBFrame`. VideoToolbox frames already carry
    /// a CVPixelBuffer (in data[3]); everything else goes through `transfer`.
    func change(avframe: UnsafeMutablePointer<AVFrame>) throws -> MEFrame {
        let frame = VideoVTBFrame(fps: fps, isDovi: isDovi)
        if avframe.pointee.format == AV_PIX_FMT_VIDEOTOOLBOX.rawValue {
            frame.corePixelBuffer = unsafeBitCast(avframe.pointee.data.3, to: CVPixelBuffer.self)
        } else {
            frame.corePixelBuffer = transfer(frame: avframe.pointee)
        }
        return frame
    }

    /// Rebuilds the sws context and pixel-buffer pool when the input changes.
    private func setup(format: AVPixelFormat, width: Int32, height: Int32, linesize: Int32) {
        if self.format == format, self.width == width, self.height == height {
            return
        }
        self.format = format
        self.height = height
        self.width = width
        let dstWidth = dstWidth ?? width
        let dstHeight = dstHeight ?? height
        let pixelFormatType: OSType
        if self.dstWidth == nil, self.dstHeight == nil, dstFormat == nil, let osType = format.osType() {
            // Source format is directly representable: plain plane copy, no swscale.
            pixelFormatType = osType
            sws_freeContext(imgConvertCtx)
            imgConvertCtx = nil
        } else {
            let dstFormat = dstFormat ?? format.bestPixelFormat
            pixelFormatType = dstFormat.osType()!
            // imgConvertCtx = sws_getContext(width, height, self.format, width, height, dstFormat, SWS_FAST_BILINEAR, nil, nil, nil)
            // AV_PIX_FMT_VIDEOTOOLBOX frames cannot go through swscale.
            imgConvertCtx = sws_getCachedContext(imgConvertCtx, width, height, self.format, dstWidth, dstHeight, dstFormat, SWS_FAST_BILINEAR, nil, nil, nil)
        }
        pool = CVPixelBufferPool.create(width: dstWidth, height: dstHeight, bytesPerRowAlignment: linesize, pixelFormatType: pixelFormatType)
    }

    /// Converts a decoded AVFrame into a pixel buffer and copies across its
    /// color metadata (aspect ratio, matrix, primaries, transfer function).
    func transfer(frame: AVFrame) -> PixelBufferProtocol? {
        let format = AVPixelFormat(rawValue: frame.format)
        let width = frame.width
        let height = frame.height
        if format.leftShift > 0 {
            // High-bit-depth layouts handled by the wrapper type directly.
            return PixelBuffer(frame: frame)
        }
        let pbuf = transfer(format: format, width: width, height: height, data: Array(tuple: frame.data), linesize: Array(tuple: frame.linesize))
        if let pbuf {
            pbuf.aspectRatio = frame.sample_aspect_ratio.size
            pbuf.yCbCrMatrix = frame.colorspace.ycbcrMatrix
            pbuf.colorPrimaries = frame.color_primaries.colorPrimaries
            pbuf.transferFunction = frame.color_trc.transferFunction
            // vt_pixbuf_set_colorspace
            if pbuf.transferFunction == kCVImageBufferTransferFunction_UseGamma {
                let gamma = NSNumber(value: frame.color_trc == AVCOL_TRC_GAMMA22 ? 2.2 : 2.8)
                CVBufferSetAttachment(pbuf, kCVImageBufferGammaLevelKey, gamma, .shouldPropagate)
            }
            if let chroma = frame.chroma_location.chroma {
                CVBufferSetAttachment(pbuf, kCVImageBufferChromaLocationTopFieldKey, chroma, .shouldPropagate)
            }
            pbuf.colorspace = KSOptions.colorSpace(ycbcrMatrix: pbuf.yCbCrMatrix, transferFunction: pbuf.transferFunction)
        }
        return pbuf
    }

    /// Copies (or swscales) raw plane data into a pooled CVPixelBuffer.
    /// When the destination has fewer planes than the source (e.g. planar YUV
    /// → biplanar NV12), the two chroma planes are interleaved manually.
    func transfer(format: AVPixelFormat, width: Int32, height: Int32, data: [UnsafeMutablePointer<UInt8>?], linesize: [Int32]) -> CVPixelBuffer? {
        // linesize[1] is the chroma stride; fall back to the luma stride for
        // single-plane formats.
        setup(format: format, width: width, height: height, linesize: linesize[1] == 0 ? linesize[0] : linesize[1])
        guard let pool else {
            return nil
        }
        return autoreleasepool {
            var pbuf: CVPixelBuffer?
            let ret = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pbuf)
            guard let pbuf, ret == kCVReturnSuccess else {
                return nil
            }
            CVPixelBufferLockBaseAddress(pbuf, CVPixelBufferLockFlags(rawValue: 0))
            let bufferPlaneCount = pbuf.planeCount
            if let imgConvertCtx {
                // swscale path: scale straight into the pixel buffer's planes.
                let bytesPerRow = (0 ..< bufferPlaneCount).map { i in
                    Int32(CVPixelBufferGetBytesPerRowOfPlane(pbuf, i))
                }
                let contents = (0 ..< bufferPlaneCount).map { i in
                    pbuf.baseAddressOfPlane(at: i)?.assumingMemoryBound(to: UInt8.self)
                }
                _ = sws_scale(imgConvertCtx, data.map { UnsafePointer($0) }, linesize, 0, height, contents, bytesPerRow)
            } else {
                // Direct copy path.
                let planeCount = format.planeCount
                let byteCount = format.bitDepth > 8 ? 2 : 1
                for i in 0 ..< bufferPlaneCount {
                    let height = pbuf.heightOfPlane(at: i)
                    let size = Int(linesize[i])
                    let bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pbuf, i)
                    var contents = pbuf.baseAddressOfPlane(at: i)
                    var source = data[i]!
                    if bufferPlaneCount < planeCount, i + 2 == planeCount {
                        // Interleave separate U and V planes into one UV plane.
                        var sourceU = data[i]!
                        var sourceV = data[i + 1]!
                        var k = 0
                        while k < height {
                            var j = 0
                            while j < size {
                                contents?.advanced(by: 2 * j).copyMemory(from: sourceU.advanced(by: j), byteCount: byteCount)
                                contents?.advanced(by: 2 * j + byteCount).copyMemory(from: sourceV.advanced(by: j), byteCount: byteCount)
                                j += byteCount
                            }
                            contents = contents?.advanced(by: bytesPerRow)
                            sourceU = sourceU.advanced(by: size)
                            sourceV = sourceV.advanced(by: size)
                            k += 1
                        }
                    } else if bytesPerRow == size {
                        // Strides match: copy the whole plane at once.
                        contents?.copyMemory(from: source, byteCount: height * size)
                    } else {
                        // Strides differ: copy row by row.
                        var j = 0
                        while j < height {
                            contents?.advanced(by: j * bytesPerRow).copyMemory(from: source.advanced(by: j * size), byteCount: size)
                            j += 1
                        }
                    }
                }
            }
            CVPixelBufferUnlockBaseAddress(pbuf, CVPixelBufferLockFlags(rawValue: 0))
            return pbuf
        }
    }

    func shutdown() {
        sws_freeContext(imgConvertCtx)
        imgConvertCtx = nil
    }
}
|
||||
|
||||
extension BinaryInteger {
    /// Rounds `self` up to the next multiple of `value`; a value that is
    /// already aligned is returned unchanged.
    func alignment(value: Self) -> Self {
        let leftover = self % value
        if leftover == 0 {
            return self
        }
        return self + (value - leftover)
    }
}
|
||||
|
||||
typealias SwrContext = OpaquePointer

/// AVFrame → AudioFrame resampler built on libswresample. Rebuilds its swr
/// context whenever the incoming stream's format/rate/layout changes.
class AudioSwresample: FrameChange {
    private var swrContext: SwrContext?
    private var descriptor: AudioDescriptor
    private var outChannel: AVChannelLayout
    init(audioDescriptor: AudioDescriptor) {
        descriptor = audioDescriptor
        outChannel = audioDescriptor.outChannel
        _ = setup(descriptor: descriptor)
    }

    /// (Re)creates the swr context for `descriptor`.
    /// - Returns: true on success; false leaves the converter shut down.
    private func setup(descriptor: AudioDescriptor) -> Bool {
        var result = swr_alloc_set_opts2(&swrContext, &descriptor.outChannel, descriptor.audioFormat.sampleFormat, Int32(descriptor.audioFormat.sampleRate), &descriptor.channel, descriptor.sampleFormat, descriptor.sampleRate, 0, nil)
        // Fix: the allocation result used to be discarded (immediately
        // overwritten by swr_init's result), so an alloc failure was ignored
        // and swr_init could run on a nil context. Only init after a
        // successful allocation.
        if result >= 0 {
            result = swr_init(swrContext)
        }
        if result < 0 {
            shutdown()
            return false
        } else {
            outChannel = descriptor.outChannel
            return true
        }
    }

    /// Resamples one decoded audio AVFrame into an `AudioFrame` in the output
    /// format, rebuilding the context first if the stream parameters changed.
    /// - Throws: `.auidoSwrInit` when the context cannot be rebuilt.
    func change(avframe: UnsafeMutablePointer<AVFrame>) throws -> MEFrame {
        if !(descriptor == avframe.pointee) || outChannel != descriptor.outChannel {
            let newDescriptor = AudioDescriptor(frame: avframe.pointee)
            if setup(descriptor: newDescriptor) {
                descriptor = newDescriptor
            } else {
                throw NSError(errorCode: .auidoSwrInit, userInfo: ["outChannel": newDescriptor.outChannel, "inChannel": newDescriptor.channel])
            }
        }
        let numberOfSamples = avframe.pointee.nb_samples
        let outSamples = swr_get_out_samples(swrContext, numberOfSamples)
        var frameBuffer = Array(tuple: avframe.pointee.data).map { UnsafePointer<UInt8>($0) }
        let channels = descriptor.outChannel.nb_channels
        var bufferSize = [Int32(0)]
        // The return value is already multiplied by the channel count, so the
        // per-plane size written into bufferSize is used instead.
        _ = av_samples_get_buffer_size(&bufferSize, channels, outSamples, descriptor.audioFormat.sampleFormat, 1)
        let frame = AudioFrame(dataSize: Int(bufferSize[0]), audioFormat: descriptor.audioFormat)
        // NOTE(review): a negative swr_convert result (conversion error) would
        // trap in the UInt32 conversion here — consider guarding.
        frame.numberOfSamples = UInt32(swr_convert(swrContext, &frame.data, outSamples, &frameBuffer, numberOfSamples))
        return frame
    }

    func shutdown() {
        swr_free(&swrContext)
    }
}
|
||||
|
||||
/// Describes an audio stream (sample format, rate, channel layout) and derives
/// the `AVAudioFormat` that playback should use for it.
public class AudioDescriptor: Equatable {
    // static let defaultValue = AudioDescriptor()
    public let sampleRate: Int32
    public private(set) var audioFormat: AVAudioFormat
    // Source channel layout from the stream.
    fileprivate(set) var channel: AVChannelLayout
    fileprivate let sampleFormat: AVSampleFormat
    // Output channel layout actually used for playback (may be remapped).
    fileprivate var outChannel: AVChannelLayout

    private convenience init() {
        self.init(sampleFormat: AV_SAMPLE_FMT_FLT, sampleRate: 48000, channel: AVChannelLayout.defaultValue)
    }

    convenience init(codecpar: AVCodecParameters) {
        self.init(sampleFormat: AVSampleFormat(rawValue: codecpar.format), sampleRate: codecpar.sample_rate, channel: codecpar.ch_layout)
    }

    convenience init(frame: AVFrame) {
        self.init(sampleFormat: AVSampleFormat(rawValue: frame.format), sampleRate: frame.sample_rate, channel: frame.ch_layout)
    }

    init(sampleFormat: AVSampleFormat, sampleRate: Int32, channel: AVChannelLayout) {
        self.channel = channel
        outChannel = channel
        // Guard against invalid stream metadata with a 48 kHz fallback.
        if sampleRate <= 0 {
            self.sampleRate = 48000
        } else {
            self.sampleRate = sampleRate
        }
        self.sampleFormat = sampleFormat
        #if os(macOS)
        let channelCount = AVAudioChannelCount(2)
        #else
        let channelCount = KSOptions.outputNumberOfChannels(channelCount: AVAudioChannelCount(outChannel.nb_channels))
        #endif
        audioFormat = AudioDescriptor.audioFormat(sampleFormat: sampleFormat, sampleRate: self.sampleRate, outChannel: &outChannel, channelCount: channelCount)
    }

    public static func == (lhs: AudioDescriptor, rhs: AudioDescriptor) -> Bool {
        lhs.sampleFormat == rhs.sampleFormat && lhs.sampleRate == rhs.sampleRate && lhs.channel == rhs.channel
    }

    /// Compares the descriptor against a raw frame, applying the same
    /// 48 kHz fallback for invalid sample rates as `init`.
    public static func == (lhs: AudioDescriptor, rhs: AVFrame) -> Bool {
        var sampleRate = rhs.sample_rate
        if sampleRate <= 0 {
            sampleRate = 48000
        }
        return lhs.sampleFormat == AVSampleFormat(rawValue: rhs.format) && lhs.sampleRate == sampleRate && lhs.channel == rhs.ch_layout
    }

    /// Maps an FFmpeg sample format + channel layout to an `AVAudioFormat`,
    /// normalizing `outChannel` to a layout that has a CoreAudio layout tag
    /// (falling back to stereo as a last resort).
    static func audioFormat(sampleFormat: AVSampleFormat, sampleRate: Int32, outChannel: inout AVChannelLayout, channelCount: AVAudioChannelCount) -> AVAudioFormat {
        if channelCount != AVAudioChannelCount(outChannel.nb_channels) {
            av_channel_layout_default(&outChannel, Int32(channelCount))
        }
        let layoutTag: AudioChannelLayoutTag
        if let tag = outChannel.layoutTag {
            layoutTag = tag
        } else {
            // Layout has no CoreAudio tag: retry with the default layout for
            // this channel count, then finally fall back to stereo.
            av_channel_layout_default(&outChannel, Int32(channelCount))
            if let tag = outChannel.layoutTag {
                layoutTag = tag
            } else {
                av_channel_layout_default(&outChannel, 2)
                layoutTag = outChannel.layoutTag!
            }
        }
        KSLog("[audio] out channelLayout: \(outChannel)")
        var commonFormat: AVAudioCommonFormat
        var interleaved: Bool
        switch sampleFormat {
        case AV_SAMPLE_FMT_S16:
            commonFormat = .pcmFormatInt16
            interleaved = true
        case AV_SAMPLE_FMT_S32:
            commonFormat = .pcmFormatInt32
            interleaved = true
        case AV_SAMPLE_FMT_FLT:
            commonFormat = .pcmFormatFloat32
            interleaved = true
        case AV_SAMPLE_FMT_DBL:
            commonFormat = .pcmFormatFloat64
            interleaved = true
        case AV_SAMPLE_FMT_S16P:
            commonFormat = .pcmFormatInt16
            interleaved = false
        case AV_SAMPLE_FMT_S32P:
            commonFormat = .pcmFormatInt32
            interleaved = false
        case AV_SAMPLE_FMT_FLTP:
            commonFormat = .pcmFormatFloat32
            interleaved = false
        case AV_SAMPLE_FMT_DBLP:
            commonFormat = .pcmFormatFloat64
            interleaved = false
        default:
            commonFormat = .pcmFormatFloat32
            interleaved = false
        }
        // NOTE(review): the switch's interleaved result is intentionally
        // overridden by the active player type; AudioRendererPlayer requires
        // interleaved output, everything else planar — confirm this is
        // deliberate for all formats above.
        interleaved = KSOptions.audioPlayerType == AudioRendererPlayer.self
        if !(KSOptions.audioPlayerType == AudioRendererPlayer.self || KSOptions.audioPlayerType == AudioUnitPlayer.self) {
            commonFormat = .pcmFormatFloat32
        }
        return AVAudioFormat(commonFormat: commonFormat, sampleRate: Double(sampleRate), interleaved: interleaved, channelLayout: AVAudioChannelLayout(layoutTag: layoutTag)!)
        // AVAudioChannelLayout(layout: outChannel.layoutTag.channelLayout)
    }

    /// Recomputes `audioFormat` after the output channel configuration may
    /// have changed (e.g. a route change on iOS/tvOS).
    public func updateAudioFormat() {
        #if os(macOS)
        let channelCount = AVAudioChannelCount(2)
        #else
        let channelCount = KSOptions.outputNumberOfChannels(channelCount: AVAudioChannelCount(channel.nb_channels))
        #endif
        audioFormat = AudioDescriptor.audioFormat(sampleFormat: sampleFormat, sampleRate: sampleRate, outChannel: &outChannel, channelCount: channelCount)
    }
}
|
||||
136
KSPlayer-main/Sources/KSPlayer/MEPlayer/SubtitleDecode.swift
Normal file
136
KSPlayer-main/Sources/KSPlayer/MEPlayer/SubtitleDecode.swift
Normal file
@@ -0,0 +1,136 @@
|
||||
//
|
||||
// SubtitleDecode.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/11.
|
||||
//
|
||||
|
||||
import CoreGraphics
|
||||
import Foundation
|
||||
import Libavformat
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
/// Decodes subtitle packets (text, ASS, and bitmap) into `SubtitleFrame`s.
class SubtitleDecode: DecodeProtocol {
    private var codecContext: UnsafeMutablePointer<AVCodecContext>?
    // Converts PAL8 bitmap subtitle rects into ARGB pixel buffers.
    private let scale = VideoSwresample(dstFormat: AV_PIX_FMT_ARGB, isDovi: false)
    private var subtitle = AVSubtitle()
    // Stream start time, subtracted so subtitle times are relative to playback.
    private var startTime = TimeInterval(0)
    private let assParse = AssParse()
    required init(assetTrack: FFmpegAssetTrack, options: KSOptions) {
        startTime = assetTrack.startTime.seconds
        do {
            codecContext = try assetTrack.createContext(options: options)
            // Prime the ASS parser with the stream's style header, if present.
            if let pointer = codecContext?.pointee.subtitle_header {
                let subtitleHeader = String(cString: pointer)
                _ = assParse.canParse(scanner: Scanner(string: subtitleHeader))
            }
        } catch {
            KSLog(error as CustomStringConvertible)
        }
    }

    func decode() {}

    /// Decodes one subtitle packet and emits a frame per subtitle part.
    func decodeFrame(from packet: Packet, completionHandler: @escaping (Result<MEFrame, Error>) -> Void) {
        guard let codecContext else {
            return
        }
        var gotsubtitle = Int32(0)
        _ = avcodec_decode_subtitle2(codecContext, &subtitle, &gotsubtitle, packet.corePacket)
        if gotsubtitle == 0 {
            return
        }
        let timestamp = packet.timestamp
        var start = packet.assetTrack.timebase.cmtime(for: timestamp).seconds + TimeInterval(subtitle.start_display_time) / 1000.0
        if start >= startTime {
            start -= startTime
        }
        var duration = 0.0
        if subtitle.end_display_time != UInt32.max {
            duration = TimeInterval(subtitle.end_display_time - subtitle.start_display_time) / 1000.0
        }
        // Fall back to the packet's own duration when the subtitle carries none.
        if duration == 0, packet.duration != 0 {
            duration = packet.assetTrack.timebase.cmtime(for: packet.duration).seconds
        }
        var parts = text(subtitle: subtitle)
        /// Instead of updating `end` on the previous subtitle frame, an empty
        /// subtitle is inserted to clear the display.
        /// Subtitles may decode out of order; patching the previous frame could
        /// leave `end` smaller than `start`, so the subtitle would never clear.
        if parts.isEmpty {
            parts.append(SubtitlePart(0, 0, attributedString: nil))
        }
        for part in parts {
            part.start = start
            if duration == 0 {
                // Unknown duration: keep showing until replaced/cleared.
                part.end = .infinity
            } else {
                part.end = start + duration
            }
            let frame = SubtitleFrame(part: part, timebase: packet.assetTrack.timebase)
            frame.timestamp = timestamp
            completionHandler(.success(frame))
        }
        avsubtitle_free(&subtitle)
    }

    func doFlushCodec() {}

    func shutdown() {
        scale.shutdown()
        avsubtitle_free(&subtitle)
        if let codecContext {
            avcodec_close(codecContext)
            avcodec_free_context(&self.codecContext)
        }
    }

    /// Extracts the displayable parts of an AVSubtitle: plain-text rects are
    /// merged into one attributed string, ASS rects are parsed individually,
    /// and bitmap rects are composited into a single PNG-backed image part.
    private func text(subtitle: AVSubtitle) -> [SubtitlePart] {
        var parts = [SubtitlePart]()
        var images = [(CGRect, CGImage)]()
        var origin: CGPoint = .zero
        var attributedString: NSMutableAttributedString?
        for i in 0 ..< Int(subtitle.num_rects) {
            guard let rect = subtitle.rects[i]?.pointee else {
                continue
            }
            if i == 0 {
                // Use the first rect's position for a single-image subtitle.
                origin = CGPoint(x: Int(rect.x), y: Int(rect.y))
            }
            if let text = rect.text {
                if attributedString == nil {
                    attributedString = NSMutableAttributedString()
                }
                attributedString?.append(NSAttributedString(string: String(cString: text)))
            } else if let ass = rect.ass {
                let scanner = Scanner(string: String(cString: ass))
                if let group = assParse.parsePart(scanner: scanner) {
                    parts.append(group)
                }
            } else if rect.type == SUBTITLE_BITMAP {
                if let image = scale.transfer(format: AV_PIX_FMT_PAL8, width: rect.w, height: rect.h, data: Array(tuple: rect.data), linesize: Array(tuple: rect.linesize))?.cgImage() {
                    images.append((CGRect(x: Int(rect.x), y: Int(rect.y), width: Int(rect.w), height: Int(rect.h)), image))
                }
            }
        }
        if images.count > 0 {
            let part = SubtitlePart(0, 0, attributedString: nil)
            if images.count > 1 {
                // Multiple rects are combined into one canvas at the origin.
                origin = .zero
            }
            var image: UIImage?
            // Subtitles need transparency so JPEG is out; TIFF support on iOS
            // is poor (green background); HEIC blocks the main thread when
            // displayed — so PNG is used.
            if let data = CGImage.combine(images: images)?.data(type: .png, quality: 0.2) {
                image = UIImage(data: data)
            }
            part.image = image
            part.origin = origin
            parts.append(part)
        }
        if let attributedString {
            parts.append(SubtitlePart(0, 0, attributedString: attributedString))
        }
        return parts
    }
}
|
||||
@@ -0,0 +1,132 @@
|
||||
//
|
||||
// ThumbnailController.swift
|
||||
//
|
||||
//
|
||||
// Created by kintan on 12/27/23.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Foundation
|
||||
import Libavcodec
|
||||
import Libavformat
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#endif
|
||||
/// One generated video thumbnail: the image plus its timestamp (seconds).
public struct FFThumbnail {
    public let image: UIImage
    public let time: TimeInterval
}
|
||||
|
||||
/// Receives incremental progress callbacks while thumbnails are generated.
public protocol ThumbnailControllerDelegate: AnyObject {
    /// Called after each thumbnail is produced with the thumbnails so far and
    /// the index of the most recent one.
    func didUpdate(thumbnails: [FFThumbnail], forFile file: URL, withProgress: Int)
}
|
||||
|
||||
/// Generates a fixed number of evenly spaced thumbnails for a video by
/// seeking through its first video stream with FFmpeg.
public class ThumbnailController {
    public weak var delegate: ThumbnailControllerDelegate?
    private let thumbnailCount: Int
    public init(thumbnailCount: Int = 100) {
        self.thumbnailCount = thumbnailCount
    }

    /// Asynchronously generates up to `thumbnailCount` thumbnails for `url`.
    /// - Parameter thumbWidth: output width; height keeps the source aspect.
    public func generateThumbnail(for url: URL, thumbWidth: Int32 = 240) async throws -> [FFThumbnail] {
        try await Task {
            try getPeeks(for: url, thumbWidth: thumbWidth)
        }.value
    }

    /// Synchronous worker: opens the file, seeks to evenly spaced positions in
    /// the video stream, decodes one frame per position, and scales it down.
    /// Returns whatever was produced so far if a seek fails.
    private func getPeeks(for url: URL, thumbWidth: Int32 = 240) throws -> [FFThumbnail] {
        let urlString: String
        if url.isFileURL {
            urlString = url.path
        } else {
            urlString = url.absoluteString
        }
        var thumbnails = [FFThumbnail]()
        var formatCtx = avformat_alloc_context()
        defer {
            avformat_close_input(&formatCtx)
        }
        var result = avformat_open_input(&formatCtx, urlString, nil, nil)
        guard result == 0, let formatCtx else {
            throw NSError(errorCode: .formatOpenInput, avErrorCode: result)
        }
        result = avformat_find_stream_info(formatCtx, nil)
        guard result == 0 else {
            throw NSError(errorCode: .formatFindStreamInfo, avErrorCode: result)
        }
        // Locate the first video stream.
        var videoStreamIndex = -1
        for i in 0 ..< Int32(formatCtx.pointee.nb_streams) {
            if formatCtx.pointee.streams[Int(i)]?.pointee.codecpar.pointee.codec_type == AVMEDIA_TYPE_VIDEO {
                videoStreamIndex = Int(i)
                break
            }
        }
        guard videoStreamIndex >= 0, let videoStream = formatCtx.pointee.streams[videoStreamIndex] else {
            throw NSError(description: "No video stream")
        }

        let videoAvgFrameRate = videoStream.pointee.avg_frame_rate
        if videoAvgFrameRate.den == 0 || av_q2d(videoAvgFrameRate) == 0 {
            throw NSError(description: "Avg frame rate = 0, ignore")
        }
        var codecContext = try videoStream.pointee.codecpar.pointee.createContext(options: nil)
        defer {
            avcodec_close(codecContext)
            var codecContext: UnsafeMutablePointer<AVCodecContext>? = codecContext
            avcodec_free_context(&codecContext)
        }
        let thumbHeight = thumbWidth * codecContext.pointee.height / codecContext.pointee.width
        let reScale = VideoSwresample(dstWidth: thumbWidth, dstHeight: thumbHeight, isDovi: false)
        // let duration = formatCtx.pointee.duration
        // Seeks are in the video stream's timebase, so the container duration
        // must be rescaled rather than used directly.
        let duration = av_rescale_q(formatCtx.pointee.duration,
                                    AVRational(num: 1, den: AV_TIME_BASE), videoStream.pointee.time_base)
        let interval = duration / Int64(thumbnailCount)
        var packet = AVPacket()
        let timeBase = Timebase(videoStream.pointee.time_base)
        var frame = av_frame_alloc()
        defer {
            av_frame_free(&frame)
        }
        guard let frame else {
            throw NSError(description: "can not av_frame_alloc")
        }
        for i in 0 ..< thumbnailCount {
            let seek_pos = interval * Int64(i) + videoStream.pointee.start_time
            avcodec_flush_buffers(codecContext)
            result = av_seek_frame(formatCtx, Int32(videoStreamIndex), seek_pos, AVSEEK_FLAG_BACKWARD)
            // Fix: av_seek_frame reports success as >= 0, not exactly 0; the
            // old `== 0` check aborted early on legitimate seeks.
            guard result >= 0 else {
                return thumbnails
            }
            avcodec_flush_buffers(codecContext)
            while av_read_frame(formatCtx, &packet) >= 0 {
                // Fix: each av_read_frame takes a new reference; unref at the
                // end of every iteration (defer also fires on break/continue)
                // — previously packets leaked until the single unref after
                // the loop.
                defer { av_packet_unref(&packet) }
                if packet.stream_index == Int32(videoStreamIndex) {
                    if avcodec_send_packet(codecContext, &packet) < 0 {
                        break
                    }
                    let ret = avcodec_receive_frame(codecContext, frame)
                    if ret < 0 {
                        // AVERROR(EAGAIN): decoder needs more input.
                        if ret == -EAGAIN {
                            continue
                        } else {
                            break
                        }
                    }
                    let image = reScale.transfer(frame: frame.pointee)?.cgImage().map {
                        UIImage(cgImage: $0)
                    }
                    let currentTimeStamp = frame.pointee.best_effort_timestamp
                    if let image {
                        let thumbnail = FFThumbnail(image: image, time: timeBase.cmtime(for: currentTimeStamp).seconds)
                        thumbnails.append(thumbnail)
                        delegate?.didUpdate(thumbnails: thumbnails, forFile: url, withProgress: i)
                    }
                    // One frame per seek position is enough.
                    break
                }
            }
        }
        av_packet_unref(&packet)
        reScale.shutdown()
        return thumbnails
    }
}
|
||||
216
KSPlayer-main/Sources/KSPlayer/MEPlayer/VideoToolboxDecode.swift
Normal file
216
KSPlayer-main/Sources/KSPlayer/MEPlayer/VideoToolboxDecode.swift
Normal file
@@ -0,0 +1,216 @@
|
||||
//
|
||||
// VideoToolboxDecode.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2018/3/10.
|
||||
//
|
||||
|
||||
import FFmpegKit
|
||||
import Libavformat
|
||||
#if canImport(VideoToolbox)
|
||||
import VideoToolbox
|
||||
|
||||
/// Hardware video decoder backed by VideoToolbox.
/// Feeds FFmpeg packets into a `VTDecompressionSession` and delivers decoded
/// frames through the `completionHandler` passed to `decodeFrame(from:completionHandler:)`.
class VideoToolboxDecode: DecodeProtocol {
    /// The active decompression session. Assigning a replacement invalidates
    /// the previous session so VideoToolbox can release its resources.
    private var session: DecompressionSession {
        didSet {
            VTDecompressionSessionInvalidate(oldValue.decompressionSession)
        }
    }

    private let options: KSOptions
    /// Offset added to packet timestamps after a discontinuity (see decodeFrame).
    private var startTime = Int64(0)
    /// Highest (adjusted) timestamp delivered so far, advanced by each frame's duration.
    private var lastPosition = Int64(0)
    /// Set when the session dies (e.g. kVTInvalidSessionErr) on a non-key frame;
    /// the session is rebuilt lazily on the next decode call.
    private var needReconfig = false

    init(options: KSOptions, session: DecompressionSession) {
        self.options = options
        self.session = session
    }

    /// Decodes one packet. Decoded frames (possibly delivered asynchronously by
    /// VideoToolbox) are reported via `completionHandler`; packets without a
    /// payload are silently dropped.
    func decodeFrame(from packet: Packet, completionHandler: @escaping (Result<MEFrame, Error>) -> Void) {
        if needReconfig {
            // Fixes decode failures after the app returns from background to foreground:
            // rebuild the session and reset the timestamp bookkeeping.
            session = DecompressionSession(assetTrack: session.assetTrack, options: options)!
            doFlushCodec()
            needReconfig = false
        }
        guard let corePacket = packet.corePacket?.pointee, let data = corePacket.data else {
            return
        }
        do {
            let sampleBuffer = try session.formatDescription.getSampleBuffer(isConvertNALSize: session.assetTrack.isConvertNALSize, data: data, size: Int(corePacket.size))
            let flags: VTDecodeFrameFlags = [
                ._EnableAsynchronousDecompression,
            ]
            var flagOut = VTDecodeInfoFlags.frameDropped
            // Capture plain values now; the packet's core pointer may be recycled
            // before the asynchronous output callback runs.
            let timestamp = packet.timestamp
            let packetFlags = corePacket.flags
            let duration = corePacket.duration
            let size = corePacket.size
            let status = VTDecompressionSessionDecodeFrame(session.decompressionSession, sampleBuffer: sampleBuffer, flags: flags, infoFlagsOut: &flagOut) { [weak self] status, infoFlags, imageBuffer, _, _ in
                guard let self, !infoFlags.contains(.frameDropped) else {
                    return
                }
                guard status == noErr else {
                    if status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr || status == kVTVideoDecoderBadDataErr {
                        if packet.isKeyFrame {
                            completionHandler(.failure(NSError(errorCode: .codecVideoReceiveFrame, avErrorCode: status)))
                        } else {
                            // Fixes decode failures after the app returns from background
                            // to foreground: defer a session rebuild instead of failing.
                            self.needReconfig = true
                        }
                    }
                    return
                }
                let frame = VideoVTBFrame(fps: session.assetTrack.nominalFrameRate, isDovi: session.assetTrack.dovi != nil)
                frame.corePixelBuffer = imageBuffer
                frame.timebase = session.assetTrack.timebase
                // On a key frame flagged DISCARD, rebase subsequent timestamps so the
                // output timeline keeps advancing monotonically from lastPosition.
                if packet.isKeyFrame, packetFlags & AV_PKT_FLAG_DISCARD != 0, self.lastPosition > 0 {
                    self.startTime = self.lastPosition - timestamp
                }
                self.lastPosition = max(self.lastPosition, timestamp)
                frame.position = packet.position
                frame.timestamp = self.startTime + timestamp
                frame.duration = duration
                frame.size = size
                self.lastPosition += frame.duration
                completionHandler(.success(frame))
            }
            if status == noErr {
                // Only needed in synchronous mode; with async decompression enabled
                // the callback fires on VideoToolbox's own thread.
                if !flags.contains(._EnableAsynchronousDecompression) {
                    VTDecompressionSessionWaitForAsynchronousFrames(session.decompressionSession)
                }
            } else if status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr || status == kVTVideoDecoderBadDataErr {
                if packet.isKeyFrame {
                    throw NSError(errorCode: .codecVideoReceiveFrame, avErrorCode: status)
                } else {
                    // Fixes decode failures after the app returns from background to foreground.
                    needReconfig = true
                }
            }
        } catch {
            completionHandler(.failure(error))
        }
    }

    /// Resets the timestamp-rebasing state (called on flush/seek and session rebuild).
    func doFlushCodec() {
        lastPosition = 0
        startTime = 0
    }

    /// Invalidates the underlying VideoToolbox session.
    func shutdown() {
        VTDecompressionSessionInvalidate(session.decompressionSession)
    }

    /// Resets decode state before (re)starting decoding.
    func decode() {
        lastPosition = 0
        startTime = 0
    }
}
|
||||
|
||||
/// Wraps a `VTDecompressionSession` plus the `CMFormatDescription` and asset
/// track it was created for. Construction fails (returns nil) when the track
/// lacks a pixel format / format description or session creation fails.
class DecompressionSession {
    fileprivate let formatDescription: CMFormatDescription
    fileprivate let decompressionSession: VTDecompressionSession
    fileprivate var assetTrack: FFmpegAssetTrack
    init?(assetTrack: FFmpegAssetTrack, options: KSOptions) {
        self.assetTrack = assetTrack
        guard let pixelFormatType = assetTrack.pixelFormatType, let formatDescription = assetTrack.formatDescription else {
            return nil
        }
        self.formatDescription = formatDescription
        #if os(macOS)
        // Opt in to pro-codec and supplemental decoders that are not registered by default on macOS.
        VTRegisterProfessionalVideoWorkflowVideoDecoders()
        if #available(macOS 11.0, *) {
            VTRegisterSupplementalVideoDecoderIfAvailable(formatDescription.mediaSubType.rawValue)
        }
        #endif
        // VTDecompressionSessionCanAcceptFormatDescription(<#T##session: VTDecompressionSession##VTDecompressionSession#>, formatDescription: <#T##CMFormatDescription#>)
        // Request IOSurface-backed, Metal-compatible pixel buffers sized to the track.
        let attributes: NSMutableDictionary = [
            kCVPixelBufferPixelFormatTypeKey: pixelFormatType,
            kCVPixelBufferMetalCompatibilityKey: true,
            kCVPixelBufferWidthKey: assetTrack.codecpar.width,
            kCVPixelBufferHeightKey: assetTrack.codecpar.height,
            kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(),
        ]
        var session: VTDecompressionSession?
        // swiftlint:disable line_length
        let status = VTDecompressionSessionCreate(allocator: kCFAllocatorDefault, formatDescription: formatDescription, decoderSpecification: CMFormatDescriptionGetExtensions(formatDescription), imageBufferAttributes: attributes, outputCallback: nil, decompressionSessionOut: &session)
        // swiftlint:enable line_length
        guard status == noErr, let decompressionSession = session else {
            return nil
        }
        if #available(iOS 14.0, tvOS 14.0, macOS 11.0, *) {
            // Forward per-frame HDR metadata (e.g. HDR10+) to the decoded pixel buffers.
            VTSessionSetProperty(decompressionSession, key: kVTDecompressionPropertyKey_PropagatePerFrameHDRDisplayMetadata,
                                 value: kCFBooleanTrue)
        }
        if let destinationDynamicRange = options.availableDynamicRange(nil) {
            // Ask VideoToolbox to convert output into the display's dynamic range.
            let pixelTransferProperties = [kVTPixelTransferPropertyKey_DestinationColorPrimaries: destinationDynamicRange.colorPrimaries,
                                           kVTPixelTransferPropertyKey_DestinationTransferFunction: destinationDynamicRange.transferFunction,
                                           kVTPixelTransferPropertyKey_DestinationYCbCrMatrix: destinationDynamicRange.yCbCrMatrix]
            VTSessionSetProperty(decompressionSession,
                                 key: kVTDecompressionPropertyKey_PixelTransferProperties,
                                 value: pixelTransferProperties as CFDictionary)
        }
        self.decompressionSession = decompressionSession
    }
}
|
||||
#endif
|
||||
|
||||
extension CMFormatDescription {
    /// Wraps a raw encoded frame in a `CMSampleBuffer` for VideoToolbox.
    ///
    /// When `isConvertNALSize` is true the payload uses 3-byte NAL length
    /// prefixes and is repacked (via an FFmpeg dynamic buffer) into the 4-byte
    /// big-endian length prefixes that VideoToolbox expects; otherwise the data
    /// is wrapped as-is without copying.
    /// - Throws: `.codecVideoReceiveFrame` when the dynamic buffer or the
    ///   sample buffer cannot be created.
    fileprivate func getSampleBuffer(isConvertNALSize: Bool, data: UnsafeMutablePointer<UInt8>, size: Int) throws -> CMSampleBuffer {
        if isConvertNALSize {
            var ioContext: UnsafeMutablePointer<AVIOContext>?
            let status = avio_open_dyn_buf(&ioContext)
            if status == 0 {
                var nalSize: UInt32 = 0
                let end = data + size
                var nalStart = data
                while nalStart < end {
                    // Read the 3-byte big-endian NAL length, rewrite it as 4 bytes,
                    // then copy the NAL payload unchanged.
                    nalSize = UInt32(nalStart[0]) << 16 | UInt32(nalStart[1]) << 8 | UInt32(nalStart[2])
                    avio_wb32(ioContext, nalSize)
                    nalStart += 3
                    avio_write(ioContext, nalStart, Int32(nalSize))
                    nalStart += Int(nalSize)
                }
                var demuxBuffer: UnsafeMutablePointer<UInt8>?
                let demuxSze = avio_close_dyn_buf(ioContext, &demuxBuffer)
                // NOTE(review): `demuxBuffer` is allocated by FFmpeg and wrapped below
                // with kCFAllocatorNull (no copy, no free) — it does not appear to be
                // av_free'd anywhere visible here; confirm ownership / possible leak.
                return try createSampleBuffer(data: demuxBuffer, size: Int(demuxSze))
            } else {
                throw NSError(errorCode: .codecVideoReceiveFrame, avErrorCode: status)
            }
        } else {
            return try createSampleBuffer(data: data, size: size)
        }
    }

    /// Builds a single-sample `CMSampleBuffer` over `data` without copying
    /// (`kCFAllocatorNull` block allocator), using `self` as the format description.
    /// - Throws: `.codecVideoReceiveFrame` when block- or sample-buffer creation fails.
    private func createSampleBuffer(data: UnsafeMutablePointer<UInt8>?, size: Int) throws -> CMSampleBuffer {
        var blockBuffer: CMBlockBuffer?
        var sampleBuffer: CMSampleBuffer?
        // swiftlint:disable line_length
        var status = CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: data, blockLength: size, blockAllocator: kCFAllocatorNull, customBlockSource: nil, offsetToData: 0, dataLength: size, flags: 0, blockBufferOut: &blockBuffer)
        if status == noErr {
            status = CMSampleBufferCreate(allocator: kCFAllocatorDefault, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: self, sampleCount: 1, sampleTimingEntryCount: 0, sampleTimingArray: nil, sampleSizeEntryCount: 0, sampleSizeArray: nil, sampleBufferOut: &sampleBuffer)
            if let sampleBuffer {
                return sampleBuffer
            }
        }
        throw NSError(errorCode: .codecVideoReceiveFrame, avErrorCode: status)
        // swiftlint:enable line_length
    }
}
|
||||
|
||||
extension CMVideoCodecType {
    /// The MP4 codec-configuration atom name for this codec type.
    /// Unmapped codecs fall back to "avcC", matching the previous behavior.
    var avc: String {
        let atomNames: [CMVideoCodecType: String] = [
            kCMVideoCodecType_MPEG4Video: "esds",
            kCMVideoCodecType_H264: "avcC",
            kCMVideoCodecType_HEVC: "hvcC",
            kCMVideoCodecType_VP9: "vpcC",
        ]
        return atomNames[self] ?? "avcC"
    }
}
|
||||
299
KSPlayer-main/Sources/KSPlayer/Metal/DisplayModel.swift
Normal file
299
KSPlayer-main/Sources/KSPlayer/Metal/DisplayModel.swift
Normal file
@@ -0,0 +1,299 @@
|
||||
//
|
||||
// DisplayModel.swift
|
||||
// KSPlayer-iOS
|
||||
//
|
||||
// Created by kintan on 2020/1/11.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import Metal
|
||||
import simd
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#endif
|
||||
|
||||
extension DisplayEnum {
    // One shared render model per display mode, created on first access.
    private static var planeModel = PlaneDisplayModel()
    private static var vrModel = VRDisplayModel()
    private static var vrBoxModel = VRBoxDisplayModel()

    /// Issues the draw calls for the current display mode on `encoder`.
    func set(encoder: MTLRenderCommandEncoder) {
        switch self {
        case .plane:
            Self.planeModel.set(encoder: encoder)
        case .vr:
            Self.vrModel.set(encoder: encoder)
        case .vrBox:
            Self.vrBoxModel.set(encoder: encoder)
        }
    }

    /// Returns the render pipeline matching the pixel buffer's plane count and
    /// bit depth for the current display mode.
    func pipeline(planeCount: Int, bitDepth: Int32) -> MTLRenderPipelineState {
        switch self {
        case .plane:
            return Self.planeModel.pipeline(planeCount: planeCount, bitDepth: bitDepth)
        case .vr:
            return Self.vrModel.pipeline(planeCount: planeCount, bitDepth: bitDepth)
        case .vrBox:
            return Self.vrBoxModel.pipeline(planeCount: planeCount, bitDepth: bitDepth)
        }
    }

    /// Forwards drag gestures to the spherical models so the user can look
    /// around; a no-op for the flat plane mode.
    func touchesMoved(touch: UITouch) {
        switch self {
        case .vr:
            Self.vrModel.touchesMoved(touch: touch)
        case .vrBox:
            Self.vrBoxModel.touchesMoved(touch: touch)
        case .plane:
            break
        }
    }
}
|
||||
|
||||
/// Renders video on a flat full-screen quad (two triangles via a 4-index
/// triangle strip). Vertex/UV buffers are built once at init.
private class PlaneDisplayModel {
    // Pipelines keyed by plane count and bit depth; built lazily on first use.
    private lazy var yuv = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture")
    private lazy var yuvp010LE = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture", bitDepth: 10)
    private lazy var nv12 = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture")
    private lazy var p010LE = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture", bitDepth: 10)
    private lazy var bgra = MetalRender.makePipelineState(fragmentFunction: "displayTexture")
    let indexCount: Int
    let indexType = MTLIndexType.uint16
    let primitiveType = MTLPrimitiveType.triangleStrip
    let indexBuffer: MTLBuffer
    let posBuffer: MTLBuffer?
    let uvBuffer: MTLBuffer?

    fileprivate init() {
        let (indices, positions, uvs) = PlaneDisplayModel.makeQuad()
        let device = MetalRender.device
        indexCount = indices.count
        indexBuffer = device.makeBuffer(bytes: indices, length: MemoryLayout<UInt16>.size * indexCount)!
        posBuffer = device.makeBuffer(bytes: positions, length: MemoryLayout<simd_float4>.size * positions.count)
        uvBuffer = device.makeBuffer(bytes: uvs, length: MemoryLayout<simd_float2>.size * uvs.count)
    }

    /// Builds the full-screen quad geometry: 4 clip-space corner positions,
    /// matching texture coordinates, and strip indices.
    /// (Renamed from the misleading `genSphere` — this generates a flat quad.)
    private static func makeQuad() -> ([UInt16], [simd_float4], [simd_float2]) {
        let indices: [UInt16] = [0, 1, 2, 3]
        let positions: [simd_float4] = [
            [-1.0, -1.0, 0.0, 1.0],
            [-1.0, 1.0, 0.0, 1.0],
            [1.0, -1.0, 0.0, 1.0],
            [1.0, 1.0, 0.0, 1.0],
        ]
        // V is flipped relative to clip-space Y so the image is upright.
        let uvs: [simd_float2] = [
            [0.0, 1.0],
            [0.0, 0.0],
            [1.0, 1.0],
            [1.0, 0.0],
        ]
        return (indices, positions, uvs)
    }

    /// Binds the quad's vertex buffers and draws it.
    func set(encoder: MTLRenderCommandEncoder) {
        encoder.setFrontFacing(.clockwise)
        encoder.setVertexBuffer(posBuffer, offset: 0, index: 0)
        encoder.setVertexBuffer(uvBuffer, offset: 0, index: 1)
        encoder.drawIndexedPrimitives(type: primitiveType, indexCount: indexCount, indexType: indexType, indexBuffer: indexBuffer, indexBufferOffset: 0)
    }

    /// Selects the pipeline for the given plane count (3 = planar YUV,
    /// 2 = biplanar NV12/P010, 1 = packed BGRA) and bit depth (8 or 10).
    func pipeline(planeCount: Int, bitDepth: Int32) -> MTLRenderPipelineState {
        switch planeCount {
        case 3:
            if bitDepth == 10 {
                return yuvp010LE
            } else {
                return yuv
            }
        case 2:
            if bitDepth == 10 {
                return p010LE
            } else {
                return nv12
            }
        case 1:
            return bgra
        default:
            return bgra
        }
    }
}
|
||||
|
||||
/// Base model for VR rendering: tessellates a unit sphere, tracks the user's
/// view rotation (touch drag and, when enabled, device motion), and selects
/// pipelines by pixel-buffer layout. Subclasses add the projection and draw.
@MainActor
private class SphereDisplayModel {
    // Pipelines keyed by plane count and bit depth; built lazily on first use.
    private lazy var yuv = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture", isSphere: true)
    private lazy var yuvp010LE = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture", isSphere: true, bitDepth: 10)
    private lazy var nv12 = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture", isSphere: true)
    private lazy var p010LE = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture", isSphere: true, bitDepth: 10)
    private lazy var bgra = MetalRender.makePipelineState(fragmentFunction: "displayTexture", isSphere: true)
    // Accumulated drag rotation in radians around X (pitch) and Y (yaw).
    private var fingerRotationX = Float(0)
    private var fingerRotationY = Float(0)
    // Current view-rotation matrix; overwritten by the motion sensor when enabled.
    fileprivate var modelViewMatrix = matrix_identity_float4x4
    let indexCount: Int
    let indexType = MTLIndexType.uint16
    let primitiveType = MTLPrimitiveType.triangle
    let indexBuffer: MTLBuffer
    let posBuffer: MTLBuffer?
    let uvBuffer: MTLBuffer?
    @MainActor
    fileprivate init() {
        let (indices, positions, uvs) = SphereDisplayModel.genSphere()
        let device = MetalRender.device
        indexCount = indices.count
        indexBuffer = device.makeBuffer(bytes: indices, length: MemoryLayout<UInt16>.size * indexCount)!
        posBuffer = device.makeBuffer(bytes: positions, length: MemoryLayout<simd_float4>.size * positions.count)
        uvBuffer = device.makeBuffer(bytes: uvs, length: MemoryLayout<simd_float2>.size * uvs.count)
        #if canImport(UIKit) && canImport(CoreMotion)
        if KSOptions.enableSensor {
            MotionSensor.shared.start()
        }
        #endif
    }

    /// Binds the sphere's vertex buffers and refreshes the view rotation from
    /// the motion sensor when it is enabled. Subclasses call super, then draw.
    func set(encoder: MTLRenderCommandEncoder) {
        encoder.setFrontFacing(.clockwise)
        encoder.setVertexBuffer(posBuffer, offset: 0, index: 0)
        encoder.setVertexBuffer(uvBuffer, offset: 0, index: 1)
        #if canImport(UIKit) && canImport(CoreMotion)
        if KSOptions.enableSensor, let matrix = MotionSensor.shared.matrix() {
            modelViewMatrix = matrix
        }
        #endif
    }

    /// Converts a drag gesture into incremental pitch/yaw and rebuilds the
    /// view-rotation matrix from the accumulated angles.
    @MainActor
    func touchesMoved(touch: UITouch) {
        #if canImport(UIKit)
        let view = touch.view
        #else
        let view: UIView? = nil
        #endif
        var distX = Float(touch.location(in: view).x - touch.previousLocation(in: view).x)
        var distY = Float(touch.location(in: view).y - touch.previousLocation(in: view).y)
        // Scale points to radians (0.005 * 0.6 per point).
        distX *= 0.005
        distY *= 0.005
        fingerRotationX -= distY * 60 / 100
        fingerRotationY -= distX * 60 / 100
        modelViewMatrix = matrix_identity_float4x4.rotateX(radians: fingerRotationX).rotateY(radians: fingerRotationY)
    }

    /// Clears any accumulated user rotation back to the identity view.
    func reset() {
        fingerRotationX = 0
        fingerRotationY = 0
        modelViewMatrix = matrix_identity_float4x4
    }

    /// Tessellates a unit sphere with 200 slices x 100 parallels.
    /// Returns triangle indices, clip-space-ready positions (w = 1), and
    /// equirectangular UVs (u = longitude fraction, v = latitude fraction).
    private static func genSphere() -> ([UInt16], [simd_float4], [simd_float2]) {
        let slicesCount = UInt16(200)
        let parallelsCount = slicesCount / 2
        // 6 indices (two triangles) per quad of the grid.
        let indicesCount = Int(slicesCount) * Int(parallelsCount) * 6
        var indices = [UInt16](repeating: 0, count: indicesCount)
        var positions = [simd_float4]()
        var uvs = [simd_float2]()
        var runCount = 0
        let radius = Float(1.0)
        let step = (2.0 * Float.pi) / Float(slicesCount)
        var i = UInt16(0)
        while i <= parallelsCount {
            var j = UInt16(0)
            while j <= slicesCount {
                // Spherical-to-Cartesian: i walks latitude, j walks longitude.
                let vertex0 = radius * sinf(step * Float(i)) * cosf(step * Float(j))
                let vertex1 = radius * cosf(step * Float(i))
                let vertex2 = radius * sinf(step * Float(i)) * sinf(step * Float(j))
                let vertex3 = Float(1.0)
                let vertex4 = Float(j) / Float(slicesCount)
                let vertex5 = Float(i) / Float(parallelsCount)
                positions.append([vertex0, vertex1, vertex2, vertex3])
                uvs.append([vertex4, vertex5])
                if i < parallelsCount, j < slicesCount {
                    // Each grid cell emits two CCW triangles; rows are
                    // (slicesCount + 1) vertices wide because of the seam column.
                    indices[runCount] = i * (slicesCount + 1) + j
                    runCount += 1
                    indices[runCount] = UInt16((i + 1) * (slicesCount + 1) + j)
                    runCount += 1
                    indices[runCount] = UInt16((i + 1) * (slicesCount + 1) + (j + 1))
                    runCount += 1
                    indices[runCount] = UInt16(i * (slicesCount + 1) + j)
                    runCount += 1
                    indices[runCount] = UInt16((i + 1) * (slicesCount + 1) + (j + 1))
                    runCount += 1
                    indices[runCount] = UInt16(i * (slicesCount + 1) + (j + 1))
                    runCount += 1
                }
                j += 1
            }
            i += 1
        }
        return (indices, positions, uvs)
    }

    /// Selects the pipeline for the given plane count (3 = planar YUV,
    /// 2 = biplanar NV12/P010, 1 = packed BGRA) and bit depth (8 or 10).
    func pipeline(planeCount: Int, bitDepth: Int32) -> MTLRenderPipelineState {
        switch planeCount {
        case 3:
            if bitDepth == 10 {
                return yuvp010LE
            } else {
                return yuv
            }
        case 2:
            if bitDepth == 10 {
                return p010LE
            } else {
                return nv12
            }
        case 1:
            return bgra
        default:
            return bgra
        }
    }
}
|
||||
|
||||
/// Monoscopic VR rendering: draws the video sphere once with a fixed
/// perspective projection combined with the user's view rotation.
private class VRDisplayModel: SphereDisplayModel {
    /// Projection * view, computed once at init; multiplied per-frame with the
    /// rotation matrix inherited from SphereDisplayModel.
    private let viewProjection: simd_float4x4

    override required init() {
        let sceneSize = KSOptions.sceneSize
        let view = simd_float4x4(lookAt: SIMD3<Float>.zero, center: [0, 0, -1000], up: [0, 1, 0])
        let projection = simd_float4x4(perspective: Float.pi / 3,
                                       aspect: Float(sceneSize.width / sceneSize.height),
                                       nearZ: 0.1, farZ: 400.0)
        viewProjection = projection * view
        super.init()
    }

    override func set(encoder: MTLRenderCommandEncoder) {
        super.set(encoder: encoder)
        var mvp = viewProjection * modelViewMatrix
        let mvpBuffer = MetalRender.device.makeBuffer(bytes: &mvp, length: MemoryLayout<simd_float4x4>.size)
        encoder.setVertexBuffer(mvpBuffer, offset: 0, index: 2)
        encoder.drawIndexedPrimitives(type: primitiveType, indexCount: indexCount, indexType: indexType, indexBuffer: indexBuffer, indexBufferOffset: 0)
    }
}
|
||||
|
||||
/// Stereoscopic (cardboard-style) VR rendering: draws the sphere twice,
/// side by side, with slightly offset eye positions (+-0.012 on X).
private class VRBoxDisplayModel: SphereDisplayModel {
    private let modelViewProjectionMatrixLeft: simd_float4x4
    private let modelViewProjectionMatrixRight: simd_float4x4
    override required init() {
        let size = KSOptions.sceneSize
        // Each eye gets half the horizontal screen, so halve the aspect ratio.
        let aspect = Float(size.width / size.height) / 2
        let viewMatrixLeft = simd_float4x4(lookAt: [-0.012, 0, 0], center: [0, 0, -1000], up: [0, 1, 0])
        let viewMatrixRight = simd_float4x4(lookAt: [0.012, 0, 0], center: [0, 0, -1000], up: [0, 1, 0])
        let projectionMatrix = simd_float4x4(perspective: Float.pi / 3, aspect: aspect, nearZ: 0.1, farZ: 400.0)
        modelViewProjectionMatrixLeft = projectionMatrix * viewMatrixLeft
        modelViewProjectionMatrixRight = projectionMatrix * viewMatrixRight
        super.init()
    }

    override func set(encoder: MTLRenderCommandEncoder) {
        super.set(encoder: encoder)
        let layerSize = KSOptions.sceneSize
        let width = Double(layerSize.width / 2)
        // One pass per eye: set the half-screen viewport, upload that eye's MVP
        // (per-frame rotation folded in), and draw the sphere.
        // NOTE(review): both viewports use znear: 0, zfar: 0 — zfar is usually 1;
        // confirm this is intentional.
        [(modelViewProjectionMatrixLeft, MTLViewport(originX: 0, originY: 0, width: width, height: Double(layerSize.height), znear: 0, zfar: 0)),
         (modelViewProjectionMatrixRight, MTLViewport(originX: width, originY: 0, width: width, height: Double(layerSize.height), znear: 0, zfar: 0))].forEach { modelViewProjectionMatrix, viewport in
            encoder.setViewport(viewport)
            var matrix = modelViewProjectionMatrix * modelViewMatrix
            let matrixBuffer = MetalRender.device.makeBuffer(bytes: &matrix, length: MemoryLayout<simd_float4x4>.size)
            encoder.setVertexBuffer(matrixBuffer, offset: 0, index: 2)
            encoder.drawIndexedPrimitives(type: primitiveType, indexCount: indexCount, indexType: indexType, indexBuffer: indexBuffer, indexBufferOffset: 0)
        }
    }
}
|
||||
214
KSPlayer-main/Sources/KSPlayer/Metal/MetalRender.swift
Normal file
214
KSPlayer-main/Sources/KSPlayer/Metal/MetalRender.swift
Normal file
@@ -0,0 +1,214 @@
|
||||
//
|
||||
// MetalRender.swift
|
||||
// KSPlayer-iOS
|
||||
//
|
||||
// Created by kintan on 2020/1/11.
|
||||
//
|
||||
import Accelerate
|
||||
import CoreVideo
|
||||
import Foundation
|
||||
import Metal
|
||||
import QuartzCore
|
||||
import simd
|
||||
|
||||
/// Central Metal renderer: owns the device, shader library and command queue,
/// draws decoded pixel buffers onto CAMetalDrawables, and provides pipeline /
/// texture factory helpers. Fixes: the two left-shift buffer debug labels were
/// misspelled "leftShit"; corrected to "leftShift".
class MetalRender {
    /// The system default Metal device; required — the player cannot run without Metal.
    static let device = MTLCreateSystemDefaultDevice()!
    /// Shader library: the app's default library when present, otherwise the
    /// library bundled with this Swift package.
    static let library: MTLLibrary = {
        var library: MTLLibrary!
        library = device.makeDefaultLibrary()
        if library == nil {
            library = try? device.makeDefaultLibrary(bundle: .module)
        }
        return library
    }()

    // Reused across draw calls; only the color attachment's texture changes per frame.
    private let renderPassDescriptor = MTLRenderPassDescriptor()
    private let commandQueue = MetalRender.device.makeCommandQueue()
    private lazy var samplerState: MTLSamplerState? = {
        let samplerDescriptor = MTLSamplerDescriptor()
        samplerDescriptor.minFilter = .linear
        samplerDescriptor.magFilter = .linear
        return MetalRender.device.makeSamplerState(descriptor: samplerDescriptor)
    }()

    // YCbCr -> RGB conversion matrices for each supported colorimetry,
    // in video-range and full-range variants (see vImage extension below).
    private lazy var colorConversion601VideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_601_4.pointee.videoRange.buffer

    private lazy var colorConversion601FullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_601_4.pointee.buffer

    private lazy var colorConversion709VideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_709_2.pointee.videoRange.buffer

    private lazy var colorConversion709FullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_709_2.pointee.buffer

    private lazy var colorConversionSMPTE240MVideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_SMPTE_240M_1995.videoRange.buffer

    private lazy var colorConversionSMPTE240MFullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_SMPTE_240M_1995.buffer

    private lazy var colorConversion2020VideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_2020.videoRange.buffer

    private lazy var colorConversion2020FullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_2020.buffer

    // Per-channel offsets subtracted before matrix multiply: video range shifts
    // luma by 16/255; both ranges center chroma around zero.
    private lazy var colorOffsetVideoRangeMatrixBuffer: MTLBuffer? = {
        var firstColumn = SIMD3<Float>(-16.0 / 255.0, -128.0 / 255.0, -128.0 / 255.0)
        let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<Float>>.size)
        buffer?.label = "colorOffset"
        return buffer
    }()

    private lazy var colorOffsetFullRangeMatrixBuffer: MTLBuffer? = {
        var firstColumn = SIMD3<Float>(0, -128.0 / 255.0, -128.0 / 255.0)
        let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<Float>>.size)
        buffer?.label = "colorOffset"
        return buffer
    }()

    // Per-channel multipliers applied in the shader: 1 (no shift) or 64
    // (10-bit samples stored in the high bits of 16-bit words).
    private lazy var leftShiftMatrixBuffer: MTLBuffer? = {
        var firstColumn = SIMD3<UInt8>(1, 1, 1)
        let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<UInt8>>.size)
        buffer?.label = "leftShift"
        return buffer
    }()

    private lazy var leftShiftSixMatrixBuffer: MTLBuffer? = {
        var firstColumn = SIMD3<UInt8>(64, 64, 64)
        let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<UInt8>>.size)
        buffer?.label = "leftShift"
        return buffer
    }()

    /// Clears the drawable to transparent black and presents it.
    /// NOTE(review): this reuses `renderPassDescriptor`, whose color texture is
    /// whatever the last `draw` set — confirm the intended clear target.
    func clear(drawable: MTLDrawable) {
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0)
        renderPassDescriptor.colorAttachments[0].loadAction = .clear
        guard let commandBuffer = commandQueue?.makeCommandBuffer(),
              let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
        else {
            return
        }
        encoder.endEncoding()
        commandBuffer.present(drawable)
        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()
    }

    /// Renders one decoded pixel buffer onto `drawable` using the pipeline and
    /// geometry for `display`, then presents it. Blocks until the GPU finishes.
    @MainActor
    func draw(pixelBuffer: PixelBufferProtocol, display: DisplayEnum = .plane, drawable: CAMetalDrawable) {
        let inputTextures = pixelBuffer.textures()
        renderPassDescriptor.colorAttachments[0].texture = drawable.texture
        guard !inputTextures.isEmpty, let commandBuffer = commandQueue?.makeCommandBuffer(), let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
            return
        }
        encoder.pushDebugGroup("RenderFrame")
        let state = display.pipeline(planeCount: pixelBuffer.planeCount, bitDepth: pixelBuffer.bitDepth)
        encoder.setRenderPipelineState(state)
        encoder.setFragmentSamplerState(samplerState, index: 0)
        for (index, texture) in inputTextures.enumerated() {
            texture.label = "texture\(index)"
            encoder.setFragmentTexture(texture, index: index)
        }
        setFragmentBuffer(pixelBuffer: pixelBuffer, encoder: encoder)
        display.set(encoder: encoder)
        encoder.popDebugGroup()
        encoder.endEncoding()
        commandBuffer.present(drawable)
        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()
    }

    /// Binds the color-conversion matrix, color offset, and left-shift buffers
    /// matching the pixel buffer's YCbCr matrix and range. Packed single-plane
    /// formats (BGRA) need no conversion, so nothing is bound.
    private func setFragmentBuffer(pixelBuffer: PixelBufferProtocol, encoder: MTLRenderCommandEncoder) {
        if pixelBuffer.planeCount > 1 {
            let buffer: MTLBuffer?
            let yCbCrMatrix = pixelBuffer.yCbCrMatrix
            let isFullRangeVideo = pixelBuffer.isFullRangeVideo
            if yCbCrMatrix == kCVImageBufferYCbCrMatrix_ITU_R_709_2 {
                buffer = isFullRangeVideo ? colorConversion709FullRangeMatrixBuffer : colorConversion709VideoRangeMatrixBuffer
            } else if yCbCrMatrix == kCVImageBufferYCbCrMatrix_SMPTE_240M_1995 {
                buffer = isFullRangeVideo ? colorConversionSMPTE240MFullRangeMatrixBuffer : colorConversionSMPTE240MVideoRangeMatrixBuffer
            } else if yCbCrMatrix == kCVImageBufferYCbCrMatrix_ITU_R_2020 {
                buffer = isFullRangeVideo ? colorConversion2020FullRangeMatrixBuffer : colorConversion2020VideoRangeMatrixBuffer
            } else {
                // Unrecognized matrices fall back to BT.601.
                buffer = isFullRangeVideo ? colorConversion601FullRangeMatrixBuffer : colorConversion601VideoRangeMatrixBuffer
            }
            encoder.setFragmentBuffer(buffer, offset: 0, index: 0)
            let colorOffset = isFullRangeVideo ? colorOffsetFullRangeMatrixBuffer : colorOffsetVideoRangeMatrixBuffer
            encoder.setFragmentBuffer(colorOffset, offset: 0, index: 1)
            let leftShift = pixelBuffer.leftShift == 0 ? leftShiftMatrixBuffer : leftShiftSixMatrixBuffer
            encoder.setFragmentBuffer(leftShift, offset: 0, index: 2)
        }
    }

    /// Builds a render pipeline for the given fragment shader, choosing the
    /// plane or sphere vertex shader and the color format for `bitDepth`.
    static func makePipelineState(fragmentFunction: String, isSphere: Bool = false, bitDepth: Int32 = 8) -> MTLRenderPipelineState {
        let descriptor = MTLRenderPipelineDescriptor()
        descriptor.colorAttachments[0].pixelFormat = KSOptions.colorPixelFormat(bitDepth: bitDepth)
        descriptor.vertexFunction = library.makeFunction(name: isSphere ? "mapSphereTexture" : "mapTexture")
        descriptor.fragmentFunction = library.makeFunction(name: fragmentFunction)
        // Attribute 0: float4 position (buffer 0); attribute 1: float2 UV (buffer 1).
        let vertexDescriptor = MTLVertexDescriptor()
        vertexDescriptor.attributes[0].format = .float4
        vertexDescriptor.attributes[0].bufferIndex = 0
        vertexDescriptor.attributes[0].offset = 0
        vertexDescriptor.attributes[1].format = .float2
        vertexDescriptor.attributes[1].bufferIndex = 1
        vertexDescriptor.attributes[1].offset = 0
        vertexDescriptor.layouts[0].stride = MemoryLayout<simd_float4>.stride
        vertexDescriptor.layouts[1].stride = MemoryLayout<simd_float2>.stride
        descriptor.vertexDescriptor = vertexDescriptor
        // swiftlint:disable force_try
        // Pipeline creation only fails on programmer error (bad shader names),
        // so a crash here is intentional.
        return try! library.device.makeRenderPipelineState(descriptor: descriptor)
        // swiftlint:enable force_try
    }

    /// Creates zero-copy Metal textures over an IOSurface-backed pixel buffer,
    /// one per plane; returns [] when the buffer has no IOSurface.
    static func texture(pixelBuffer: CVPixelBuffer) -> [MTLTexture] {
        guard let iosurface = CVPixelBufferGetIOSurface(pixelBuffer)?.takeUnretainedValue() else {
            return []
        }
        let formats = KSOptions.pixelFormat(planeCount: pixelBuffer.planeCount, bitDepth: pixelBuffer.bitDepth)
        return (0 ..< pixelBuffer.planeCount).compactMap { index in
            let width = pixelBuffer.widthOfPlane(at: index)
            let height = pixelBuffer.heightOfPlane(at: index)
            let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: formats[index], width: width, height: height, mipmapped: false)
            return device.makeTexture(descriptor: descriptor, iosurface: iosurface, plane: index)
        }
    }

    /// Creates textures that view existing MTLBuffers (one per plane) using the
    /// given per-plane formats, dimensions and row strides; planes with a nil
    /// buffer are skipped.
    static func textures(formats: [MTLPixelFormat], widths: [Int], heights: [Int], buffers: [MTLBuffer?], lineSizes: [Int]) -> [MTLTexture] {
        (0 ..< formats.count).compactMap { i in
            guard let buffer = buffers[i] else {
                return nil
            }
            let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: formats[i], width: widths[i], height: heights[i], mipmapped: false)
            descriptor.storageMode = buffer.storageMode
            return buffer.makeTexture(descriptor: descriptor, offset: 0, bytesPerRow: lineSizes[i])
        }
    }
}
|
||||
|
||||
// swiftlint:disable identifier_name
// YCbCr->RGB matrices built from each standard's Kr/Kb luma coefficients via
// the vImage_YpCbCrToARGBMatrix initializer below. The 601/709 matrices come
// from Accelerate itself; the equivalent hand-built versions are kept here
// commented out for reference.
// private let kvImage_YpCbCrToARGBMatrix_ITU_R_601_4 = vImage_YpCbCrToARGBMatrix(Kr: 0.299, Kb: 0.114)
// private let kvImage_YpCbCrToARGBMatrix_ITU_R_709_2 = vImage_YpCbCrToARGBMatrix(Kr: 0.2126, Kb: 0.0722)
private let kvImage_YpCbCrToARGBMatrix_SMPTE_240M_1995 = vImage_YpCbCrToARGBMatrix(Kr: 0.212, Kb: 0.087)
private let kvImage_YpCbCrToARGBMatrix_ITU_R_2020 = vImage_YpCbCrToARGBMatrix(Kr: 0.2627, Kb: 0.0593)
|
||||
extension vImage_YpCbCrToARGBMatrix {
|
||||
/**
|
||||
https://en.wikipedia.org/wiki/YCbCr
|
||||
@textblock
|
||||
| R | | 1 0 2-2Kr | | Y' |
|
||||
| G | = | 1 -Kb * (2 - 2 * Kb) / Kg -Kr * (2 - 2 * Kr) / Kg | | Cb |
|
||||
| B | | 1 2 - 2 * Kb 0 | | Cr |
|
||||
@/textblock
|
||||
*/
|
||||
init(Kr: Float, Kb: Float) {
|
||||
let Kg = 1 - Kr - Kb
|
||||
self.init(Yp: 1, Cr_R: 2 - 2 * Kr, Cr_G: -Kr * (2 - 2 * Kr) / Kg, Cb_G: -Kb * (2 - 2 * Kb) / Kg, Cb_B: 2 - 2 * Kb)
|
||||
}
|
||||
|
||||
var videoRange: vImage_YpCbCrToARGBMatrix {
|
||||
vImage_YpCbCrToARGBMatrix(Yp: 255 / 219 * Yp, Cr_R: 255 / 224 * Cr_R, Cr_G: 255 / 224 * Cr_G, Cb_G: 255 / 224 * Cb_G, Cb_B: 255 / 224 * Cb_B)
|
||||
}
|
||||
|
||||
var buffer: MTLBuffer? {
|
||||
var matrix = simd_float3x3([Yp, Yp, Yp], [0.0, Cb_G, Cb_B], [Cr_R, Cr_G, 0.0])
|
||||
let buffer = MetalRender.device.makeBuffer(bytes: &matrix, length: MemoryLayout<simd_float3x3>.size)
|
||||
buffer?.label = "colorConversionMatrix"
|
||||
return buffer
|
||||
}
|
||||
}
|
||||
|
||||
// swiftlint:enable identifier_name
|
||||
89
KSPlayer-main/Sources/KSPlayer/Metal/MotionSensor.swift
Normal file
89
KSPlayer-main/Sources/KSPlayer/Metal/MotionSensor.swift
Normal file
@@ -0,0 +1,89 @@
|
||||
//
|
||||
// MotionSensor.swift
|
||||
// KSPlayer-iOS
|
||||
//
|
||||
// Created by kintan on 2020/1/13.
|
||||
//
|
||||
|
||||
#if canImport(UIKit) && canImport(CoreMotion)
|
||||
import CoreMotion
|
||||
import Foundation
|
||||
import simd
|
||||
import UIKit
|
||||
|
||||
@MainActor
/// Wraps CMMotionManager to expose the device attitude as a display-aligned
/// view matrix for 360°/sphere video rendering.
final class MotionSensor {
    static let shared = MotionSensor()
    private let manager = CMMotionManager()
    // Fixed rotation from CoreMotion's inertial world frame into the renderer's frame.
    private let worldToInertialReferenceFrame = simd_float4x4(euler: -90, y: 0, z: 90)
    // Extra rotation compensating for the current interface orientation.
    private var deviceToDisplay = simd_float4x4.identity
    // Yaw offset chosen once from the interface orientation at creation time.
    private let defaultRadiansY: Float
    private var orientation = UIInterfaceOrientation.unknown {
        didSet {
            // Only recompute the display rotation when the orientation actually changed.
            if oldValue != orientation {
                switch orientation {
                case .portraitUpsideDown:
                    deviceToDisplay = simd_float4x4(euler: 0, y: 0, z: 180)
                case .landscapeRight:
                    deviceToDisplay = simd_float4x4(euler: 0, y: 0, z: -90)
                case .landscapeLeft:
                    deviceToDisplay = simd_float4x4(euler: 0, y: 0, z: 90)
                default:
                    deviceToDisplay = simd_float4x4.identity
                }
            }
        }
    }

    private init() {
        // Seed the yaw offset from whatever orientation the scene starts in.
        switch KSOptions.windowScene?.interfaceOrientation {
        case .landscapeRight:
            defaultRadiansY = -.pi / 2
        case .landscapeLeft:
            defaultRadiansY = .pi / 2
        default:
            defaultRadiansY = 0
        }
    }

    /// True only when motion hardware exists and updates are already running.
    func ready() -> Bool {
        manager.isDeviceMotionAvailable ? manager.isDeviceMotionActive : false
    }

    /// Starts 60 Hz device-motion updates; no-op if unavailable or already active.
    func start() {
        if manager.isDeviceMotionAvailable, !manager.isDeviceMotionActive {
            manager.deviceMotionUpdateInterval = 1 / 60
            manager.startDeviceMotionUpdates()
        }
    }

    func stop() {
        manager.stopDeviceMotionUpdates()
    }

    /// Latest device attitude as a display-aligned matrix, or nil when no
    /// motion sample is available yet. The multiplication order below is
    /// significant: attitude (transposed) → world frame → display orientation → default yaw.
    func matrix() -> simd_float4x4? {
        if var matrix = manager.deviceMotion.flatMap(simd_float4x4.init(motion:)) {
            matrix = matrix.transpose
            matrix *= worldToInertialReferenceFrame
            // Refresh so deviceToDisplay tracks interface-orientation changes.
            orientation = KSOptions.windowScene?.interfaceOrientation ?? .portrait
            matrix = deviceToDisplay * matrix
            matrix = matrix.rotateY(radians: defaultRadiansY)
            return matrix
        }
        return nil
    }
}
|
||||
|
||||
public extension simd_float4x4 {
    /// Builds a 4x4 matrix from the device's attitude rotation matrix.
    init(motion: CMDeviceMotion) {
        self.init(rotation: motion.attitude.rotationMatrix)
    }

    /// Embeds a CoreMotion 3x3 rotation into a 4x4 matrix.
    /// NOTE(review): the third column's w component is -1 rather than 0 —
    /// presumably intentional for the downstream projection math in
    /// MotionSensor.matrix(); confirm before changing.
    init(rotation: CMRotationMatrix) {
        self.init(SIMD4<Float>(Float(rotation.m11), Float(rotation.m12), Float(rotation.m13), 0.0),
                  SIMD4<Float>(Float(rotation.m21), Float(rotation.m22), Float(rotation.m23), 0.0),
                  SIMD4<Float>(Float(rotation.m31), Float(rotation.m32), Float(rotation.m33), -1),
                  SIMD4<Float>(0, 0, 0, 1))
    }
}
|
||||
#endif
|
||||
283
KSPlayer-main/Sources/KSPlayer/Metal/PixelBufferProtocol.swift
Normal file
283
KSPlayer-main/Sources/KSPlayer/Metal/PixelBufferProtocol.swift
Normal file
@@ -0,0 +1,283 @@
|
||||
//
|
||||
// PixelBufferProtocol.swift
|
||||
// KSPlayer-iOS
|
||||
//
|
||||
// Created by kintan on 2019/12/31.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import CoreVideo
|
||||
import Foundation
|
||||
import Libavutil
|
||||
import simd
|
||||
import VideoToolbox
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#endif
|
||||
|
||||
/// Abstraction over a decoded video frame's pixel storage: either a real
/// CVPixelBuffer (hardware decode) or an MTLBuffer-backed PixelBuffer
/// (software/ffmpeg decode). Exposes enough geometry and color metadata
/// for the Metal render path.
public protocol PixelBufferProtocol: AnyObject {
    /// Full-image pixel width.
    var width: Int { get }
    /// Full-image pixel height.
    var height: Int { get }
    /// Bits per component (e.g. 8 or 10).
    var bitDepth: Int32 { get }
    /// Left-shift applied to sample values in the shader (for packed 10-bit formats).
    var leftShift: UInt8 { get }
    /// Number of planes (1 for packed, 2 for biplanar, 3 for planar formats).
    var planeCount: Int { get }
    var formatDescription: CMVideoFormatDescription? { get }
    /// Pixel aspect ratio (sample aspect ratio), not display size.
    var aspectRatio: CGSize { get set }
    // Color metadata as CoreVideo attachment constants.
    var yCbCrMatrix: CFString? { get set }
    var colorPrimaries: CFString? { get set }
    var transferFunction: CFString? { get set }
    var colorspace: CGColorSpace? { get set }
    /// The underlying CVPixelBuffer, if any (nil for MTLBuffer-backed frames).
    var cvPixelBuffer: CVPixelBuffer? { get }
    /// True when samples use full range (0–255) rather than video range.
    var isFullRangeVideo: Bool { get }
    /// Snapshot of the frame as a CGImage (may be expensive).
    func cgImage() -> CGImage?
    /// Per-plane Metal textures for rendering.
    func textures() -> [MTLTexture]
    func widthOfPlane(at planeIndex: Int) -> Int
    func heightOfPlane(at planeIndex: Int) -> Int
    // NOTE(review): "matche" looks like a typo for "matches", but it is part
    // of the public interface — renaming would break callers.
    func matche(formatDescription: CMVideoFormatDescription) -> Bool
}
|
||||
|
||||
extension PixelBufferProtocol {
    /// Pixel dimensions of the buffer as a CGSize.
    var size: CGSize {
        CGSize(width: CGFloat(width), height: CGFloat(height))
    }
}
|
||||
|
||||
/// CVPixelBuffer satisfies PixelBufferProtocol directly: geometry comes from
/// the CoreVideo getters, color metadata is stored as buffer attachments.
extension CVPixelBuffer: PixelBufferProtocol {
    // Hardware-decoded buffers never need a sample left shift.
    public var leftShift: UInt8 { 0 }
    public var cvPixelBuffer: CVPixelBuffer? { self }
    public var width: Int { CVPixelBufferGetWidth(self) }
    public var height: Int { CVPixelBufferGetHeight(self) }
    /// Pixel aspect ratio read from / written to the CoreVideo attachment.
    /// Falls back to 1:1 when the attachment is missing or degenerate.
    public var aspectRatio: CGSize {
        get {
            if let ratio = CVBufferGetAttachment(self, kCVImageBufferPixelAspectRatioKey, nil)?.takeUnretainedValue() as? NSDictionary,
               let horizontal = (ratio[kCVImageBufferPixelAspectRatioHorizontalSpacingKey] as? NSNumber)?.intValue,
               let vertical = (ratio[kCVImageBufferPixelAspectRatioVerticalSpacingKey] as? NSNumber)?.intValue,
               horizontal > 0, vertical > 0
            {
                return CGSize(width: horizontal, height: vertical)
            } else {
                return CGSize(width: 1, height: 1)
            }
        }
        set {
            // Square/degenerate sizes produce a nil dictionary and are not written.
            if let aspectRatio = newValue.aspectRatio {
                CVBufferSetAttachment(self, kCVImageBufferPixelAspectRatioKey, aspectRatio, .shouldPropagate)
            }
        }
    }

    var isPlanar: Bool { CVPixelBufferIsPlanar(self) }

    // Packed (non-planar) buffers report a single plane.
    public var planeCount: Int { isPlanar ? CVPixelBufferGetPlaneCount(self) : 1 }
    /// Format description derived from this image buffer; nil (with a log) on failure.
    public var formatDescription: CMVideoFormatDescription? {
        var formatDescription: CMVideoFormatDescription?
        let err = CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: self, formatDescriptionOut: &formatDescription)
        if err != noErr {
            KSLog("Error at CMVideoFormatDescriptionCreateForImageBuffer \(err)")
        }
        return formatDescription
    }

    public var isFullRangeVideo: Bool {
        CVBufferGetAttachment(self, kCMFormatDescriptionExtension_FullRangeVideo, nil)?.takeUnretainedValue() as? Bool ?? false
    }

    /// All propagated attachments (used to derive a CGColorSpace below).
    public var attachmentsDic: CFDictionary? {
        CVBufferGetAttachments(self, .shouldPropagate)
    }

    public var yCbCrMatrix: CFString? {
        get {
            CVBufferGetAttachment(self, kCVImageBufferYCbCrMatrixKey, nil)?.takeUnretainedValue() as? NSString
        }
        set {
            // Setting nil leaves any existing attachment in place.
            if let newValue {
                CVBufferSetAttachment(self, kCVImageBufferYCbCrMatrixKey, newValue, .shouldPropagate)
            }
        }
    }

    public var colorPrimaries: CFString? {
        get {
            CVBufferGetAttachment(self, kCVImageBufferColorPrimariesKey, nil)?.takeUnretainedValue() as? NSString
        }
        set {
            if let newValue {
                CVBufferSetAttachment(self, kCVImageBufferColorPrimariesKey, newValue, .shouldPropagate)
            }
        }
    }

    public var transferFunction: CFString? {
        get {
            CVBufferGetAttachment(self, kCVImageBufferTransferFunctionKey, nil)?.takeUnretainedValue() as? NSString
        }
        set {
            if let newValue {
                CVBufferSetAttachment(self, kCVImageBufferTransferFunctionKey, newValue, .shouldPropagate)
            }
        }
    }

    public var colorspace: CGColorSpace? {
        get {
            #if os(macOS)
            // macOS may carry an explicit color space; fall back to deriving one from attachments.
            return CVImageBufferGetColorSpace(self)?.takeUnretainedValue() ?? attachmentsDic.flatMap { CVImageBufferCreateColorSpaceFromAttachments($0)?.takeUnretainedValue() }
            #else
            return attachmentsDic.flatMap { CVImageBufferCreateColorSpaceFromAttachments($0)?.takeUnretainedValue() }
            #endif
        }
        set {
            if let newValue {
                CVBufferSetAttachment(self, kCVImageBufferCGColorSpaceKey, newValue, .shouldPropagate)
            }
        }
    }

    public var bitDepth: Int32 {
        // Project extension on OSType maps the pixel format to its bit depth.
        CVPixelBufferGetPixelFormatType(self).bitDepth
    }

    /// Snapshot via VideoToolbox; returns nil if conversion fails.
    public func cgImage() -> CGImage? {
        var cgImage: CGImage?
        VTCreateCGImageFromCVPixelBuffer(self, options: nil, imageOut: &cgImage)
        return cgImage
    }

    public func widthOfPlane(at planeIndex: Int) -> Int {
        CVPixelBufferGetWidthOfPlane(self, planeIndex)
    }

    public func heightOfPlane(at planeIndex: Int) -> Int {
        CVPixelBufferGetHeightOfPlane(self, planeIndex)
    }

    func baseAddressOfPlane(at planeIndex: Int) -> UnsafeMutableRawPointer? {
        CVPixelBufferGetBaseAddressOfPlane(self, planeIndex)
    }

    public func textures() -> [MTLTexture] {
        // Zero-copy IOSurface-backed textures (see MetalRender.texture).
        MetalRender.texture(pixelBuffer: self)
    }

    public func matche(formatDescription: CMVideoFormatDescription) -> Bool {
        CMVideoFormatDescriptionMatchesImageBuffer(formatDescription, imageBuffer: self)
    }
}
|
||||
|
||||
/// MTLBuffer-backed frame storage for software (ffmpeg) decoded AVFrames.
/// Plane data is copied out of the AVFrame at init; the frame need not
/// outlive this object.
class PixelBuffer: PixelBufferProtocol {
    let bitDepth: Int32
    let width: Int
    let height: Int
    let planeCount: Int
    var aspectRatio: CGSize
    let leftShift: UInt8
    let isFullRangeVideo: Bool
    // No CoreVideo buffer exists for software-decoded frames.
    var cvPixelBuffer: CVPixelBuffer? { nil }
    var colorPrimaries: CFString?
    var transferFunction: CFString?
    var yCbCrMatrix: CFString?
    var colorspace: CGColorSpace?
    // NOTE(review): never assigned in this file's visible code — presumably
    // set by the owner; matche(formatDescription:) compares against it.
    var formatDescription: CMVideoFormatDescription? = nil
    private let format: AVPixelFormat
    private let formats: [MTLPixelFormat]
    private let widths: [Int]
    private let heights: [Int]
    private let buffers: [MTLBuffer?]
    private let lineSize: [Int]

    /// Copies plane data from `frame` into Metal buffers, padding each row
    /// to the device's linear-texture alignment when needed.
    init(frame: AVFrame) {
        // Color metadata mapped from ffmpeg enums via project helpers.
        yCbCrMatrix = frame.colorspace.ycbcrMatrix
        colorPrimaries = frame.color_primaries.colorPrimaries
        transferFunction = frame.color_trc.transferFunction
        colorspace = KSOptions.colorSpace(ycbcrMatrix: yCbCrMatrix, transferFunction: transferFunction)
        width = Int(frame.width)
        height = Int(frame.height)
        isFullRangeVideo = frame.color_range == AVCOL_RANGE_JPEG
        aspectRatio = frame.sample_aspect_ratio.size
        format = AVPixelFormat(rawValue: frame.format)
        leftShift = format.leftShift
        bitDepth = format.bitDepth
        planeCount = Int(format.planeCount)
        // Chroma subsampling factors; only log2 == 1 (i.e. 4:2:0 / 4:2:2 style
        // halving) is handled — other factors fall back to full size.
        let desc = av_pix_fmt_desc_get(format)?.pointee
        let chromaW = desc?.log2_chroma_w == 1 ? 2 : 1
        let chromaH = desc?.log2_chroma_h == 1 ? 2 : 1
        switch planeCount {
        case 3:
            widths = [width, width / chromaW, width / chromaW]
            heights = [height, height / chromaH, height / chromaH]
        case 2:
            widths = [width, width / chromaW]
            heights = [height, height / chromaH]
        default:
            widths = [width]
            heights = [height]
        }
        formats = KSOptions.pixelFormat(planeCount: planeCount, bitDepth: bitDepth)
        var buffers = [MTLBuffer?]()
        var lineSize = [Int]()
        let bytes = Array(tuple: frame.data)
        let bytesPerRow = Array(tuple: frame.linesize).compactMap { Int($0) }
        for i in 0 ..< planeCount {
            // Metal linear textures require bytesPerRow aligned to the device minimum.
            let alignment = MetalRender.device.minimumLinearTextureAlignment(for: formats[i])
            lineSize.append(bytesPerRow[i].alignment(value: alignment))
            let buffer: MTLBuffer?
            let size = lineSize[i]
            let byteCount = bytesPerRow[i]
            let height = heights[i]
            if byteCount == size {
                // Already aligned: copy the whole plane in one shot.
                buffer = MetalRender.device.makeBuffer(bytes: bytes[i]!, length: height * size)
            } else {
                // Not aligned: copy row by row into the padded buffer.
                buffer = MetalRender.device.makeBuffer(length: heights[i] * lineSize[i])
                let contents = buffer?.contents()
                let source = bytes[i]!
                var j = 0
                // Performance: while > stride(from:to:by:) > for-in
                while j < height {
                    contents?.advanced(by: j * size).copyMemory(from: source.advanced(by: j * byteCount), byteCount: byteCount)
                    j += 1
                }
            }
            buffers.append(buffer)
        }
        self.lineSize = lineSize
        self.buffers = buffers
    }

    func textures() -> [MTLTexture] {
        MetalRender.textures(formats: formats, widths: widths, heights: heights, buffers: buffers, lineSizes: lineSize)
    }

    func widthOfPlane(at planeIndex: Int) -> Int {
        widths[planeIndex]
    }

    func heightOfPlane(at planeIndex: Int) -> Int {
        heights[planeIndex]
    }

    /// Converts the frame to a CGImage: RGB24 directly, anything else through
    /// a temporary swscale conversion.
    func cgImage() -> CGImage? {
        let image: CGImage?
        if format == AV_PIX_FMT_RGB24 {
            image = CGImage.make(rgbData: buffers[0]!.contents().assumingMemoryBound(to: UInt8.self), linesize: Int(lineSize[0]), width: width, height: height)
        } else {
            let scale = VideoSwresample(isDovi: false)
            image = scale.transfer(format: format, width: Int32(width), height: Int32(height), data: buffers.map { $0?.contents().assumingMemoryBound(to: UInt8.self) }, linesize: lineSize.map { Int32($0) })?.cgImage()
            scale.shutdown()
        }
        return image
    }

    public func matche(formatDescription: CMVideoFormatDescription) -> Bool {
        self.formatDescription == formatDescription
    }
}
|
||||
|
||||
extension CGSize {
    /// CoreVideo pixel-aspect-ratio attachment dictionary, or nil when the
    /// size is degenerate (a zero side) or square (1:1 needs no attachment).
    var aspectRatio: NSDictionary? {
        guard width != 0, height != 0, width != height else {
            return nil
        }
        return [
            kCVImageBufferPixelAspectRatioHorizontalSpacingKey: width,
            kCVImageBufferPixelAspectRatioVerticalSpacingKey: height,
        ]
    }
}
|
||||
103
KSPlayer-main/Sources/KSPlayer/Metal/Shaders.metal
Executable file
103
KSPlayer-main/Sources/KSPlayer/Metal/Shaders.metal
Executable file
@@ -0,0 +1,103 @@
|
||||
//
|
||||
// Shaders.metal
|
||||
#include <metal_stdlib>
|
||||
using namespace metal;
|
||||
|
||||
// Vertex input: clip-space position (attribute 0) and texture coordinate (attribute 1).
struct VertexIn
{
    float4 pos [[attribute(0)]];
    float2 uv [[attribute(1)]];
};

struct VertexOut {
    float4 renderedCoordinate [[position]];
    float2 textureCoordinate;
};

// Pass-through vertex shader for flat (2D) video.
vertex VertexOut mapTexture(VertexIn input [[stage_in]]) {
    VertexOut outVertex;
    outVertex.renderedCoordinate = input.pos;
    outVertex.textureCoordinate = input.uv;
    return outVertex;
}

// Vertex shader for sphere (360°) video: applies the model-view-projection
// matrix supplied in buffer(2).
vertex VertexOut mapSphereTexture(VertexIn input [[stage_in]], constant float4x4& uniforms [[ buffer(2) ]]) {
    VertexOut outVertex;
    outVertex.renderedCoordinate = uniforms * input.pos;
    outVertex.textureCoordinate = input.uv;
    return outVertex;
}

// Fragment shader for already-RGB textures: straight sample, no conversion.
fragment half4 displayTexture(VertexOut mappingVertex [[ stage_in ]],
                              texture2d<half, access::sample> texture [[ texture(0) ]]) {
    constexpr sampler s(address::clamp_to_edge, filter::linear);

    return half4(texture.sample(s, mappingVertex.textureCoordinate));
}

// 3-plane (Y/U/V) conversion. leftShift rescales packed sub-8-bit samples,
// colorOffset recenters chroma, yuvToBGRMatrix does the color-space transform.
fragment half4 displayYUVTexture(VertexOut in [[ stage_in ]],
                                 texture2d<half> yTexture [[ texture(0) ]],
                                 texture2d<half> uTexture [[ texture(1) ]],
                                 texture2d<half> vTexture [[ texture(2) ]],
                                 sampler textureSampler [[ sampler(0) ]],
                                 constant float3x3& yuvToBGRMatrix [[ buffer(0) ]],
                                 constant float3& colorOffset [[ buffer(1) ]],
                                 constant uchar3& leftShift [[ buffer(2) ]])
{
    half3 yuv;
    yuv.x = yTexture.sample(textureSampler, in.textureCoordinate).r;
    yuv.y = uTexture.sample(textureSampler, in.textureCoordinate).r;
    yuv.z = vTexture.sample(textureSampler, in.textureCoordinate).r;
    return half4(half3x3(yuvToBGRMatrix)*(yuv*half3(leftShift)+half3(colorOffset)), 1);
}


// Biplanar (luma + interleaved CbCr) variant of the YUV conversion above.
fragment half4 displayNV12Texture(VertexOut in [[ stage_in ]],
                                  texture2d<half> lumaTexture [[ texture(0) ]],
                                  texture2d<half> chromaTexture [[ texture(1) ]],
                                  sampler textureSampler [[ sampler(0) ]],
                                  constant float3x3& yuvToBGRMatrix [[ buffer(0) ]],
                                  constant float3& colorOffset [[ buffer(1) ]],
                                  constant uchar3& leftShift [[ buffer(2) ]])
{
    half3 yuv;
    yuv.x = lumaTexture.sample(textureSampler, in.textureCoordinate).r;
    yuv.yz = chromaTexture.sample(textureSampler, in.textureCoordinate).rg;
    return half4(half3x3(yuvToBGRMatrix)*(yuv*half3(leftShift)+half3(colorOffset)), 1);
}

// PQ (SMPTE ST 2084) EOTF: decodes a PQ-encoded signal to linear light.
// Constants are the standard m1=2610/16384, m2=2523/32, c1=3424/4096,
// c2=2413/128, c3=2392/128 values.
half3 shaderLinearize(half3 rgb) {
    rgb = pow(max(rgb,0), half3(4096.0/(2523 * 128)));
    rgb = max(rgb - half3(3424./4096), 0.0) / (half3(2413./4096 * 32) - half3(2392./4096 * 32) * rgb);
    rgb = pow(rgb, half3(4096.0 * 4 / 2610));
    return rgb;
}

// Inverse PQ OETF: re-encodes linear light back to a PQ signal.
half3 shaderDeLinearize(half3 rgb) {
    rgb = pow(max(rgb,0), half3(2610./4096 / 4));
    rgb = (half3(3424./4096) - half3(2413./4096 * 32) * rgb) / (half3(1.0) + half3(2392./4096 * 32) * rgb);
    rgb = pow(rgb, half3(2523./4096 * 128));
    return rgb;
}

// Dolby Vision style ICtCp/IPT path: IPT → LMS → linearize → RGB → re-encode.
fragment half4 displayYCCTexture(VertexOut in [[ stage_in ]],
                                 texture2d<half> lumaTexture [[ texture(0) ]],
                                 texture2d<half> chromaTexture [[ texture(1) ]],
                                 sampler textureSampler [[ sampler(0) ]],
                                 constant float3x3& yuvToBGRMatrix [[ buffer(0) ]],
                                 constant float3& colorOffset [[ buffer(1) ]],
                                 constant uchar3& leftShift [[ buffer(2) ]])
{
    half3 ipt;
    ipt.x = lumaTexture.sample(textureSampler, in.textureCoordinate).r;
    ipt.yz = chromaTexture.sample(textureSampler, in.textureCoordinate).rg;
    // half3x3 ipt2lms = half3x3{{1, 0.1952, 0.4104}, {1, -0.2278, 0.2264}, {1, 0.0652, -1.3538}};
    // half3x3 lms2rgb = half3x3{{3.238998, -0.719461, -0.002862}, {-2.272734, 1.874998, -0.268066}, {0.086733, -0.158947, 1.074494}};
    // Rational-fraction forms of the IPT→LMS matrix (exact /8192 coefficients).
    half3x3 ipt2lms = half3x3{{1, 799/8192, 1681/8192}, {1, -933/8192, 1091/8192}, {1, 267/8192, -5545/8192}};
    half3x3 lms2rgb = half3x3{{3.43661, -0.79133, -0.0259499}, {-2.50645, 1.98360, -0.0989137}, {0.06984, -0.192271, 1.12486}};
    half3 lms = ipt2lms*ipt;
    lms = shaderLinearize(lms);
    half3 rgb = lms2rgb*lms;
    rgb = shaderDeLinearize(rgb);
    return half4(rgb, 1);
}
|
||||
110
KSPlayer-main/Sources/KSPlayer/Metal/Transforms.swift
Normal file
110
KSPlayer-main/Sources/KSPlayer/Metal/Transforms.swift
Normal file
@@ -0,0 +1,110 @@
|
||||
//
|
||||
// Transforms.swift
|
||||
// MetalSpectrograph
|
||||
//
|
||||
// Created by David Conner on 9/9/15.
|
||||
// Copyright © 2015 Voxxel. All rights reserved.
|
||||
//
|
||||
|
||||
import simd
|
||||
|
||||
// swiftlint:disable identifier_name
/// Affine/projective matrix constructors used by the sphere-video render path.
/// NOTE(review): simd_float4x4's variadic init takes COLUMNS; the ASCII
/// diagrams below are written row-wise, so some constructors (notably
/// `translate`) are the transpose of the textbook column-vector form —
/// consistent with the `.transpose` usage in MotionSensor; confirm before
/// "fixing" any of them.
extension simd_float4x4 {
    static let identity = matrix_identity_float4x4
    // sx 0  0  0
    // 0  sy 0  0
    // 0  0  sz 0
    // 0  0  0  1

    init(scale x: Float, y: Float, z: Float) {
        self.init(diagonal: [x, y, z, 1.0])
    }

    // 1 0 0 tx
    // 0 1 0 ty
    // 0 0 1 tz
    // 0 0 0 1
    init(translate: SIMD3<Float>) {
        self.init([1, 0.0, 0.0, translate.x],
                  [0.0, 1, 0.0, translate.y],
                  [0.0, 0.0, 1, translate.z],
                  [0.0, 0.0, 0, 1])
    }

    /// Rotation about the X axis by `radians`.
    init(rotationX radians: Float) {
        let cos = cosf(radians)
        let sin = sinf(radians)
        self.init([1, 0.0, 0.0, 0],
                  [0.0, cos, sin, 0],
                  [0.0, -sin, cos, 0],
                  [0.0, 0.0, 0, 1])
    }

    /// Rotation about the Y axis by `radians`.
    init(rotationY radians: Float) {
        let cos = cosf(radians)
        let sin = sinf(radians)
        self.init([cos, 0.0, -sin, 0],
                  [0.0, 1, 0, 0],
                  [sin, 0, cos, 0],
                  [0.0, 0.0, 0, 1])
    }

    /// Rotation about the Z axis by `radians`.
    init(rotationZ radians: Float) {
        let cos = cosf(radians)
        let sin = sinf(radians)
        self.init([cos, sin, 0.0, 0],
                  [-sin, cos, 0, 0],
                  [0.0, 0, 1, 0],
                  [0.0, 0.0, 0, 1])
    }

    /// Right-handed look-at view matrix (eye position, target, up vector).
    public init(lookAt eye: SIMD3<Float>, center: SIMD3<Float>, up: SIMD3<Float>) {
        let N = normalize(eye - center)
        let U = normalize(cross(up, N))
        let V = cross(N, U)
        self.init(rows: [[U.x, V.x, N.x, 0.0],
                         [U.y, V.y, N.y, 0.0],
                         [U.z, V.z, N.z, 0.0],
                         [dot(-U, eye), dot(-V, eye), dot(-N, eye), 1.0]])
    }

    /// Perspective projection (vertical field of view in radians).
    public init(perspective fovyRadians: Float, aspect: Float, nearZ: Float, farZ: Float) {
        let cotan = 1.0 / tanf(fovyRadians / 2.0)
        self.init([cotan / aspect, 0.0, 0.0, 0.0],
                  [0.0, cotan, 0.0, 0.0],
                  [0.0, 0.0, (farZ + nearZ) / (nearZ - farZ), -1],
                  [0.0, 0.0, (2.0 * farZ * nearZ) / (nearZ - farZ), 0])
    }

    /// Combined rotation from Euler angles given in DEGREES (converted below).
    public init(euler x: Float, y: Float, z: Float) {
        let x = x * .pi / 180
        let y = y * .pi / 180
        let z = z * .pi / 180
        let cx = cos(x)
        let sx = sin(x)
        let cy = cos(y)
        let sy = sin(y)
        let cz = cos(z)
        let sz = sin(z)
        let cxsy = cx * sy
        let sxsy = sx * sy
        self.init([cy * cz, -cy * sz, sy, 0.0],
                  [cxsy * cz + cx * sz, -cxsy * sz + cx * cz, -sx * cy, 0.0],
                  [-sxsy * cz + sx * sz, sxsy * sz + sx * cz, cx * cy, 0],
                  [0.0, 0.0, 0, 1])
    }

    // Post-multiply helpers: apply an additional axis rotation to this matrix.
    func rotateX(radians: Float) -> simd_float4x4 {
        self * simd_float4x4(rotationX: radians)
    }

    func rotateY(radians: Float) -> simd_float4x4 {
        self * simd_float4x4(rotationY: radians)
    }

    func rotateZ(radians: Float) -> simd_float4x4 {
        self * simd_float4x4(rotationZ: radians)
    }
}

// swiftlint:enable identifier_name
|
||||
12
KSPlayer-main/Sources/KSPlayer/Subtitle/AudioRecognize.swift
Normal file
12
KSPlayer-main/Sources/KSPlayer/Subtitle/AudioRecognize.swift
Normal file
@@ -0,0 +1,12 @@
|
||||
//
|
||||
// AudioRecognize.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/9/23.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
|
||||
/// A subtitle source that transcribes audio on the fly: decoded audio frames
/// are fed in via `append`, and recognized text surfaces through the
/// inherited SubtitleInfo interface.
public protocol AudioRecognize: SubtitleInfo {
    /// Feeds one decoded audio frame to the recognizer.
    func append(frame: AudioFrame)
}
|
||||
422
KSPlayer-main/Sources/KSPlayer/Subtitle/KSParseProtocol.swift
Normal file
422
KSPlayer-main/Sources/KSPlayer/Subtitle/KSParseProtocol.swift
Normal file
@@ -0,0 +1,422 @@
|
||||
//
|
||||
// KSParseProtocol.swift
|
||||
// KSPlayer-7de52535
|
||||
//
|
||||
// Created by kintan on 2018/8/7.
|
||||
//
|
||||
import Foundation
|
||||
import SwiftUI
|
||||
#if !canImport(UIKit)
|
||||
import AppKit
|
||||
#else
|
||||
import UIKit
|
||||
#endif
|
||||
/// A subtitle-format parser. `canParse` probes (and may consume) the header
/// to decide whether this parser handles the content; `parsePart` then
/// extracts one cue per call until the scanner is exhausted.
public protocol KSParseProtocol {
    /// Returns true when the scanner's content is in this parser's format.
    func canParse(scanner: Scanner) -> Bool
    /// Parses the next subtitle cue, or nil if the current record is invalid.
    func parsePart(scanner: Scanner) -> SubtitlePart?
}
|
||||
|
||||
public extension KSOptions {
    /// Registered subtitle parsers, tried in order (ASS, WebVTT, SRT).
    static var subtitleParses: [KSParseProtocol] = [AssParse(), VTTParse(), SrtParse()]
}
|
||||
|
||||
// NOTE(review): empty extension — presumably a leftover after its members
// moved elsewhere; kept as-is to avoid structural changes.
public extension String {}
|
||||
|
||||
public extension KSParseProtocol {
    /// Scans the whole input, collecting every cue `parsePart` yields, then
    /// returns them ordered by the cues' own comparison (stable bottom-up
    /// merge sort, so equal-start cues keep their file order).
    func parse(scanner: Scanner) -> [SubtitlePart] {
        var parts = [SubtitlePart]()
        while !scanner.isAtEnd {
            guard let part = parsePart(scanner: scanner) else {
                continue
            }
            parts.append(part)
        }
        return parts.mergeSortBottomUp { $0 < $1 }
    }
}
|
||||
|
||||
/// Parser for Advanced SubStation Alpha (.ass/.ssa) subtitles.
/// `canParse` consumes the [Script Info] and [V4+ Styles] sections, caching
/// styles and the event field order; `parsePart` then reads Dialogue lines.
public class AssParse: KSParseProtocol {
    // Styles from the [V4+ Styles] section, keyed by style name.
    private var styleMap = [String: ASSStyle]()
    // Default event field order; replaced by the file's own "Format:" line if present.
    private var eventKeys = ["Layer", "Start", "End", "Style", "Name", "MarginL", "MarginR", "MarginV", "Effect", "Text"]
    private var playResX = Float(0.0)
    private var playResY = Float(0.0)
    public func canParse(scanner: Scanner) -> Bool {
        // An ASS file must open with [Script Info].
        guard scanner.scanString("[Script Info]") != nil else {
            return false
        }
        // Skim the header for PlayResX/PlayResY until the styles Format line.
        while scanner.scanString("Format:") == nil {
            if scanner.scanString("PlayResX:") != nil {
                playResX = scanner.scanFloat() ?? 0
            } else if scanner.scanString("PlayResY:") != nil {
                playResY = scanner.scanFloat() ?? 0
            } else {
                _ = scanner.scanUpToCharacters(from: .newlines)
            }
        }
        // Style field names (first key is the style's own name).
        guard var keys = scanner.scanUpToCharacters(from: .newlines)?.components(separatedBy: ",") else {
            return false
        }
        keys = keys.map { $0.trimmingCharacters(in: .whitespaces) }
        while scanner.scanString("Style:") != nil {
            _ = scanner.scanString("Format: ")
            guard let values = scanner.scanUpToCharacters(from: .newlines)?.components(separatedBy: ",") else {
                continue
            }
            // Zip field names to values (index 0 = style name, used as the map key).
            var dic = [String: String]()
            for i in 1 ..< keys.count {
                dic[keys[i]] = values[i]
            }
            styleMap[values[0]] = dic.parseASSStyle()
        }
        _ = scanner.scanString("[Events]")
        // The events section may redefine the Dialogue field order.
        if scanner.scanString("Format: ") != nil {
            guard let keys = scanner.scanUpToCharacters(from: .newlines)?.components(separatedBy: ",") else {
                return false
            }
            eventKeys = keys.map { $0.trimmingCharacters(in: .whitespaces) }
        }
        return true
    }

    // Dialogue: 0,0:12:37.73,0:12:38.83,Aki Default,,0,0,0,,{\be8}原来如此
    // Subtitles decoded by ffmpeg (software) arrive without the "Dialogue:" prefix:
    // 875,,Default,NTP,0000,0000,0000,!Effect,- 你们两个别冲这么快\\N- 我会取消所有行程尽快赶过去
    /// Parses one event line into a cue. ffmpeg-style lines (no "Dialogue"
    /// prefix) skip the End field and get a 0–0 time range.
    public func parsePart(scanner: Scanner) -> SubtitlePart? {
        let isDialogue = scanner.scanString("Dialogue") != nil
        var dic = [String: String]()
        for i in 0 ..< eventKeys.count {
            // ffmpeg lines omit field index 1 ("Start" in the default order).
            if !isDialogue, i == 1 {
                continue
            }
            if i == eventKeys.count - 1 {
                // Last field (Text) runs to end of line and may contain commas.
                dic[eventKeys[i]] = scanner.scanUpToCharacters(from: .newlines)
            } else {
                dic[eventKeys[i]] = scanner.scanUpToString(",")
                _ = scanner.scanString(",")
            }
        }
        let start: TimeInterval
        let end: TimeInterval
        if let startString = dic["Start"], let endString = dic["End"] {
            start = startString.parseDuration()
            end = endString.parseDuration()
        } else {
            // Dialogue lines must carry timing; ffmpeg lines are timed externally.
            if isDialogue {
                return nil
            } else {
                start = 0
                end = 0
            }
        }
        var attributes: [NSAttributedString.Key: Any]?
        var textPosition: TextPosition
        if let style = dic["Style"], let assStyle = styleMap[style] {
            attributes = assStyle.attrs
            textPosition = assStyle.textPosition
            // Per-event margins override the style's margins when non-zero.
            if let marginL = dic["MarginL"].flatMap(Double.init), marginL != 0 {
                textPosition.leftMargin = CGFloat(marginL)
            }
            if let marginR = dic["MarginR"].flatMap(Double.init), marginR != 0 {
                textPosition.rightMargin = CGFloat(marginR)
            }
            if let marginV = dic["MarginV"].flatMap(Double.init), marginV != 0 {
                textPosition.verticalMargin = CGFloat(marginV)
            }
        } else {
            textPosition = TextPosition()
        }
        guard var text = dic["Text"] else {
            return nil
        }
        // ASS hard line breaks.
        text = text.replacingOccurrences(of: "\\N", with: "\n")
        text = text.replacingOccurrences(of: "\\n", with: "\n")
        let part = SubtitlePart(start, end, attributedString: text.build(textPosition: &textPosition, attributed: attributes))
        part.textPosition = textPosition
        return part
    }
}
|
||||
|
||||
/// A parsed ASS style: the NSAttributedString attributes it produces plus
/// its alignment/margin information.
public struct ASSStyle {
    let attrs: [NSAttributedString.Key: Any]
    let textPosition: TextPosition
}
|
||||
|
||||
// swiftlint:disable cyclomatic_complexity
extension String {
    /// Builds an attributed string from ASS event text, applying inline
    /// {\...} override tags on top of the style's base attributes.
    /// NOTE: attribute overrides accumulate across segments, matching ASS
    /// semantics where a tag applies until changed.
    func build(textPosition: inout TextPosition, attributed: [NSAttributedString.Key: Any]? = nil) -> NSAttributedString {
        let lineCodes = splitStyle()
        let attributedStr = NSMutableAttributedString()
        var attributed = attributed ?? [:]
        for lineCode in lineCodes {
            attributedStr.append(lineCode.0.parseStyle(attributes: &attributed, style: lineCode.1, textPosition: &textPosition))
        }
        return attributedStr
    }

    /// Splits ASS text into (plainText, overrideTagBlock?) pairs, where each
    /// text run carries the {...} block that preceded it (if any).
    func splitStyle() -> [(String, String?)] {
        let scanner = Scanner(string: self)
        scanner.charactersToBeSkipped = nil
        var result = [(String, String?)]()
        var sytle: String?
        while !scanner.isAtEnd {
            if scanner.scanString("{") != nil {
                sytle = scanner.scanUpToString("}")
                _ = scanner.scanString("}")
            } else if let text = scanner.scanUpToString("{") {
                result.append((text, sytle))
            } else if let text = scanner.scanUpToCharacters(from: .newlines) {
                // No further tag blocks: the rest of the line is plain text.
                result.append((text, sytle))
            }
        }
        return result
    }

    /// Applies one {\...} override block to `attributes` and returns `self`
    /// rendered with the result. Tags are separated by backslashes; each case
    /// below handles one tag family (\an, \b, \c, \fn, \fs, \i, \shad, \s,
    /// \u, \1c–\4c).
    func parseStyle(attributes: inout [NSAttributedString.Key: Any], style: String?, textPosition: inout TextPosition) -> NSAttributedString {
        guard let style else {
            return NSAttributedString(string: self, attributes: attributes)
        }
        var fontName: String?
        var fontSize: Float?
        let subStyleArr = style.components(separatedBy: "\\")
        var shadow = attributes[.shadow] as? NSShadow
        for item in subStyleArr {
            let itemStr = item.replacingOccurrences(of: " ", with: "")
            let scanner = Scanner(string: itemStr)
            let char = scanner.scanCharacter()
            switch char {
            case "a":
                // \an<1-9>: numpad alignment.
                let char = scanner.scanCharacter()
                if char == "n" {
                    textPosition.ass(alignment: scanner.scanUpToCharacters(from: .newlines))
                }
            case "b":
                // \b: bold weight, mapped onto the expansion attribute.
                attributes[.expansion] = scanner.scanFloat()
            case "c":
                // \c&H...&: primary fill color.
                attributes[.foregroundColor] = scanner.scanUpToCharacters(from: .newlines).flatMap(UIColor.init(assColor:))
            case "f":
                // \fn<name> or \fs<size>.
                let char = scanner.scanCharacter()
                if char == "n" {
                    fontName = scanner.scanUpToCharacters(from: .newlines)
                } else if char == "s" {
                    fontSize = scanner.scanFloat()
                }
            case "i":
                // \i: italic, mapped onto obliqueness.
                attributes[.obliqueness] = scanner.scanFloat()
            case "s":
                if scanner.scanString("had") != nil {
                    // \shad<depth>: drop shadow offset and blur.
                    if let size = scanner.scanFloat() {
                        shadow = shadow ?? NSShadow()
                        shadow?.shadowOffset = CGSize(width: CGFloat(size), height: CGFloat(size))
                        shadow?.shadowBlurRadius = CGFloat(size)
                    }
                    attributes[.shadow] = shadow
                } else {
                    // \s: strikethrough.
                    attributes[.strikethroughStyle] = scanner.scanInt()
                }
            case "u":
                // \u: underline.
                attributes[.underlineStyle] = scanner.scanInt()
            case "1", "2", "3", "4":
                // \1c..\4c: primary / secondary / outline / shadow colors.
                let twoChar = scanner.scanCharacter()
                if twoChar == "c" {
                    let color = scanner.scanUpToCharacters(from: .newlines).flatMap(UIColor.init(assColor:))
                    if char == "1" {
                        attributes[.foregroundColor] = color
                    } else if char == "2" {
                        // Not yet sure which attribute this color should map to.
                        // attributes[.backgroundColor] = color
                    } else if char == "3" {
                        attributes[.strokeColor] = color
                    } else if char == "4" {
                        shadow = shadow ?? NSShadow()
                        shadow?.shadowColor = color
                        attributes[.shadow] = shadow
                    }
                }
            default:
                break
            }
        }
        // Apply font attributes if available
        if let fontName, let fontSize {
            let font = UIFont(name: fontName, size: CGFloat(fontSize)) ?? UIFont.systemFont(ofSize: CGFloat(fontSize))
            attributes[.font] = font
        }
        return NSAttributedString(string: self, attributes: attributes)
    }
}
|
||||
|
||||
public extension [String: String] {
    /// Converts one parsed ASS `[V4+ Styles]` entry (field name -> raw value) into
    /// text attributes plus an on-screen position.
    ///
    /// - Returns: An `ASSStyle` carrying `NSAttributedString` attributes and a `TextPosition`.
    func parseASSStyle() -> ASSStyle {
        var attributes: [NSAttributedString.Key: Any] = [:]
        // Font: name + size, optionally rotated through a font matrix when "Angle" is set.
        if let fontName = self["Fontname"], let fontSize = self["Fontsize"].flatMap(Double.init) {
            var font = UIFont(name: fontName, size: fontSize) ?? UIFont.systemFont(ofSize: fontSize)
            if let degrees = self["Angle"].flatMap(Double.init), degrees != 0 {
                let radians = CGFloat(degrees * .pi / 180.0)
                #if !canImport(UIKit)
                let matrix = AffineTransform(rotationByRadians: radians)
                #else
                let matrix = CGAffineTransform(rotationAngle: radians)
                #endif
                let fontDescriptor = UIFontDescriptor(name: fontName, matrix: matrix)
                font = UIFont(descriptor: fontDescriptor, size: fontSize) ?? font
            }
            attributes[.font] = font
        }
        if let assColor = self["PrimaryColour"] {
            attributes[.foregroundColor] = UIColor(assColor: assColor)
        }
        // "SecondaryColour" is not mapped yet — it is unclear which attribute it should drive.
        // (Fixed: the previous empty `if let` bound an unused variable, causing a compiler warning.)
        if self["Bold"] == "1" {
            attributes[.expansion] = 1
        }
        if self["Italic"] == "1" {
            attributes[.obliqueness] = 1
        }
        if self["Underline"] == "1" {
            attributes[.underlineStyle] = NSUnderlineStyle.single.rawValue
        }
        if self["StrikeOut"] == "1" {
            attributes[.strikethroughStyle] = NSUnderlineStyle.single.rawValue
        }
        // ScaleX / ScaleY / Spacing are not mapped yet (no obvious NSAttributedString counterpart).
        // BorderStyle 1 = outline plus drop shadow.
        if self["BorderStyle"] == "1" {
            if let strokeWidth = self["Outline"].flatMap(Double.init), strokeWidth > 0 {
                // Negative stroke width strokes AND fills the glyphs.
                attributes[.strokeWidth] = -strokeWidth
                if let assColor = self["OutlineColour"] {
                    attributes[.strokeColor] = UIColor(assColor: assColor)
                }
            }
            if let assColor = self["BackColour"],
               let shadowOffset = self["Shadow"].flatMap(Double.init),
               shadowOffset > 0
            {
                let shadow = NSShadow()
                shadow.shadowOffset = CGSize(width: CGFloat(shadowOffset), height: CGFloat(shadowOffset))
                shadow.shadowBlurRadius = shadowOffset
                shadow.shadowColor = UIColor(assColor: assColor)
                attributes[.shadow] = shadow
            }
        }
        // Placement: numpad-style alignment plus pixel margins.
        var textPosition = TextPosition()
        textPosition.ass(alignment: self["Alignment"])
        if let marginL = self["MarginL"].flatMap(Double.init) {
            textPosition.leftMargin = CGFloat(marginL)
        }
        if let marginR = self["MarginR"].flatMap(Double.init) {
            textPosition.rightMargin = CGFloat(marginR)
        }
        if let marginV = self["MarginV"].flatMap(Double.init) {
            textPosition.verticalMargin = CGFloat(marginV)
        }
        return ASSStyle(attrs: attributes, textPosition: textPosition)
    }
    // swiftlint:enable cyclomatic_complexity
}
|
||||
|
||||
public class VTTParse: KSParseProtocol {
    /// WebVTT files start with the "WEBVTT" magic string.
    public func canParse(scanner: Scanner) -> Bool {
        guard scanner.scanString("WEBVTT") != nil else {
            return false
        }
        // Newlines are significant for cue parsing, so stop skipping whitespace.
        scanner.charactersToBeSkipped = nil
        return true
    }

    /**
     00:00.430 --> 00:03.380
     简中封装 by Q66
     */
    /// Parses one VTT cue: a timing line ("start --> end") followed by text lines
    /// terminated by a blank line. Returns nil when no further cue exists.
    public func parsePart(scanner: Scanner) -> SubtitlePart? {
        // Skip header/comment lines until a timing line or the end of input.
        var timingLine: String?
        repeat {
            timingLine = scanner.scanUpToCharacters(from: .newlines)
            _ = scanner.scanCharacters(from: .newlines)
        } while !(timingLine?.contains("-->") ?? false) && !scanner.isAtEnd
        guard let timingLine else {
            return nil
        }
        let bounds = timingLine.components(separatedBy: "-->")
        guard bounds.count == 2 else {
            return nil
        }
        _ = scanner.scanCharacters(from: .newlines)
        var body = ""
        var lineBreak: String?
        repeat {
            if let fragment = scanner.scanUpToCharacters(from: .newlines) {
                body += fragment
            }
            lineBreak = scanner.scanCharacters(from: .newlines)
            // A single newline continues the cue text; a blank line (>= 2 newlines) ends it.
            if lineBreak == "\n" || lineBreak == "\r\n" {
                body += "\n"
            }
        } while lineBreak == "\n" || lineBreak == "\r\n"
        var textPosition = TextPosition()
        return SubtitlePart(bounds[0].parseDuration(), bounds[1].parseDuration(), attributedString: body.build(textPosition: &textPosition))
    }
}
|
||||
|
||||
public class SrtParse: KSParseProtocol {
    /// SRT has no magic header; detect it by the presence of a " --> " timing arrow.
    public func canParse(scanner: Scanner) -> Bool {
        let result = scanner.string.contains(" --> ")
        if result {
            // Newlines are significant for cue parsing, so stop skipping whitespace.
            scanner.charactersToBeSkipped = nil
        }
        return result
    }

    /**
     45
     00:02:52,184 --> 00:02:53,617
     {\an4}慢慢来
     */
    /// Parses one SRT cue: a numeric index line, a timing line, then text lines
    /// terminated by a blank line. Returns nil when no further cue exists.
    public func parsePart(scanner: Scanner) -> SubtitlePart? {
        var decimal: String?
        // Advance to the next numeric cue-index line.
        // Fixed: this loop previously spun forever once the scanner reached the end of
        // input without finding another numeric line (the VTT parser already guarded
        // the equivalent loop with `!scanner.isAtEnd`).
        repeat {
            decimal = scanner.scanUpToCharacters(from: .newlines)
            _ = scanner.scanCharacters(from: .newlines)
        } while decimal.flatMap(Int.init) == nil && !scanner.isAtEnd
        guard decimal.flatMap(Int.init) != nil else {
            return nil
        }
        let startString = scanner.scanUpToString("-->")
        // skip spaces and newlines by default.
        _ = scanner.scanString("-->")
        if let startString,
           let endString = scanner.scanUpToCharacters(from: .newlines)
        {
            _ = scanner.scanCharacters(from: .newlines)
            var text = ""
            var newLine: String? = nil
            repeat {
                if let str = scanner.scanUpToCharacters(from: .newlines) {
                    text += str
                }
                newLine = scanner.scanCharacters(from: .newlines)
                // A single newline continues the cue text; a blank line ends it.
                if newLine == "\n" || newLine == "\r\n" {
                    text += "\n"
                }
            } while newLine == "\n" || newLine == "\r\n"
            var textPosition = TextPosition()
            return SubtitlePart(startString.parseDuration(), endString.parseDuration(), attributedString: text.build(textPosition: &textPosition))
        }
        return nil
    }
}
|
||||
386
KSPlayer-main/Sources/KSPlayer/Subtitle/KSSubtitle.swift
Normal file
386
KSPlayer-main/Sources/KSPlayer/Subtitle/KSSubtitle.swift
Normal file
@@ -0,0 +1,386 @@
|
||||
//
|
||||
// KSSubtitle.swift
|
||||
// Pods
|
||||
//
|
||||
// Created by kintan on 2017/4/2.
|
||||
//
|
||||
//
|
||||
|
||||
import CoreFoundation
|
||||
import CoreGraphics
|
||||
import Foundation
|
||||
import SwiftUI
|
||||
|
||||
/// One timed subtitle cue: a time range plus either attributed text or a bitmap image.
public class SubtitlePart: CustomStringConvertible, Identifiable {
    public var start: TimeInterval
    public var end: TimeInterval
    /// Rendering origin; defaults to zero until layout fills it in.
    public var origin: CGPoint = .zero
    public let text: NSAttributedString?
    /// Bitmap subtitles carry an image instead of text.
    public var image: UIImage?
    public var textPosition: TextPosition?

    public var description: String {
        "Subtile Group ==========\nstart: \(start)\nend:\(end)\ntext:\(String(describing: text))"
    }

    /// Creates a text cue from a raw string, trimming surrounding whitespace and
    /// dropping carriage returns before wrapping it in an attributed string.
    public convenience init(_ start: TimeInterval, _ end: TimeInterval, _ string: String) {
        let cleaned = string
            .trimmingCharacters(in: .whitespaces)
            .replacingOccurrences(of: "\r", with: "")
        self.init(start, end, attributedString: NSAttributedString(string: cleaned))
    }

    public init(_ start: TimeInterval, _ end: TimeInterval, attributedString: NSAttributedString?) {
        self.start = start
        self.end = end
        text = attributedString
    }
}
|
||||
|
||||
/// Where a subtitle is placed on screen: vertical/horizontal alignment plus margins.
public struct TextPosition {
    public var verticalAlign: VerticalAlignment = .bottom
    public var horizontalAlign: HorizontalAlignment = .center
    public var leftMargin: CGFloat = 0
    public var rightMargin: CGFloat = 0
    public var verticalMargin: CGFloat = 10
    /// Margins converted to SwiftUI edge insets, applied only on the active alignment edges.
    public var edgeInsets: EdgeInsets {
        var insets = EdgeInsets()
        switch verticalAlign {
        case .bottom:
            insets.bottom = verticalMargin
        case .top:
            insets.top = verticalMargin
        default:
            break
        }
        switch horizontalAlign {
        case .leading:
            insets.leading = leftMargin
        case .trailing:
            insets.trailing = rightMargin
        default:
            break
        }
        return insets
    }

    /// Applies an ASS numpad-style alignment code ("1"..."9"):
    /// rows 1-3 = bottom, 4-6 = middle, 7-9 = top; columns = left/center/right.
    /// Any other value leaves the position unchanged.
    public mutating func ass(alignment: String?) {
        guard let alignment, alignment.count == 1,
              let code = Int(alignment), (1 ... 9).contains(code)
        else {
            return
        }
        let rows: [VerticalAlignment] = [.bottom, .center, .top]
        let columns: [HorizontalAlignment] = [.leading, .center, .trailing]
        verticalAlign = rows[(code - 1) / 3]
        horizontalAlign = columns[(code - 1) % 3]
    }
}
|
||||
|
||||
// MARK: - Comparable

extension SubtitlePart: Comparable {
    /// Two parts are equal when their time ranges coincide exactly.
    public static func == (left: SubtitlePart, right: SubtitlePart) -> Bool {
        left.start == right.start && left.end == right.end
    }

    /// Parts order by their start time.
    public static func < (left: SubtitlePart, right: SubtitlePart) -> Bool {
        left.start < right.start
    }
}
|
||||
|
||||
// MARK: - NumericComparable

extension SubtitlePart: NumericComparable {
    public typealias Compare = TimeInterval
    /// A part "equals" a time when the time falls inside its range (inclusive).
    public static func == (left: SubtitlePart, right: TimeInterval) -> Bool {
        right >= left.start && right <= left.end
    }

    /// A part is "less than" a time once it has already finished.
    public static func < (left: SubtitlePart, right: TimeInterval) -> Bool {
        right > left.end
    }
}
|
||||
|
||||
/// Anything that can produce the subtitle parts visible at a given playback time.
public protocol KSSubtitleProtocol {
    /// Returns every part whose time range covers `time`.
    func search(for time: TimeInterval) -> [SubtitlePart]
}
|
||||
|
||||
/// Metadata and behavior of a selectable subtitle track.
public protocol SubtitleInfo: KSSubtitleProtocol, AnyObject, Hashable, Identifiable {
    /// Stable unique identifier; also backs `Hashable`/`Identifiable` (see the extension below).
    var subtitleID: String { get }
    /// Human-readable name shown in the subtitle picker.
    var name: String { get }
    /// Per-track time offset in seconds, subtracted from the playback time before searching.
    var delay: TimeInterval { get set }
    // var userInfo: NSMutableDictionary? { get set }
    // var subtitleDataSouce: SubtitleDataSouce? { get set }
    // var comment: String? { get }
    /// Whether this track is currently selected for display.
    var isEnabled: Bool { get set }
}
|
||||
|
||||
public extension SubtitleInfo {
    /// `Identifiable` conformance backed by the stable subtitle id.
    var id: String { subtitleID }
    /// Hashing uses only `subtitleID`, matching the `==` definition below.
    func hash(into hasher: inout Hasher) {
        hasher.combine(subtitleID)
    }

    /// Tracks are considered the same when their ids match, regardless of other state.
    static func == (lhs: Self, rhs: Self) -> Bool {
        lhs.subtitleID == rhs.subtitleID
    }
}
|
||||
|
||||
/// In-memory subtitle track: an ordered list of timed parts, filled in by `parse`.
public class KSSubtitle {
    // Expected to be ordered by start time — `search(for:)` below relies on that to stop early.
    public var parts: [SubtitlePart] = []
    public init() {}
}
|
||||
|
||||
// MARK: - KSSubtitleProtocol

extension KSSubtitle: KSSubtitleProtocol {
    /// Returns every part covering `time`. Because `parts` is ordered by start time,
    /// scanning stops at the first part that starts after `time`.
    public func search(for time: TimeInterval) -> [SubtitlePart] {
        parts
            .prefix(while: { $0.start <= time })
            .filter { $0 == time }
    }
}
|
||||
|
||||
public extension KSSubtitle {
    /// Downloads the subtitle file at `url` and parses it into `parts`.
    /// - Throws: Any download error, plus the parse errors documented below.
    func parse(url: URL, userAgent: String? = nil, encoding: String.Encoding? = nil) async throws {
        let data = try await url.data(userAgent: userAgent)
        try parse(data: data, encoding: encoding)
    }

    /// Decodes `data` and parses it with the first parser that recognizes the format.
    /// - Parameter encoding: Preferred text encoding; falls back to UTF-8, Big5, GB18030, UTF-16 in order.
    /// - Throws: `.subtitleUnEncoding` when no encoding decodes the data,
    ///   `.subtitleFormatUnSupport` when no parser recognizes it,
    ///   `.subtitleUnParse` when a parser matched but produced no parts.
    func parse(data: Data, encoding: String.Encoding? = nil) throws {
        // Candidate encodings, tried in order until one decodes the data.
        let encodes = [encoding ?? String.Encoding.utf8,
                       String.Encoding(rawValue: CFStringConvertEncodingToNSStringEncoding(CFStringEncoding(CFStringEncodings.big5.rawValue))),
                       String.Encoding(rawValue: CFStringConvertEncodingToNSStringEncoding(CFStringEncoding(CFStringEncodings.GB_18030_2000.rawValue))),
                       String.Encoding.unicode]
        guard let subtitle = encodes.lazy.compactMap({ String(data: data, encoding: $0) }).first else {
            throw NSError(errorCode: .subtitleUnEncoding)
        }
        let scanner = Scanner(string: subtitle)
        // Drop a possible BOM/control prefix before format detection.
        _ = scanner.scanCharacters(from: .controlCharacters)
        guard let parse = KSOptions.subtitleParses.first(where: { $0.canParse(scanner: scanner) }) else {
            throw NSError(errorCode: .subtitleFormatUnSupport)
        }
        parts = parse.parse(scanner: scanner)
        if parts.isEmpty {
            throw NSError(errorCode: .subtitleUnParse)
        }
    }
}
|
||||
|
||||
/// Allows comparing an element against a scalar key of a different type
/// (e.g. a `SubtitlePart` against a playback time). Used by `binarySearch(key:)`.
public protocol NumericComparable {
    associatedtype Compare
    static func < (lhs: Self, rhs: Compare) -> Bool
    static func == (lhs: Self, rhs: Compare) -> Bool
}
|
||||
|
||||
extension Collection where Element: NumericComparable {
    /// Binary search over a collection sorted ascending with respect to `Compare`.
    /// - Returns: An index whose element matches `key`, or nil when none does.
    /// - Note: Fixed: on the "element < key" branch the lower bound previously advanced
    ///   by only one position instead of past the midpoint, silently degrading the
    ///   search to O(n) (the result was still correct, just linear-time).
    func binarySearch(key: Element.Compare) -> Self.Index? {
        var lowerBound = startIndex
        var upperBound = endIndex
        while lowerBound < upperBound {
            let midIndex = index(lowerBound, offsetBy: distance(from: lowerBound, to: upperBound) / 2)
            if self[midIndex] == key {
                return midIndex
            } else if self[midIndex] < key {
                // Everything up to and including mid is below key — discard the whole left half.
                lowerBound = index(after: midIndex)
            } else {
                upperBound = midIndex
            }
        }
        return nil
    }
}
|
||||
|
||||
/// Observable state for subtitle handling: the available tracks, the selected track,
/// and the parts currently on screen for the playback time.
open class SubtitleModel: ObservableObject {
    /// Preset subtitle font sizes, resolved to a point size per platform.
    public enum Size {
        case smaller
        case standard
        case large
        // NOTE(review): `os(xrOS)` in each `#elseif` branch is unreachable because the
        // first branch already matches it — confirm which of the two sizes visionOS should use.
        // NOTE(review): `UI_USER_INTERFACE_IDIOM()` is a legacy API — confirm whether
        // `UIDevice.current.userInterfaceIdiom` was intended.
        public var rawValue: CGFloat {
            switch self {
            case .smaller:
                #if os(tvOS) || os(xrOS)
                return 48
                #elseif os(macOS) || os(xrOS)
                return 20
                #else
                if UI_USER_INTERFACE_IDIOM() == .phone {
                    return 12
                } else {
                    return 20
                }
                #endif
            case .standard:
                #if os(tvOS) || os(xrOS)
                return 58
                #elseif os(macOS) || os(xrOS)
                return 26
                #else
                if UI_USER_INTERFACE_IDIOM() == .phone {
                    return 16
                } else {
                    return 26
                }
                #endif
            case .large:
                #if os(tvOS) || os(xrOS)
                return 68
                #elseif os(macOS) || os(xrOS)
                return 32
                #else
                if UI_USER_INTERFACE_IDIOM() == .phone {
                    return 20
                } else {
                    return 32
                }
                #endif
            }
        }
    }

    // Global subtitle appearance settings.
    public static var textColor: Color = .white
    public static var textBackgroundColor: Color = .clear
    /// System font derived from the global size/bold settings below.
    public static var textFont: UIFont {
        textBold ? .boldSystemFont(ofSize: textFontSize) : .systemFont(ofSize: textFontSize)
    }

    public static var textFontSize = SubtitleModel.Size.standard.rawValue
    public static var textBold = false
    public static var textItalic = false
    public static var textPosition = TextPosition()
    /// Speech-recognition backed tracks appended for every media item that has a URL.
    public static var audioRecognizes = [any AudioRecognize]()
    private var subtitleDataSouces: [SubtitleDataSouce] = KSOptions.subtitleDataSouces
    /// All discovered subtitle tracks for the current media.
    @Published
    public private(set) var subtitleInfos = [any SubtitleInfo]()
    /// Parts visible for the time last passed to `subtitle(currentTime:)`.
    @Published
    public private(set) var parts = [SubtitlePart]()
    /// Extra user-adjustable subtitle delay, in seconds.
    public var subtitleDelay = 0.0 // s
    /// Current media URL; setting it resets the track list and re-queries all data sources.
    public var url: URL? {
        didSet {
            subtitleInfos.removeAll()
            searchSubtitle(query: nil, languages: [])
            if url != nil {
                subtitleInfos.append(contentsOf: SubtitleModel.audioRecognizes)
            }
            for datasouce in subtitleDataSouces {
                addSubtitle(dataSouce: datasouce)
            }
            // Must defer with async: @Published values must not be mutated while SwiftUI is updating the view.
            DispatchQueue.main.async { [weak self] in
                self?.parts = []
                self?.selectedSubtitleInfo = nil
            }
        }
    }

    /// The active track. Toggles `isEnabled` on old/new tracks and, for downloaded
    /// (non-local) subtitles, records the choice in the cache data source.
    @Published
    public var selectedSubtitleInfo: (any SubtitleInfo)? {
        didSet {
            oldValue?.isEnabled = false
            selectedSubtitleInfo?.isEnabled = true
            if let url, let info = selectedSubtitleInfo as? URLSubtitleInfo, !info.downloadURL.isFileURL, let cache = subtitleDataSouces.first(where: { $0 is CacheSubtitleDataSouce }) as? CacheSubtitleDataSouce {
                cache.addCache(fileURL: url, downloadURL: info.downloadURL)
            }
        }
    }

    public init() {}

    /// Adds a track unless one with the same id is already listed.
    public func addSubtitle(info: any SubtitleInfo) {
        if subtitleInfos.first(where: { $0.subtitleID == info.subtitleID }) == nil {
            subtitleInfos.append(info)
        }
    }

    /// Refreshes `parts` for the given playback time.
    /// - Returns: true when the visible parts actually changed.
    public func subtitle(currentTime: TimeInterval) -> Bool {
        var newParts = [SubtitlePart]()
        if let subtile = selectedSubtitleInfo {
            // Apply both the per-track delay and the user-adjusted global delay.
            let currentTime = currentTime - subtile.delay - subtitleDelay
            newParts = subtile.search(for: currentTime)
            if newParts.isEmpty {
                // Keep any previously shown parts that still cover this time.
                newParts = parts.filter { part in
                    part == currentTime
                }
            }
        }
        // SwiftUI does not compare these values itself, so compare here to avoid redundant publishes.
        if newParts != parts {
            for part in newParts {
                if let text = part.text as? NSMutableAttributedString {
                    text.addAttributes([.font: SubtitleModel.textFont],
                                       range: NSRange(location: 0, length: text.length))
                }
            }
            parts = newParts
            return true
        } else {
            return false
        }
    }

    /// Runs a text search on every searchable data source, replacing that source's
    /// previous entries with its new async results.
    public func searchSubtitle(query: String?, languages: [String]) {
        for dataSouce in subtitleDataSouces {
            if let dataSouce = dataSouce as? SearchSubtitleDataSouce {
                // Drop this source's stale entries before the async search repopulates them.
                subtitleInfos.removeAll { info in
                    dataSouce.infos.contains {
                        $0 === info
                    }
                }
                Task { @MainActor in
                    try? await dataSouce.searchSubtitle(query: query, languages: languages)
                    subtitleInfos.append(contentsOf: dataSouce.infos)
                }
            }
        }
    }

    /// Collects candidates from one data source; file-URL sources are queried
    /// asynchronously against the current media URL.
    public func addSubtitle(dataSouce: SubtitleDataSouce) {
        if let dataSouce = dataSouce as? FileURLSubtitleDataSouce {
            Task { @MainActor in
                try? await dataSouce.searchSubtitle(fileURL: url)
                subtitleInfos.append(contentsOf: dataSouce.infos)
            }
        } else {
            subtitleInfos.append(contentsOf: dataSouce.infos)
        }
    }
}
|
||||
404
KSPlayer-main/Sources/KSPlayer/Subtitle/SubtitleDataSouce.swift
Normal file
404
KSPlayer-main/Sources/KSPlayer/Subtitle/SubtitleDataSouce.swift
Normal file
@@ -0,0 +1,404 @@
|
||||
//
|
||||
// SubtitleDataSouce.swift
|
||||
// KSPlayer-7de52535
|
||||
//
|
||||
// Created by kintan on 2018/8/7.
|
||||
//
|
||||
import Foundation
|
||||
|
||||
/// Placeholder track representing "no subtitles"; searching it yields nothing.
public class EmptySubtitleInfo: SubtitleInfo {
    public var isEnabled: Bool = true
    public let subtitleID: String = ""
    public var delay: TimeInterval = 0
    public let name = NSLocalizedString("no show subtitle", comment: "")
    /// Always returns no parts.
    public func search(for _: TimeInterval) -> [SubtitlePart] {
        []
    }
}
|
||||
|
||||
/// A subtitle track backed by a (local or remote) URL; contents are downloaded
/// and parsed lazily, the first time the track is enabled.
public class URLSubtitleInfo: KSSubtitle, SubtitleInfo {
    /// Enabling the track triggers a one-time async parse of `downloadURL`.
    public var isEnabled: Bool = false {
        didSet {
            if isEnabled, parts.isEmpty {
                Task {
                    try? await parse(url: downloadURL, userAgent: userAgent)
                }
            }
        }
    }

    public private(set) var downloadURL: URL
    public var delay: TimeInterval = 0
    public private(set) var name: String
    public let subtitleID: String
    public var comment: String?
    public var userInfo: NSMutableDictionary?
    private let userAgent: String?
    public convenience init(url: URL) {
        self.init(subtitleID: url.absoluteString, name: url.lastPathComponent, url: url)
    }

    public init(subtitleID: String, name: String, url: URL, userAgent: String? = nil) {
        self.subtitleID = subtitleID
        self.name = name
        self.userAgent = userAgent
        downloadURL = url
        super.init()
        // A remote URL with no display name: fetch the file to discover its filename.
        if !url.isFileURL, name.isEmpty {
            url.download(userAgent: userAgent) { [weak self] filename, tmpUrl in
                guard let self else {
                    return
                }
                self.name = filename
                // Move the temp download to a stable path under the temp directory.
                // Fixed: the move result was previously ignored, so a failed move (e.g. a
                // leftover file at the destination) left `downloadURL` pointing at a
                // non-existent path; a redundant intermediate assignment is also removed.
                let fileURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(filename)
                do {
                    if FileManager.default.fileExists(atPath: fileURL.path) {
                        try FileManager.default.removeItem(at: fileURL)
                    }
                    try FileManager.default.moveItem(at: tmpUrl, to: fileURL)
                    self.downloadURL = fileURL
                } catch {
                    // Fall back to the temporary location the download handed us.
                    self.downloadURL = tmpUrl
                }
            }
        }
    }
}
|
||||
|
||||
/// Minimal provider of subtitle track candidates.
public protocol SubtitleDataSouce: AnyObject {
    /// Candidates produced by this source's most recent query.
    var infos: [any SubtitleInfo] { get }
}

/// A data source that derives candidates from the media file's URL.
public protocol FileURLSubtitleDataSouce: SubtitleDataSouce {
    /// Repopulates `infos` for the given media URL (nil clears the results).
    func searchSubtitle(fileURL: URL?) async throws
}

/// A file-URL data source that can also remember which downloaded subtitle
/// was chosen for a given media file.
public protocol CacheSubtitleDataSouce: FileURLSubtitleDataSouce {
    /// Records that `downloadURL` belongs to the media at `fileURL`.
    func addCache(fileURL: URL, downloadURL: URL)
}

/// A data source queried with a free-text search and preferred languages.
public protocol SearchSubtitleDataSouce: SubtitleDataSouce {
    /// Repopulates `infos` for the query (nil/empty input clears the results).
    func searchSubtitle(query: String?, languages: [String]) async throws
}
|
||||
|
||||
public extension KSOptions {
    /// Global default subtitle providers; out of the box only same-directory discovery is enabled.
    static var subtitleDataSouces: [SubtitleDataSouce] = [DirectorySubtitleDataSouce()]
}
|
||||
|
||||
/// Remembers which downloaded subtitle URLs belong to which media URL, persisted
/// as a plist under the temporary directory.
public class PlistCacheSubtitleDataSouce: CacheSubtitleDataSouce {
    public static let singleton = PlistCacheSubtitleDataSouce()
    /// Candidates produced by the last `searchSubtitle(fileURL:)` call.
    public var infos = [any SubtitleInfo]()
    // Path of the backing plist file.
    private let srtCacheInfoPath: String
    // media URL string -> download URL strings. Strings because plist cannot store URL values.
    private var srtInfoCaches: [String: [String]]
    private init() {
        let cacheFolder = (NSTemporaryDirectory() as NSString).appendingPathComponent("KSSubtitleCache")
        if !FileManager.default.fileExists(atPath: cacheFolder) {
            try? FileManager.default.createDirectory(atPath: cacheFolder, withIntermediateDirectories: true, attributes: nil)
        }
        srtCacheInfoPath = (cacheFolder as NSString).appendingPathComponent("KSSrtInfo.plist")
        srtInfoCaches = [String: [String]]()
        // Load the persisted cache off the calling thread.
        // NOTE(review): `srtInfoCaches` is written here on a global queue and read/written
        // from callers' threads without synchronization — confirm this race is acceptable.
        DispatchQueue.global().async { [weak self] in
            guard let self else {
                return
            }
            self.srtInfoCaches = (NSMutableDictionary(contentsOfFile: self.srtCacheInfoPath) as? [String: [String]]) ?? [String: [String]]()
        }
    }

    /// Publishes the subtitle URLs previously cached for `fileURL` into `infos`.
    public func searchSubtitle(fileURL: URL?) async throws {
        infos = [any SubtitleInfo]()
        guard let fileURL else {
            return
        }
        infos = srtInfoCaches[fileURL.absoluteString]?.compactMap { downloadURL -> (any SubtitleInfo)? in
            guard let url = URL(string: downloadURL) else {
                return nil
            }
            let info = URLSubtitleInfo(url: url)
            info.comment = "local"
            return info
        } ?? [any SubtitleInfo]()
    }

    /// Associates `downloadURL` with `fileURL` (deduplicated) and persists the mapping asynchronously.
    public func addCache(fileURL: URL, downloadURL: URL) {
        let file = fileURL.absoluteString
        let path = downloadURL.absoluteString
        var array = srtInfoCaches[file] ?? [String]()
        if !array.contains(where: { $0 == path }) {
            array.append(path)
            srtInfoCaches[file] = array
            // Persist off the current thread.
            // NOTE(review): `atomically: false` means a crash mid-write can corrupt the plist — confirm intent.
            DispatchQueue.global().async { [weak self] in
                guard let self else {
                    return
                }
                (self.srtInfoCaches as NSDictionary).write(toFile: self.srtCacheInfoPath, atomically: false)
            }
        }
    }
}
|
||||
|
||||
/// A fixed list of subtitle candidates built from caller-provided URLs.
public class URLSubtitleDataSouce: SubtitleDataSouce {
    public var infos: [any SubtitleInfo]
    public init(urls: [URL]) {
        infos = urls.map { URLSubtitleInfo(url: $0) }
    }
}
|
||||
|
||||
/// Discovers subtitle files that sit next to the media file on disk.
public class DirectorySubtitleDataSouce: FileURLSubtitleDataSouce {
    public var infos = [any SubtitleInfo]()
    public init() {}

    /// Lists the media file's directory and keeps subtitle files, sorted by name.
    /// Remote (non-file) URLs produce no candidates.
    public func searchSubtitle(fileURL: URL?) async throws {
        infos = [any SubtitleInfo]()
        guard let fileURL, fileURL.isFileURL else {
            return
        }
        let directory = fileURL.deletingLastPathComponent()
        let entries = (try? FileManager.default.contentsOfDirectory(at: directory, includingPropertiesForKeys: nil)) ?? []
        infos = entries
            .filter(\.isSubtitle)
            .map { URLSubtitleInfo(url: $0) }
            .sorted { $0.name < $1.name }
    }
}
|
||||
|
||||
/// Looks up subtitles on shooter.cn using a content hash of the local media file.
public class ShooterSubtitleDataSouce: FileURLSubtitleDataSouce {
    /// Candidates produced by the last search.
    public var infos = [any SubtitleInfo]()
    public init() {}
    /// Queries the Shooter API for subtitles matching the local file.
    /// Remote (non-file) URLs are ignored because the API requires a file hash.
    public func searchSubtitle(fileURL: URL?) async throws {
        infos = [any SubtitleInfo]()
        guard let fileURL else {
            return
        }
        guard fileURL.isFileURL, let searchApi = URL(string: "https://www.shooter.cn/api/subapi.php")?
            .add(queryItems: ["format": "json", "pathinfo": fileURL.path, "filehash": fileURL.shooterFilehash])
        else {
            return
        }
        var request = URLRequest(url: searchApi)
        request.httpMethod = "POST"
        let (data, _) = try await URLSession.shared.data(for: request)
        // The response is an array of subtitle groups, each carrying a "Files" list.
        guard let json = try JSONSerialization.jsonObject(with: data) as? [[String: Any]] else {
            return
        }
        infos = json.flatMap { sub in
            let filesDic = sub["Files"] as? [[String: String]]
            // let desc = sub["Desc"] as? String ?? ""
            // "Delay" is reported in milliseconds; convert to seconds.
            let delay = TimeInterval(sub["Delay"] as? Int ?? 0) / 1000.0
            return filesDic?.compactMap { dic in
                if let string = dic["Link"], let url = URL(string: string) {
                    let info = URLSubtitleInfo(subtitleID: string, name: "", url: url)
                    info.delay = delay
                    return info
                }
                return nil
            } ?? [URLSubtitleInfo]()
        }
    }
}
|
||||
|
||||
/// Searches subtitles on assrt.net via its token-authenticated REST API.
public class AssrtSubtitleDataSouce: SearchSubtitleDataSouce {
    // API token, sent as a Bearer credential on every request.
    private let token: String
    /// Candidates produced by the last search.
    public var infos = [any SubtitleInfo]()
    public init(token: String) {
        self.token = token
    }

    /// Free-text search. The `languages` argument is currently ignored by this source.
    public func searchSubtitle(query: String?, languages _: [String] = ["zh-cn"]) async throws {
        infos = [any SubtitleInfo]()
        guard let query else {
            return
        }
        guard let searchApi = URL(string: "https://api.assrt.net/v1/sub/search")?.add(queryItems: ["q": query]) else {
            return
        }
        var request = URLRequest(url: searchApi)
        request.httpMethod = "POST"
        request.addValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        let (data, _) = try await URLSession.shared.data(for: request)
        guard let json = try JSONSerialization.jsonObject(with: data) as? [String: Any] else {
            return
        }
        // status == 0 signals success; any other status yields no results.
        guard let status = json["status"] as? Int, status == 0 else {
            return
        }
        guard let subDict = json["sub"] as? [String: Any], let subArray = subDict["subs"] as? [[String: Any]] else {
            return
        }
        var result = [URLSubtitleInfo]()
        // Each hit only carries an id; resolve its downloadable files one by one.
        for sub in subArray {
            if let assrtSubID = sub["id"] as? Int {
                try await result.append(contentsOf: loadDetails(assrtSubID: String(assrtSubID)))
            }
        }
        infos = result
    }

    /// Resolves one search hit into concrete downloadable subtitle files.
    func loadDetails(assrtSubID: String) async throws -> [URLSubtitleInfo] {
        var infos = [URLSubtitleInfo]()
        guard let detailApi = URL(string: "https://api.assrt.net/v1/sub/detail")?.add(queryItems: ["id": assrtSubID]) else {
            return infos
        }
        var request = URLRequest(url: detailApi)
        request.httpMethod = "POST"
        request.addValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        let (data, _) = try await URLSession.shared.data(for: request)
        guard let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any] else {
            return infos
        }
        guard let status = json["status"] as? Int, status == 0 else {
            return infos
        }
        guard let subDict = json["sub"] as? [String: Any], let subArray = subDict["subs"] as? [[String: Any]], let sub = subArray.first else {
            return infos
        }
        // A hit is either an archive exposing a "filelist", or a single file with "url"/"filename".
        if let fileList = sub["filelist"] as? [[String: String]] {
            for dic in fileList {
                if let urlString = dic["url"], let filename = dic["f"], let url = URL(string: urlString) {
                    let info = URLSubtitleInfo(subtitleID: urlString, name: filename, url: url)
                    infos.append(info)
                }
            }
        } else if let urlString = sub["url"] as? String, let filename = sub["filename"] as? String, let url = URL(string: urlString) {
            let info = URLSubtitleInfo(subtitleID: urlString, name: filename, url: url)
            infos.append(info)
        }
        return infos
    }
}
|
||||
|
||||
/// Searches subtitles via the OpenSubtitles REST API (api.opensubtitles.com).
public class OpenSubtitleDataSouce: SearchSubtitleDataSouce {
    // NOTE(review): `token` is never assigned anywhere in this class, so all requests
    // are unauthenticated — confirm whether a login flow was intended for username/password.
    private var token: String? = nil
    private let username: String?
    private let password: String?
    // Per-application API key, required on every request.
    private let apiKey: String
    /// Candidates produced by the last search.
    public var infos = [any SubtitleInfo]()
    public init(apiKey: String, username: String? = nil, password: String? = nil) {
        self.apiKey = apiKey
        self.username = username
        self.password = password
    }

    /// Free-text search (no IMDb/TMDB id).
    public func searchSubtitle(query: String?, languages: [String] = ["zh-cn"]) async throws {
        try await searchSubtitle(query: query, imdbID: 0, tmdbID: 0, languages: languages)
    }

    /// Searches by free text and/or IMDb/TMDB id; pass 0 to omit an id.
    /// Does nothing when neither a query nor an id is given.
    public func searchSubtitle(query: String?, imdbID: Int, tmdbID: Int, languages: [String] = ["zh-cn"]) async throws {
        infos = [any SubtitleInfo]()
        var queryItems = [String: String]()
        if let query {
            queryItems["query"] = query
        }
        if imdbID != 0 {
            // Fixed: this parameter was misspelled "imbd_id", which the API silently
            // ignores, so IMDb-id searches never matched anything.
            queryItems["imdb_id"] = String(imdbID)
        }
        if tmdbID != 0 {
            queryItems["tmdb_id"] = String(tmdbID)
        }
        if queryItems.isEmpty {
            return
        }
        queryItems["languages"] = languages.joined(separator: ",")
        try await searchSubtitle(queryItems: queryItems)
    }

    // https://opensubtitles.stoplight.io/docs/opensubtitles-api/a172317bd5ccc-search-for-subtitles
    /// Low-level search with raw query parameters; fills `infos` with downloadable files.
    public func searchSubtitle(queryItems: [String: String]) async throws {
        infos = [any SubtitleInfo]()
        if queryItems.isEmpty {
            return
        }
        guard let searchApi = URL(string: "https://api.opensubtitles.com/api/v1/subtitles")?.add(queryItems: queryItems) else {
            return
        }
        var request = URLRequest(url: searchApi)
        request.addValue(apiKey, forHTTPHeaderField: "Api-Key")
        if let token {
            request.addValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        }
        let (data, _) = try await URLSession.shared.data(for: request)
        guard let json = try JSONSerialization.jsonObject(with: data) as? [String: Any] else {
            return
        }
        guard let dataArray = json["data"] as? [[String: Any]] else {
            return
        }
        var result = [URLSubtitleInfo]()
        // Each result may expose several files; every file needs its own download request.
        for sub in dataArray {
            if let attributes = sub["attributes"] as? [String: Any], let files = attributes["files"] as? [[String: Any]] {
                for file in files {
                    if let fileID = file["file_id"] as? Int, let info = try await loadDetails(fileID: fileID) {
                        result.append(info)
                    }
                }
            }
        }
        infos = result
    }

    /// Requests a temporary download link for one file id.
    func loadDetails(fileID: Int) async throws -> URLSubtitleInfo? {
        guard let detailApi = URL(string: "https://api.opensubtitles.com/api/v1/download")?.add(queryItems: ["file_id": String(fileID)]) else {
            return nil
        }
        var request = URLRequest(url: detailApi)
        request.httpMethod = "POST"
        request.addValue(apiKey, forHTTPHeaderField: "Api-Key")
        if let token {
            request.addValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        }
        let (data, _) = try await URLSession.shared.data(for: request)
        guard let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any] else {
            return nil
        }
        guard let link = json["link"] as? String, let fileName = json["file_name"] as? String, let url = URL(string: link) else {
            return nil
        }
        return URLSubtitleInfo(subtitleID: String(fileID), name: fileName, url: url)
    }
}
|
||||
|
||||
extension URL {
    /// The URL decomposed into `URLComponents`, resolving against its base URL.
    public var components: URLComponents? {
        URLComponents(url: self, resolvingAgainstBaseURL: true)
    }

    /// Returns a copy of this URL with the given key/value pairs appended as a
    /// percent-encoded query string, or `nil` if the URL cannot be parsed.
    /// Keys and values are encoded with a character set stricter than
    /// `.urlQueryAllowed` (spaces, `&`, `=`, `+`, etc. are also escaped) so
    /// that values containing query delimiters survive round-tripping.
    func add(queryItems: [String: String]) -> URL? {
        guard var urlComponents = components else {
            return nil
        }
        var reserved = CharacterSet.urlQueryAllowed
        reserved.remove(charactersIn: ": #[]@!$&'()*+, ;=")
        urlComponents.percentEncodedQueryItems = queryItems.compactMap { key, value in
            URLQueryItem(name: key.addingPercentEncoding(withAllowedCharacters: reserved) ?? key, value: value.addingPercentEncoding(withAllowedCharacters: reserved))
        }
        return urlComponents.url
    }

    /// The "shooter" subtitle-service file hash for the file at this URL:
    /// MD5 digests of four 4 KiB samples (at 4096, 2/3, 1/3, and end-8192
    /// byte offsets) joined with ";". Returns "" when the file cannot be
    /// opened or is smaller than 12 KiB (too small to sample safely).
    var shooterFilehash: String {
        let file: FileHandle
        do {
            file = try FileHandle(forReadingFrom: self)
        } catch {
            return ""
        }
        // Always release the descriptor, whichever exit path is taken.
        defer { file.closeFile() }

        // Determine the file size by seeking to the end.
        file.seekToEndOfFile()
        let fileSize: UInt64 = file.offsetInFile

        // 12288 = 4096 + 8192: guarantees every sample offset below is valid.
        guard fileSize >= 12288 else {
            return ""
        }

        let offsets: [UInt64] = [
            4096,
            fileSize / 3 * 2,
            fileSize / 3,
            fileSize - 8192,
        ]

        let hash = offsets.map { offset -> String in
            file.seek(toFileOffset: offset)
            return file.readData(ofLength: 4096).md5()
        }.joined(separator: ";")
        return hash
    }
}
|
||||
77
KSPlayer-main/Sources/KSPlayer/SwiftUI/AirPlayView.swift
Normal file
77
KSPlayer-main/Sources/KSPlayer/SwiftUI/AirPlayView.swift
Normal file
@@ -0,0 +1,77 @@
|
||||
//
|
||||
// AirPlayView.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/5/4.
|
||||
//
|
||||
|
||||
import AVKit
|
||||
import SwiftUI
|
||||
|
||||
#if !os(xrOS)
/// A SwiftUI wrapper around `AVRoutePickerView` that shows the system
/// AirPlay / audio-route picker button.
public struct AirPlayView: UIViewRepresentable {
    public init() {}

    #if canImport(UIKit)
    public typealias UIViewType = AVRoutePickerView

    /// Creates the route picker tinted white to match the player chrome.
    public func makeUIView(context _: Context) -> UIViewType {
        let picker = AVRoutePickerView()
        picker.tintColor = .white
        return picker
    }

    public func updateUIView(_: UIViewType, context _: Context) {}
    #else
    public typealias NSViewType = AVRoutePickerView

    /// Creates the route picker without macOS's default bordered button style.
    public func makeNSView(context _: Context) -> NSViewType {
        let picker = AVRoutePickerView()
        picker.isRoutePickerButtonBordered = false
        return picker
    }

    public func updateNSView(_: NSViewType, context _: Context) {}
    #endif
}
#endif
|
||||
public extension View {
    /// Applies `transform` to the view when `condition` is `true`; otherwise
    /// the view passes through untouched.
    /// - Parameters:
    ///   - condition: Evaluated lazily via autoclosure.
    ///   - transform: The modifier chain to apply on the `true` branch.
    /// - Returns: The transformed or original view.
    @ViewBuilder
    func `if`(_ condition: @autoclosure () -> Bool, transform: (Self) -> some View) -> some View {
        if condition() {
            transform(self)
        } else {
            self
        }
    }

    /// Branches between two transforms of the view based on `condition`.
    /// - Parameters:
    ///   - condition: Evaluated lazily via autoclosure.
    ///   - whenTrue: Applied when the condition holds.
    ///   - whenFalse: Applied when the condition does not hold.
    /// - Returns: The view produced by whichever transform ran.
    @ViewBuilder
    func `if`(_ condition: @autoclosure () -> Bool, if whenTrue: (Self) -> some View, else whenFalse: (Self) -> some View) -> some View {
        if condition() {
            whenTrue(self)
        } else {
            whenFalse(self)
        }
    }

    /// Applies `transform` with the unwrapped value when `optionalValue` is
    /// non-`nil`; otherwise the view passes through untouched.
    /// - Parameters:
    ///   - optionalValue: The optional to unwrap.
    ///   - transform: Receives the view and the unwrapped value.
    /// - Returns: The transformed or original view.
    @ViewBuilder
    func ifLet<T: Any>(_ optionalValue: T?, transform: (Self, T) -> some View) -> some View {
        if let unwrapped = optionalValue {
            transform(self, unwrapped)
        } else {
            self
        }
    }
}
|
||||
|
||||
extension Bool {
    // NOTE(review): despite the name, this property is `true` when the OS is
    // *below* iOS 16 (the `guard #available` fails) and `false` on iOS 16 or
    // later. Callers appear to use it as a "needs pre-iOS-16 fallback" flag —
    // confirm before renaming, as flipping the semantics would break them.
    static var iOS16: Bool {
        guard #available(iOS 16, *) else {
            return true
        }
        return false
    }
}
|
||||
783
KSPlayer-main/Sources/KSPlayer/SwiftUI/KSVideoPlayerView.swift
Normal file
783
KSPlayer-main/Sources/KSPlayer/SwiftUI/KSVideoPlayerView.swift
Normal file
@@ -0,0 +1,783 @@
|
||||
//
|
||||
// File.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2022/1/29.
|
||||
//
|
||||
import AVFoundation
|
||||
import MediaPlayer
|
||||
import SwiftUI
|
||||
|
||||
@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
@MainActor
/// The full-featured SwiftUI player screen: video surface, subtitle overlay,
/// controller chrome, and platform-specific remote/keyboard/gesture handling.
public struct KSVideoPlayerView: View {
    /// Optional external subtitle provider, attached to the subtitle model on appear.
    private let subtitleDataSouce: SubtitleDataSouce?
    @State
    private var title: String
    @StateObject
    private var playerCoordinator: KSVideoPlayer.Coordinator
    @Environment(\.dismiss)
    private var dismiss
    // Tracks which region owns focus; moving to `.info` shows the settings dropdown.
    @FocusState
    private var focusableField: FocusableField? {
        willSet {
            isDropdownShow = newValue == .info
        }
    }

    public let options: KSOptions
    @State
    private var isDropdownShow = false
    @State
    private var showVideoSetting = false
    @State
    public var url: URL {
        didSet {
            #if os(macOS)
            // Keep the macOS "Open Recent" menu in sync with the playing URL.
            NSDocumentController.shared.noteNewRecentDocumentURL(url)
            #endif
        }
    }

    /// Convenience initializer that creates a fresh coordinator.
    public init(url: URL, options: KSOptions, title: String? = nil) {
        self.init(coordinator: KSVideoPlayer.Coordinator(), url: url, options: options, title: title, subtitleDataSouce: nil)
    }

    /// Wraps plain values into `State` containers; title defaults to the URL's last path component.
    public init(coordinator: KSVideoPlayer.Coordinator, url: URL, options: KSOptions, title: String? = nil, subtitleDataSouce: SubtitleDataSouce? = nil) {
        self.init(coordinator: coordinator, url: .init(wrappedValue: url), options: options, title: .init(wrappedValue: title ?? url.lastPathComponent), subtitleDataSouce: subtitleDataSouce)
    }

    /// Designated initializer taking pre-built `State` wrappers.
    public init(coordinator: KSVideoPlayer.Coordinator, url: State<URL>, options: KSOptions, title: State<String>, subtitleDataSouce: SubtitleDataSouce?) {
        _url = url
        _playerCoordinator = .init(wrappedValue: coordinator)
        _title = title
        #if os(macOS)
        NSDocumentController.shared.noteNewRecentDocumentURL(url.wrappedValue)
        #endif
        self.options = options
        self.subtitleDataSouce = subtitleDataSouce
    }

    public var body: some View {
        ZStack {
            GeometryReader { proxy in
                playView
                HStack {
                    Spacer()
                    VideoSubtitleView(model: playerCoordinator.subtitleModel)
                        .allowsHitTesting(false) // Disable interaction on the subtitle view so it cannot steal taps or other gestures from the player.
                    Spacer()
                }
                .padding()
                controllerView(playerWidth: proxy.size.width)
                #if os(tvOS)
                .ignoresSafeArea()
                #endif
                #if os(tvOS)
                if isDropdownShow {
                    VideoSettingView(config: playerCoordinator, subtitleModel: playerCoordinator.subtitleModel, subtitleTitle: title)
                        .focused($focusableField, equals: .info)
                }
                #endif
            }
        }
        .preferredColorScheme(.dark)
        .tint(.white)
        .persistentSystemOverlays(.hidden)
        .toolbar(.hidden, for: .automatic)
        #if os(tvOS)
        // Siri Remote play/pause toggles playback directly.
        .onPlayPauseCommand {
            if playerCoordinator.state.isPlaying {
                playerCoordinator.playerLayer?.pause()
            } else {
                playerCoordinator.playerLayer?.play()
            }
        }
        // Menu/back: hide the mask first, then return focus to playback, then dismiss.
        .onExitCommand {
            if playerCoordinator.isMaskShow {
                playerCoordinator.isMaskShow = false
            } else {
                switch focusableField {
                case .play:
                    dismiss()
                default:
                    focusableField = .play
                }
            }
        }
        #endif
    }

    /// The video surface plus per-platform input handling (keys, gestures,
    /// move commands, drag-and-drop).
    private var playView: some View {
        KSVideoPlayer(coordinator: playerCoordinator, url: url, options: options)
            .onStateChanged { playerLayer, state in
                if state == .readyToPlay {
                    // Prefer the media's embedded title metadata once available.
                    if let movieTitle = playerLayer.player.dynamicInfo?.metadata["title"] {
                        title = movieTitle
                    }
                }
            }
            .onBufferChanged { bufferedCount, consumeTime in
                print("bufferedCount \(bufferedCount), consumeTime \(consumeTime)")
            }
        #if canImport(UIKit)
            .onSwipe { _ in
                playerCoordinator.isMaskShow = true
            }
        #endif
            .ignoresSafeArea()
            .onAppear {
                focusableField = .play
                if let subtitleDataSouce {
                    playerCoordinator.subtitleModel.addSubtitle(dataSouce: subtitleDataSouce)
                }
                // Do not add this monitor here; otherwise playerCoordinator can never be
                // released. Alternatively call removeMonitor in onDisappear to release it.
                // #if os(macOS)
                // NSEvent.addLocalMonitorForEvents(matching: [.mouseMoved]) {
                //     isMaskShow = overView
                //     return $0
                // }
                // #endif
            }

        #if os(iOS) || os(xrOS)
            .navigationBarTitleDisplayMode(.inline)
        #endif
        #if !os(iOS)
            .focusable(!playerCoordinator.isMaskShow)
            .focused($focusableField, equals: .play)
        #endif
        #if !os(xrOS)
            .onKeyPressLeftArrow {
                playerCoordinator.skip(interval: -15)
            }
            .onKeyPressRightArrow {
                playerCoordinator.skip(interval: 15)
            }
            .onKeyPressSapce {
                if playerCoordinator.state.isPlaying {
                    playerCoordinator.playerLayer?.pause()
                } else {
                    playerCoordinator.playerLayer?.play()
                }
            }
        #endif
        #if os(macOS)
            // Double-click toggles full screen; relayout keeps the surface sized correctly.
            .onTapGesture(count: 2) {
                guard let view = playerCoordinator.playerLayer?.player.view else {
                    return
                }
                view.window?.toggleFullScreen(nil)
                view.needsLayout = true
                view.layoutSubtreeIfNeeded()
            }
            .onExitCommand {
                playerCoordinator.playerLayer?.player.view?.exitFullScreenMode()
            }
            .onMoveCommand { direction in
                switch direction {
                case .left:
                    playerCoordinator.skip(interval: -15)
                case .right:
                    playerCoordinator.skip(interval: 15)
                case .up:
                    playerCoordinator.playerLayer?.player.playbackVolume += 0.2
                case .down:
                    playerCoordinator.playerLayer?.player.playbackVolume -= 0.2
                @unknown default:
                    break
                }
            }
        #else
            .onTapGesture {
                playerCoordinator.isMaskShow.toggle()
            }
        #endif
        #if os(tvOS)
            .onMoveCommand { direction in
                switch direction {
                case .left:
                    playerCoordinator.skip(interval: -15)
                case .right:
                    playerCoordinator.skip(interval: 15)
                case .up:
                    playerCoordinator.mask(show: true, autoHide: false)
                case .down:
                    focusableField = .info
                @unknown default:
                    break
                }
            }
        #else
            .onHover { _ in
                playerCoordinator.isMaskShow = true
            }
            // Accept dropped file URLs (encoding 4 == UTF-8) and hand them to openURL.
            .onDrop(of: ["public.file-url"], isTargeted: nil) { providers -> Bool in
                providers.first?.loadDataRepresentation(forTypeIdentifier: "public.file-url") { data, _ in
                    if let data, let path = NSString(data: data, encoding: 4), let url = URL(string: path as String) {
                        openURL(url)
                    }
                }
                return true
            }
        #endif
    }

    /// The controller chrome (transport bar plus, when visible, the time slider),
    /// shown only while the mask is up.
    private func controllerView(playerWidth: Double) -> some View {
        VStack {
            VideoControllerView(config: playerCoordinator, subtitleModel: playerCoordinator.subtitleModel, title: $title, volumeSliderSize: playerWidth / 4)
            #if !os(xrOS)
            // Setting opacity to 0 would still update the view, so the time bar is
            // conditionally included instead.
            if playerCoordinator.isMaskShow {
                VideoTimeShowView(config: playerCoordinator, model: playerCoordinator.timemodel)
                    .onAppear {
                        focusableField = .controller
                    }
                    .onDisappear {
                        focusableField = .play
                    }
            }
            #endif
        }
        #if os(xrOS)
        .ornament(visibility: playerCoordinator.isMaskShow ? .visible : .hidden, attachmentAnchor: .scene(.bottom)) {
            ornamentView(playerWidth: playerWidth)
        }
        .sheet(isPresented: $showVideoSetting) {
            NavigationStack {
                VideoSettingView(config: playerCoordinator, subtitleModel: playerCoordinator.subtitleModel, subtitleTitle: title)
            }
            .buttonStyle(.plain)
        }
        #elseif os(tvOS)
        .padding(.horizontal, 80)
        .padding(.bottom, 80)
        .background(overlayGradient)
        #endif
        .focused($focusableField, equals: .controller)
        .opacity(playerCoordinator.isMaskShow ? 1 : 0)
        .padding()
    }

    // Bottom-weighted dimming gradient behind the tvOS controls.
    private let overlayGradient = LinearGradient(
        stops: [
            Gradient.Stop(color: .black.opacity(0), location: 0.22),
            Gradient.Stop(color: .black.opacity(0.7), location: 1),
        ],
        startPoint: .top,
        endPoint: .bottom
    )

    /// visionOS ornament hosting the title and transport controls.
    private func ornamentView(playerWidth: Double) -> some View {
        VStack(alignment: .leading) {
            KSVideoPlayerViewBuilder.titleView(title: title, config: playerCoordinator)
            ornamentControlsView(playerWidth: playerWidth)
        }
        .frame(width: playerWidth / 1.5)
        .buttonStyle(.plain)
        .padding(.vertical, 24)
        .padding(.horizontal, 36)
        #if os(xrOS)
        .glassBackgroundEffect()
        #endif
    }

    /// Transport row inside the visionOS ornament.
    private func ornamentControlsView(playerWidth _: Double) -> some View {
        HStack {
            KSVideoPlayerViewBuilder.playbackControlView(config: playerCoordinator, spacing: 16)
            Spacer()
            VideoTimeShowView(config: playerCoordinator, model: playerCoordinator.timemodel, timeFont: .title3.monospacedDigit())
            Spacer()
            Group {
                KSVideoPlayerViewBuilder.contentModeButton(config: playerCoordinator)
                KSVideoPlayerViewBuilder.subtitleButton(config: playerCoordinator)
                KSVideoPlayerViewBuilder.playbackRateButton(playbackRate: $playerCoordinator.playbackRate)
                KSVideoPlayerViewBuilder.infoButton(showVideoSetting: $showVideoSetting)
            }
            .font(.largeTitle)
        }
    }

    // Focus regions used by the @FocusState above.
    fileprivate enum FocusableField {
        case play, controller, info
    }

    /// Routes an incoming URL: subtitle files become the selected subtitle;
    /// audio/movie URLs replace the current media and reset the title.
    public func openURL(_ url: URL) {
        runOnMainThread {
            if url.isSubtitle {
                let info = URLSubtitleInfo(url: url)
                playerCoordinator.subtitleModel.selectedSubtitleInfo = info
            } else if url.isAudio || url.isMovie {
                self.url = url
                title = url.lastPathComponent
            }
        }
    }
}
|
||||
|
||||
extension View {
    /// Runs `action` on a left-arrow key press where `onKeyPress` is available
    /// (iOS/tvOS 17, macOS 14); on older systems the view is returned unchanged
    /// and the key is not handled.
    func onKeyPressLeftArrow(action: @escaping () -> Void) -> some View {
        if #available(iOS 17.0, macOS 14.0, tvOS 17.0, *) {
            return onKeyPress(.leftArrow) {
                action()
                return .handled
            }
        } else {
            return self
        }
    }

    /// Right-arrow counterpart of `onKeyPressLeftArrow`; no-op before the
    /// `onKeyPress` availability floor.
    func onKeyPressRightArrow(action: @escaping () -> Void) -> some View {
        if #available(iOS 17.0, macOS 14.0, tvOS 17.0, *) {
            return onKeyPress(.rightArrow) {
                action()
                return .handled
            }
        } else {
            return self
        }
    }

    // NOTE(review): "Sapce" is a typo for "Space" in this internal helper's
    // name; renaming would require updating its callers in this file.
    /// Runs `action` on a space-bar press; no-op before the `onKeyPress`
    /// availability floor.
    func onKeyPressSapce(action: @escaping () -> Void) -> some View {
        if #available(iOS 17.0, macOS 14.0, tvOS 17.0, *) {
            return onKeyPress(.space) {
                action()
                return .handled
            }
        } else {
            return self
        }
    }
}
|
||||
|
||||
@available(iOS 16, tvOS 16, macOS 13, *)
/// The transport/controls bar: on tvOS a single bottom row; elsewhere a
/// top row (close, AirPlay, audio, mute, …) plus centered playback controls
/// and a bottom title row.
struct VideoControllerView: View {
    @ObservedObject
    fileprivate var config: KSVideoPlayer.Coordinator
    @ObservedObject
    fileprivate var subtitleModel: SubtitleModel
    @Binding
    fileprivate var title: String
    // Width for the visionOS volume slider; defaults to 100 when nil.
    fileprivate var volumeSliderSize: Double?
    @State
    private var showVideoSetting = false
    @Environment(\.dismiss)
    private var dismiss
    public var body: some View {
        VStack {
            #if os(tvOS)
            Spacer()
            HStack {
                Text(title)
                    .lineLimit(2)
                    .layoutPriority(3)
                ProgressView()
                    .opacity(config.state == .buffering ? 1 : 0)
                Spacer()
                    .layoutPriority(2)
                HStack {
                    // Play/pause toggle; shows a slashed icon in the error state.
                    Button {
                        if config.state.isPlaying {
                            config.playerLayer?.pause()
                        } else {
                            config.playerLayer?.play()
                        }
                    } label: {
                        Image(systemName: config.state == .error ? "play.slash.fill" : (config.state.isPlaying ? "pause.circle.fill" : "play.circle.fill"))
                    }
                    .frame(width: 56)
                    if let audioTracks = config.playerLayer?.player.tracks(mediaType: .audio), !audioTracks.isEmpty {
                        audioButton(audioTracks: audioTracks)
                    }
                    muteButton
                        .frame(width: 56)
                    contentModeButton
                        .frame(width: 56)
                    subtitleButton
                    playbackRateButton
                    pipButton
                        .frame(width: 56)
                    infoButton
                        .frame(width: 56)
                }
                .font(.caption)
            }
            #else
            HStack {
                #if !os(xrOS)
                Button {
                    dismiss()
                } label: {
                    Image(systemName: "x.circle.fill")
                }
                #if !os(tvOS)
                if config.playerLayer?.player.allowsExternalPlayback == true {
                    AirPlayView().fixedSize()
                }
                #endif
                #endif
                Spacer()
                if let audioTracks = config.playerLayer?.player.tracks(mediaType: .audio), !audioTracks.isEmpty {
                    audioButton(audioTracks: audioTracks)
                    #if os(xrOS)
                        .aspectRatio(1, contentMode: .fit)
                        .glassBackgroundEffect()
                    #endif
                }
                muteButton
                #if !os(xrOS)
                contentModeButton
                subtitleButton
                #endif
            }
            Spacer()
            #if !os(xrOS)
            KSVideoPlayerViewBuilder.playbackControlView(config: config)
            Spacer()
            HStack {
                KSVideoPlayerViewBuilder.titleView(title: title, config: config)
                Spacer()
                playbackRateButton
                pipButton
                infoButton
            }
            #endif
            #endif
        }
        #if !os(tvOS)
        .font(.title)
        .buttonStyle(.borderless)
        #endif
        .sheet(isPresented: $showVideoSetting) {
            VideoSettingView(config: config, subtitleModel: config.subtitleModel, subtitleTitle: title)
        }
    }

    // On visionOS the mute button is paired with a volume slider; muting is
    // implied when the slider reaches zero.
    private var muteButton: some View {
        #if os(xrOS)
        HStack {
            Slider(value: $config.playbackVolume, in: 0 ... 1)
                .onChange(of: config.playbackVolume) { _, newValue in
                    config.isMuted = newValue == 0
                }
                .frame(width: volumeSliderSize ?? 100)
                .tint(.white.opacity(0.8))
                .padding(.leading, 16)
            KSVideoPlayerViewBuilder.muteButton(config: config)
        }
        .padding(16)
        .glassBackgroundEffect()
        #else
        KSVideoPlayerViewBuilder.muteButton(config: config)
        #endif
    }

    private var contentModeButton: some View {
        KSVideoPlayerViewBuilder.contentModeButton(config: config)
    }

    /// Menu listing the available audio tracks; selecting one switches the
    /// player's active audio track.
    private func audioButton(audioTracks: [MediaPlayerTrack]) -> some View {
        MenuView(selection: Binding {
            audioTracks.first { $0.isEnabled }?.trackID
        } set: { value in
            if let track = audioTracks.first(where: { $0.trackID == value }) {
                config.playerLayer?.player.select(track: track)
            }
        }) {
            ForEach(audioTracks, id: \.trackID) { track in
                Text(track.description).tag(track.trackID as Int32?)
            }
        } label: {
            Image(systemName: "waveform.circle.fill")
            #if os(xrOS)
                .padding()
                .clipShape(Circle())
            #endif
        }
    }

    private var subtitleButton: some View {
        KSVideoPlayerViewBuilder.subtitleButton(config: config)
    }

    private var playbackRateButton: some View {
        KSVideoPlayerViewBuilder.playbackRateButton(playbackRate: $config.playbackRate)
    }

    /// Toggles picture-in-picture on the player layer.
    private var pipButton: some View {
        Button {
            config.playerLayer?.isPipActive.toggle()
        } label: {
            Image(systemName: "rectangle.on.rectangle.circle.fill")
        }
    }

    private var infoButton: some View {
        KSVideoPlayerViewBuilder.infoButton(showVideoSetting: $showVideoSetting)
    }
}
|
||||
|
||||
@available(iOS 15, tvOS 16, macOS 12, *)
/// A selection control that renders as a `Menu`-hosted inline picker where
/// available (tvOS 17+ and other platforms) and falls back to a plain
/// navigation-link `Picker` on older tvOS.
public struct MenuView<Label, SelectionValue, Content>: View where Label: View, SelectionValue: Hashable, Content: View {
    /// Two-way binding for the currently selected value.
    public let selection: Binding<SelectionValue>
    @ViewBuilder
    public let content: () -> Content
    @ViewBuilder
    public let label: () -> Label
    // NOTE(review): appears unused in this body — candidate for removal after
    // confirming no extension touches it.
    @State
    private var showMenu = false
    public var body: some View {
        if #available(tvOS 17, *) {
            Menu {
                Picker(selection: selection) {
                    content()
                } label: {
                    EmptyView()
                }
                .pickerStyle(.inline)
            } label: {
                label()
            }
            .menuIndicator(.hidden)
        } else {
            // Pre-tvOS-17 fallback: a navigation-link picker with fixed metrics.
            Picker(selection: selection, content: content, label: label)
            #if !os(macOS)
                .pickerStyle(.navigationLink)
            #endif
                .frame(height: 50)
            #if os(tvOS)
                .frame(width: 110)
            #endif
        }
    }
}
|
||||
|
||||
@available(iOS 15, tvOS 15, macOS 12, *)
/// Current-time / scrubber / total-time row. Shows "Live Streaming" instead
/// of a slider when the player reports the stream is not seekable.
struct VideoTimeShowView: View {
    @ObservedObject
    fileprivate var config: KSVideoPlayer.Coordinator
    @ObservedObject
    fileprivate var model: ControllerTimeModel
    // Optional override for the time labels; defaults to caption2 monospaced digits.
    fileprivate var timeFont: Font?
    public var body: some View {
        if config.playerLayer?.player.seekable ?? false {
            HStack {
                Text(model.currentTime.toString(for: .minOrHour)).font(timeFont ?? .caption2.monospacedDigit())
                // While dragging, playback is paused; releasing performs the seek.
                Slider(value: Binding {
                    Float(model.currentTime)
                } set: { newValue, _ in
                    model.currentTime = Int(newValue)
                }, in: 0 ... Float(model.totalTime)) { onEditingChanged in
                    if onEditingChanged {
                        config.playerLayer?.pause()
                    } else {
                        config.seek(time: TimeInterval(model.currentTime))
                    }
                }
                .frame(maxHeight: 20)
                #if os(xrOS)
                .tint(.white.opacity(0.8))
                #endif
                Text((model.totalTime).toString(for: .minOrHour)).font(timeFont ?? .caption2.monospacedDigit())
            }
            .font(.system(.title2))
        } else {
            Text("Live Streaming")
        }
    }
}
|
||||
|
||||
extension EventModifiers {
    /// The empty modifier set — no modifier keys pressed.
    static let none: EventModifiers = []
}
|
||||
|
||||
@available(iOS 16, tvOS 16, macOS 13, *)
/// Overlays every currently active subtitle part (text or bitmap) from the
/// subtitle model on top of the video surface.
struct VideoSubtitleView: View {
    @ObservedObject
    fileprivate var model: SubtitleModel
    var body: some View {
        ZStack {
            ForEach(model.parts) { part in
                part.subtitleView
            }
        }
    }

    /// Wraps a subtitle bitmap: uses Live Text recognition when the
    /// `enableFeatureLiveText` build flag and VisionKit are available
    /// (and not on the simulator), otherwise a plain resizable `Image`.
    fileprivate static func imageView(_ image: UIImage) -> some View {
        #if enableFeatureLiveText && canImport(VisionKit) && !targetEnvironment(simulator)
        if #available(macCatalyst 17.0, *) {
            return LiveTextImage(uiImage: image)
        } else {
            return Image(uiImage: image)
                .resizable()
        }
        #else
        return Image(uiImage: image)
            .resizable()
        #endif
    }
}
|
||||
|
||||
private extension SubtitlePart {
    @available(iOS 16, tvOS 16, macOS 13, *)
    @MainActor
    /// Renders this subtitle part: bitmap subtitles are positioned via
    /// `fitRect`; text subtitles are styled from the `SubtitleModel` globals
    /// and vertically placed per their text position.
    var subtitleView: some View {
        VStack {
            if let image {
                Spacer()
                GeometryReader { geometry in
                    // fitRect maps the subtitle bitmap into the available area.
                    let fitRect = image.fitRect(geometry.size)
                    VideoSubtitleView.imageView(image)
                        .offset(CGSize(width: fitRect.origin.x, height: fitRect.origin.y))
                        .frame(width: fitRect.size.width, height: fitRect.size.height)
                }
                // Do not add scaledToFit — it breaks the image's scaling ratio.
                // .scaledToFit()
                .padding()
            } else if let text {
                let textPosition = textPosition ?? SubtitleModel.textPosition
                // A leading Spacer pushes bottom/center-aligned text down.
                if textPosition.verticalAlign == .bottom || textPosition.verticalAlign == .center {
                    Spacer()
                }
                Text(AttributedString(text))
                    .font(Font(SubtitleModel.textFont))
                    .shadow(color: .black.opacity(0.9), radius: 1, x: 1, y: 1)
                    .foregroundColor(SubtitleModel.textColor)
                    .italic(SubtitleModel.textItalic)
                    .background(SubtitleModel.textBackgroundColor)
                    .multilineTextAlignment(.center)
                    .alignmentGuide(textPosition.horizontalAlign) {
                        $0[.leading]
                    }
                    .padding(textPosition.edgeInsets)
                #if !os(tvOS)
                    .textSelection(.enabled)
                #endif
                // A trailing Spacer pushes top/center-aligned text up.
                if textPosition.verticalAlign == .top || textPosition.verticalAlign == .center {
                    Spacer()
                }
            } else {
                // This placeholder is required; without it the previous image is
                // never cleared. Appears to be a SwiftUI bug.
                Text("")
            }
        }
    }
}
|
||||
|
||||
@available(iOS 16, tvOS 16, macOS 13, *)
/// Settings sheet for the player: video-track selection, subtitle delay and
/// search, stream diagnostics, and file size.
struct VideoSettingView: View {
    @ObservedObject
    fileprivate var config: KSVideoPlayer.Coordinator
    @ObservedObject
    fileprivate var subtitleModel: SubtitleModel
    // Seed text for the subtitle search field; starts from the media title.
    @State
    fileprivate var subtitleTitle: String
    @Environment(\.dismiss)
    private var dismiss

    var body: some View {
        PlatformView {
            let videoTracks = config.playerLayer?.player.tracks(mediaType: .video)
            if let videoTracks, !videoTracks.isEmpty {
                // Switching the selection tells the player to activate that track.
                Picker(selection: Binding {
                    videoTracks.first { $0.isEnabled }?.trackID
                } set: { value in
                    if let track = videoTracks.first(where: { $0.trackID == value }) {
                        config.playerLayer?.player.select(track: track)
                    }
                }) {
                    ForEach(videoTracks, id: \.trackID) { track in
                        Text(track.description).tag(track.trackID as Int32?)
                    }
                } label: {
                    Label("Video Track", systemImage: "video.fill")
                }
                LabeledContent("Video Type", value: (videoTracks.first { $0.isEnabled }?.dynamicRange ?? .sdr).description)
            }
            // Fixed typo in user-facing strings: "Sutitle" -> "Subtitle".
            TextField("Subtitle delay", value: $subtitleModel.subtitleDelay, format: .number)
            TextField("Title", text: $subtitleTitle)
            Button("Search Subtitle") {
                subtitleModel.searchSubtitle(query: subtitleTitle, languages: ["zh-cn"])
            }
            LabeledContent("Stream Type", value: (videoTracks?.first { $0.isEnabled }?.fieldOrder ?? .progressive).description)
            if let dynamicInfo = config.playerLayer?.player.dynamicInfo {
                DynamicInfoView(dynamicInfo: dynamicInfo)
            }
            if let fileSize = config.playerLayer?.player.fileSize, fileSize > 0 {
                LabeledContent("File Size", value: fileSize.kmFormatted + "B")
            }
        }
        #if os(macOS) || targetEnvironment(macCatalyst) || os(xrOS)
        .toolbar {
            Button("Done") {
                dismiss()
            }
            .keyboardShortcut(.defaultAction)
        }
        #endif
    }
}
|
||||
|
||||
@available(iOS 16, tvOS 16, macOS 13, *)
/// Read-only diagnostics rows (FPS, A/V sync, dropped frames, throughput)
/// driven by the player's observable `DynamicInfo`.
public struct DynamicInfoView: View {
    @ObservedObject
    fileprivate var dynamicInfo: DynamicInfo
    public var body: some View {
        LabeledContent("Display FPS", value: dynamicInfo.displayFPS, format: .number)
        LabeledContent("Audio Video sync", value: dynamicInfo.audioVideoSyncDiff, format: .number)
        // Dropped total combines frame-level and packet-level drops.
        LabeledContent("Dropped Frames", value: dynamicInfo.droppedVideoFrameCount + dynamicInfo.droppedVideoPacketCount, format: .number)
        LabeledContent("Bytes Read", value: dynamicInfo.bytesRead.kmFormatted + "B")
        LabeledContent("Audio bitrate", value: dynamicInfo.audioBitrate.kmFormatted + "bps")
        LabeledContent("Video bitrate", value: dynamicInfo.videoBitrate.kmFormatted + "bps")
    }
}
|
||||
|
||||
@available(iOS 15, tvOS 16, macOS 12, *)
/// Hosts settings-style content in the idiomatic container per platform:
/// a ScrollView with navigation-link pickers on tvOS, a Form elsewhere
/// (padded on macOS).
public struct PlatformView<Content: View>: View {
    private let content: () -> Content
    public var body: some View {
        #if os(tvOS)
        ScrollView {
            content()
                .padding()
        }
        .pickerStyle(.navigationLink)
        #else
        Form {
            content()
        }
        #if os(macOS)
        .padding()
        #endif
        #endif
    }

    /// - Parameter content: The settings rows to host.
    public init(@ViewBuilder content: @escaping () -> Content) {
        self.content = content
    }
}
|
||||
|
||||
@available(iOS 16.0, macOS 13.0, tvOS 16.0, watchOS 9.0, *)
/// Xcode preview driving the player with the Big Buck Bunny sample clip.
struct KSVideoPlayerView_Previews: PreviewProvider {
    static var previews: some View {
        let sampleURL = URL(string: "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")!
        KSVideoPlayerView(coordinator: KSVideoPlayer.Coordinator(), url: sampleURL, options: KSOptions())
    }
}
|
||||
|
||||
// struct AVContentView: View {
|
||||
// var body: some View {
|
||||
// StructAVPlayerView().frame(width: UIScene.main.bounds.width, height: 400, alignment: .center)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// struct StructAVPlayerView: UIViewRepresentable {
|
||||
// let playerVC = AVPlayerViewController()
|
||||
// typealias UIViewType = UIView
|
||||
// func makeUIView(context _: Context) -> UIView {
|
||||
// playerVC.view
|
||||
// }
|
||||
//
|
||||
// func updateUIView(_: UIView, context _: Context) {
|
||||
// playerVC.player = AVPlayer(url: URL(string: "https://bitmovin-a.akamaihd.net/content/dataset/multi-codec/hevc/stream_fmp4.m3u8")!)
|
||||
// }
|
||||
// }
|
||||
@@ -0,0 +1,193 @@
|
||||
//
|
||||
// KSVideoPlayerViewBuilder.swift
|
||||
//
|
||||
//
|
||||
// Created by Ian Magallan Bosch on 17.03.24.
|
||||
//
|
||||
|
||||
import SwiftUI
|
||||
|
||||
@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
|
||||
enum KSVideoPlayerViewBuilder {
|
||||
@MainActor
|
||||
static func playbackControlView(config: KSVideoPlayer.Coordinator, spacing: CGFloat? = nil) -> some View {
|
||||
HStack(spacing: spacing) {
|
||||
// Playback controls don't need spacers for visionOS, since the controls are laid out in a HStack.
|
||||
#if os(xrOS)
|
||||
backwardButton(config: config)
|
||||
playButton(config: config)
|
||||
forwardButton(config: config)
|
||||
#else
|
||||
Spacer()
|
||||
backwardButton(config: config)
|
||||
Spacer()
|
||||
playButton(config: config)
|
||||
Spacer()
|
||||
forwardButton(config: config)
|
||||
Spacer()
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@MainActor
|
||||
static func contentModeButton(config: KSVideoPlayer.Coordinator) -> some View {
|
||||
Button {
|
||||
config.isScaleAspectFill.toggle()
|
||||
} label: {
|
||||
Image(systemName: config.isScaleAspectFill ? "rectangle.arrowtriangle.2.inward" : "rectangle.arrowtriangle.2.outward")
|
||||
}
|
||||
}
|
||||
|
||||
@MainActor
|
||||
static func subtitleButton(config: KSVideoPlayer.Coordinator) -> some View {
|
||||
MenuView(selection: Binding {
|
||||
config.subtitleModel.selectedSubtitleInfo?.subtitleID
|
||||
} set: { value in
|
||||
let info = config.subtitleModel.subtitleInfos.first { $0.subtitleID == value }
|
||||
config.subtitleModel.selectedSubtitleInfo = info
|
||||
if let info = info as? MediaPlayerTrack {
|
||||
// 因为图片字幕想要实时的显示,那就需要seek。所以需要走select track
|
||||
config.playerLayer?.player.select(track: info)
|
||||
}
|
||||
}) {
|
||||
Text("Off").tag(nil as String?)
|
||||
ForEach(config.subtitleModel.subtitleInfos, id: \.subtitleID) { track in
|
||||
Text(track.name).tag(track.subtitleID as String?)
|
||||
}
|
||||
} label: {
|
||||
Image(systemName: "text.bubble.fill")
|
||||
}
|
||||
}
|
||||
|
||||
@MainActor
|
||||
static func playbackRateButton(playbackRate: Binding<Float>) -> some View {
|
||||
MenuView(selection: playbackRate) {
|
||||
ForEach([0.5, 1.0, 1.25, 1.5, 2.0] as [Float]) { value in
|
||||
// 需要有一个变量text。不然会自动帮忙加很多0
|
||||
let text = "\(value) x"
|
||||
Text(text).tag(value)
|
||||
}
|
||||
} label: {
|
||||
Image(systemName: "gauge.with.dots.needle.67percent")
|
||||
}
|
||||
}
|
||||
|
||||
/// Title row: the media title plus a spinner that is visible only while buffering.
@MainActor
static func titleView(title: String, config: KSVideoPlayer.Coordinator) -> some View {
    let isBuffering = config.state == .buffering
    return HStack {
        Text(title).font(.title3)
        // Hidden via opacity (not removal) so the title does not shift
        // when buffering starts or stops.
        ProgressView().opacity(isBuffering ? 1 : 0)
    }
}
|
||||
|
||||
/// Mute toggle; the glyph mirrors the current mute state.
@MainActor
static func muteButton(config: KSVideoPlayer.Coordinator) -> some View {
    let icon = config.isMuted ? speakerDisabledSystemName : speakerSystemName
    return Button(
        action: { config.isMuted.toggle() },
        label: { Image(systemName: icon) }
    )
    .shadow(color: .black, radius: 1)
}
|
||||
|
||||
/// Button that toggles the video-settings panel.
static func infoButton(showVideoSetting: Binding<Bool>) -> some View {
    Button(
        action: { showVideoSetting.wrappedValue.toggle() },
        label: { Image(systemName: "info.circle.fill") }
    )
    // Adding keyboardShortcut in the iOS simulator keeps
    // KSVideoPlayer.Coordinator from being released; real devices are fine.
    #if !os(tvOS)
    .keyboardShortcut("i", modifiers: [.command])
    #endif
}
|
||||
}
|
||||
|
||||
@available(iOS 16.0, macOS 13.0, tvOS 16.0, *)
private extension KSVideoPlayerViewBuilder {
    // MARK: - SF Symbol names
    // visionOS uses the plain glyphs; every other platform uses the
    // circle-filled variants.

    static var playSystemName: String {
        #if os(xrOS)
        "play.fill"
        #else
        "play.circle.fill"
        #endif
    }

    static var pauseSystemName: String {
        #if os(xrOS)
        "pause.fill"
        #else
        "pause.circle.fill"
        #endif
    }

    static var speakerSystemName: String {
        #if os(xrOS)
        "speaker.fill"
        #else
        "speaker.wave.2.circle.fill"
        #endif
    }

    static var speakerDisabledSystemName: String {
        #if os(xrOS)
        "speaker.slash.fill"
        #else
        "speaker.slash.circle.fill"
        #endif
    }

    /// Skip-back-15s button; rendered only when the current player reports
    /// itself as seekable.
    @MainActor
    @ViewBuilder
    static func backwardButton(config: KSVideoPlayer.Coordinator) -> some View {
        if config.playerLayer?.player.seekable ?? false {
            Button {
                config.skip(interval: -15)
            } label: {
                Image(systemName: "gobackward.15")
                    .font(.largeTitle)
            }
            #if !os(tvOS)
            .keyboardShortcut(.leftArrow, modifiers: .none)
            #endif
        }
    }

    /// Skip-forward-15s button; rendered only when the current player reports
    /// itself as seekable.
    @MainActor
    @ViewBuilder
    static func forwardButton(config: KSVideoPlayer.Coordinator) -> some View {
        if config.playerLayer?.player.seekable ?? false {
            Button {
                config.skip(interval: 15)
            } label: {
                Image(systemName: "goforward.15")
                    .font(.largeTitle)
            }
            #if !os(tvOS)
            .keyboardShortcut(.rightArrow, modifiers: .none)
            #endif
        }
    }

    /// Play/pause toggle. Shows a "play.slash" glyph while the player is in
    /// the error state.
    @MainActor
    static func playButton(config: KSVideoPlayer.Coordinator) -> some View {
        Button {
            if config.state.isPlaying {
                config.playerLayer?.pause()
            } else {
                config.playerLayer?.play()
            }
        } label: {
            Image(systemName: config.state == .error ? "play.slash.fill" : (config.state.isPlaying ? pauseSystemName : playSystemName))
                .font(.largeTitle)
        }
        #if os(xrOS)
        .contentTransition(.symbolEffect(.replace))
        #endif
        #if !os(tvOS)
        .keyboardShortcut(.space, modifiers: .none)
        #endif
    }
}
|
||||
91
KSPlayer-main/Sources/KSPlayer/SwiftUI/LiveTextImage.swift
Normal file
91
KSPlayer-main/Sources/KSPlayer/SwiftUI/LiveTextImage.swift
Normal file
@@ -0,0 +1,91 @@
|
||||
//
|
||||
// LiveTextImage.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/5/4.
|
||||
//
|
||||
|
||||
import SwiftUI
|
||||
#if canImport(VisionKit)
|
||||
import VisionKit
|
||||
|
||||
/// SwiftUI wrapper around an image view with VisionKit Live Text
/// (selectable recognized text) layered on top.
@available(iOS 16.0, macOS 13.0, macCatalyst 17.0, *)
@MainActor
public struct LiveTextImage: UIViewRepresentable {
    /// Image to display; Live Text analysis runs against this image.
    public let uiImage: UIImage
    // Performs the asynchronous text recognition.
    private let analyzer = ImageAnalyzer()
    #if canImport(UIKit)
    public typealias UIViewType = UIImageView
    // UIKit interaction that provides the selectable-text overlay.
    private let interaction = ImageAnalysisInteraction()
    public init(uiImage: UIImage) {
        self.uiImage = uiImage
    }

    public func makeUIView(context _: Context) -> UIViewType {
        let imageView = LiveTextImageView()
        imageView.addInteraction(interaction)
        return imageView
    }

    public func updateUIView(_ view: UIViewType, context _: Context) {
        updateView(view)
    }
    #else
    // NOTE(review): on macOS this branch still conforms to UIViewRepresentable
    // but implements makeNSView/updateNSView — presumably the project aliases
    // UIViewRepresentable to NSViewRepresentable there; confirm.
    public typealias NSViewType = UIImageView
    // AppKit counterpart: an overlay view tracking the image view.
    @MainActor
    private let interaction = ImageAnalysisOverlayView()
    public func makeNSView(context _: Context) -> NSViewType {
        let imageView = LiveTextImageView()
        interaction.autoresizingMask = [.width, .height]
        interaction.frame = imageView.bounds
        interaction.trackingImageView = imageView
        imageView.addSubview(interaction)
        return imageView
    }

    public func updateNSView(_ view: NSViewType, context _: Context) {
        updateView(view)
    }
    #endif
    /// Pushes the image into the view and kicks off async text analysis;
    /// on success, text selection is enabled on the overlay.
    @MainActor
    private func updateView(_ view: UIImageView) {
        view.image = uiImage
        view.sizeToFit()
        let image = uiImage
        Task { @MainActor in
            do {
                let configuration = ImageAnalyzer.Configuration([.text])
                let analysis = try await analyzer.analyze(image, orientation: .up, configuration: configuration)
                interaction.preferredInteractionTypes = .textSelection
                interaction.analysis = analysis
            } catch {
                // Analysis failure is non-fatal: the plain image still shows.
                print(error.localizedDescription)
            }
        }
    }
}
|
||||
#endif
|
||||
|
||||
#if os(macOS)
public extension Image {
    /// Bridges the UIKit-style initializer to AppKit, where the project
    /// typealiases `UIImage` to `NSImage`.
    init(uiImage: UIImage) {
        self.init(nsImage: uiImage)
    }
}
#endif
|
||||
|
||||
public extension UIImage {
    /// Returns the rect the image occupies when scaled down (never up) to fit
    /// inside `fitSize`, centered horizontally and offset to the bottom edge
    /// (assuming a top-left-origin coordinate space — confirm on macOS).
    func fitRect(_ fitSize: CGSize) -> CGRect {
        let widthRatio = fitSize.width / size.width
        let heightRatio = fitSize.height / size.height
        // Cap at 1 so images smaller than the target are not enlarged.
        let scale = min(widthRatio, heightRatio, 1)
        let scaledSize = size * scale
        let origin = CGPoint(x: (fitSize.width - scaledSize.width) / 2,
                             y: fitSize.height - scaledSize.height)
        return CGRect(origin: origin, size: scaledSize)
    }
}
|
||||
|
||||
/// Image view that reports no intrinsic size, so layout is driven entirely
/// by external constraints rather than the image's own dimensions.
class LiveTextImageView: UIImageView {
    override var intrinsicContentSize: CGSize {
        return .zero
    }
}
|
||||
171
KSPlayer-main/Sources/KSPlayer/SwiftUI/Slider.swift
Normal file
171
KSPlayer-main/Sources/KSPlayer/SwiftUI/Slider.swift
Normal file
@@ -0,0 +1,171 @@
|
||||
//
|
||||
// Slider.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2023/5/4.
|
||||
//
|
||||
|
||||
import SwiftUI
|
||||
|
||||
#if os(tvOS)
|
||||
import Combine
|
||||
|
||||
/// tvOS replacement for SwiftUI's missing `Slider`, backed by a UIKit control.
@available(tvOS 15.0, *)
public struct Slider: View {
    // Bound value, its allowed range, and the scrubbing callback.
    private let value: Binding<Float>
    private let bounds: ClosedRange<Float>
    private let onEditingChanged: (Bool) -> Void
    @FocusState
    private var isFocused: Bool
    /// - Parameters:
    ///   - value: Value the slider reads and writes.
    ///   - bounds: Allowed range; defaults to `0...1`.
    ///   - onEditingChanged: Receives `true` while the user is scrubbing and
    ///     `false` when editing ends.
    public init(value: Binding<Float>, in bounds: ClosedRange<Float> = 0 ... 1, onEditingChanged: @escaping (Bool) -> Void = { _ in }) {
        self.value = value
        self.bounds = bounds
        self.onEditingChanged = onEditingChanged
    }

    public var body: some View {
        TVOSSlide(value: value, bounds: bounds, isFocused: _isFocused, onEditingChanged: onEditingChanged)
            .focused($isFocused)
    }
}
|
||||
|
||||
/// Representable that hosts `TVSlide` and syncs focus + progress into it.
@available(tvOS 15.0, *)
public struct TVOSSlide: UIViewRepresentable {
    fileprivate let value: Binding<Float>
    fileprivate let bounds: ClosedRange<Float>
    @FocusState
    public var isFocused: Bool
    public let onEditingChanged: (Bool) -> Void
    public typealias UIViewType = TVSlide
    public func makeUIView(context _: Context) -> UIViewType {
        TVSlide(value: value, bounds: bounds, onEditingChanged: onEditingChanged)
    }

    public func updateUIView(_ view: UIViewType, context _: Context) {
        // Focus is signaled via track tint: red when focused, white otherwise.
        if isFocused {
            if view.processView.tintColor == .white {
                view.processView.tintColor = .red
            }
        } else {
            view.processView.tintColor = .white
        }
        // Reading `value` here is what makes SwiftUI re-run this update (and
        // refresh the bar) whenever the bound value changes.
        let process = (value.wrappedValue - bounds.lowerBound) / (bounds.upperBound - bounds.lowerBound)
        if process != view.processView.progress {
            view.processView.progress = process
        }
    }
}
|
||||
|
||||
/// tvOS seek bar backed by a `UIProgressView`, driven by Siri-remote arrow
/// presses (press-and-hold accelerates) and horizontal pan gestures.
public class TVSlide: UIControl {
    fileprivate let processView = UIProgressView()
    // Value captured when a pan begins; pan deltas apply relative to it.
    private var beganValue = Float(0.0)
    private let onEditingChanged: (Bool) -> Void
    fileprivate var value: Binding<Float>
    fileprivate let ranges: ClosedRange<Float>
    // Arrow direction currently held down, if any.
    private var moveDirection: UISwipeGestureRecognizer.Direction?
    // When the current press started; drives the acceleration factor.
    private var pressTime = CACurrentMediaTime()
    // Pending "editing ended" callback, debounced by 1.5 s.
    private var delayItem: DispatchWorkItem?

    // Fires every 0.15 s while an arrow key is held: steps the value by ±10,
    // scaled up (to at most ×10) the longer the key stays pressed.
    private lazy var timer: Timer = .scheduledTimer(withTimeInterval: 0.15, repeats: true) { [weak self] _ in
        guard let self, let moveDirection = self.moveDirection else {
            return
        }
        let rate = min(10, Int((CACurrentMediaTime() - self.pressTime) / 2) + 1)
        let wrappedValue = self.value.wrappedValue + Float((moveDirection == .right ? 10 : -10) * rate)
        if wrappedValue >= self.ranges.lowerBound, wrappedValue <= self.ranges.upperBound {
            self.value.wrappedValue = wrappedValue
        }
        self.onEditingChanged(true)
    }

    public init(value: Binding<Float>, bounds: ClosedRange<Float>, onEditingChanged: @escaping (Bool) -> Void) {
        self.value = value
        ranges = bounds
        self.onEditingChanged = onEditingChanged
        super.init(frame: .zero)
        processView.translatesAutoresizingMaskIntoConstraints = false
        processView.tintColor = .white
        addSubview(processView)
        // The progress bar fills this control edge to edge.
        NSLayoutConstraint.activate([
            processView.topAnchor.constraint(equalTo: topAnchor),
            processView.leadingAnchor.constraint(equalTo: leadingAnchor),
            processView.trailingAnchor.constraint(equalTo: trailingAnchor),
            processView.bottomAnchor.constraint(equalTo: bottomAnchor),
        ])
        let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(actionPanGesture(sender:)))
        addGestureRecognizer(panGestureRecognizer)
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // Arrow press starts the repeat timer in that direction; select press
    // stops scrubbing immediately.
    override open func pressesBegan(_ presses: Set<UIPress>, with event: UIPressesEvent?) {
        guard let presse = presses.first else {
            return
        }
        // A new press cancels any pending "editing ended" notification.
        delayItem?.cancel()
        delayItem = nil
        switch presse.type {
        case .leftArrow:
            moveDirection = .left
            pressTime = CACurrentMediaTime()
            timer.fireDate = Date.distantPast
        case .rightArrow:
            moveDirection = .right
            pressTime = CACurrentMediaTime()
            timer.fireDate = Date.distantPast
        case .select:
            timer.fireDate = Date.distantFuture
            onEditingChanged(false)
        default: super.pressesBegan(presses, with: event)
        }
    }

    // Releasing an arrow stops the repeat timer and schedules the
    // "editing ended" callback after a 1.5 s grace period.
    override open func pressesEnded(_ presses: Set<UIPress>, with _: UIPressesEvent?) {
        timer.fireDate = Date.distantFuture
        guard let presse = presses.first, presse.type == .leftArrow || presse.type == .rightArrow else {
            return
        }
        delayItem = DispatchWorkItem { [weak self] in
            guard let self else { return }
            self.onEditingChanged(false)
        }
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 1.5,
                                      execute: delayItem!)
    }

    // Horizontal pans scrub relative to where the pan began; a full-width pan
    // moves the value by one fifth of the range.
    @objc private func actionPanGesture(sender: UIPanGestureRecognizer) {
        let translation = sender.translation(in: self)
        // Ignore mostly-vertical movement.
        if abs(translation.y) > abs(translation.x) {
            return
        }
        switch sender.state {
        case .began, .possible:
            delayItem?.cancel()
            delayItem = nil
            beganValue = value.wrappedValue
        case .changed:
            let wrappedValue = beganValue + Float(translation.x) / Float(frame.size.width) * (ranges.upperBound - ranges.lowerBound) / 5
            if wrappedValue <= ranges.upperBound, wrappedValue >= ranges.lowerBound {
                value.wrappedValue = wrappedValue
                onEditingChanged(true)
            }
        case .ended:
            delayItem = DispatchWorkItem { [weak self] in
                guard let self else { return }
                self.onEditingChanged(false)
            }
            DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 1.5,
                                          execute: delayItem!)
        case .cancelled, .failed:
            // value.wrappedValue = beganValue
            break
        @unknown default:
            break
        }
    }
}
|
||||
#endif
|
||||
225
KSPlayer-main/Sources/KSPlayer/Video/BrightnessVolume.swift
Normal file
225
KSPlayer-main/Sources/KSPlayer/Video/BrightnessVolume.swift
Normal file
@@ -0,0 +1,225 @@
|
||||
//
|
||||
// BrightnessVolume.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2017/11/3.
|
||||
//
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
|
||||
/// Observes system brightness (KVO) and volume (private notification) changes
/// and surfaces them in an auto-hiding HUD overlay.
@MainActor
open class BrightnessVolume {
    private var brightnessObservation: NSKeyValueObservation?
    public static let shared = BrightnessVolume()
    /// HUD that displays the level; apps may swap in a custom view.
    public var progressView: BrightnessVolumeViewProtocol & UIView = ProgressView()
    init() {
        #if !os(tvOS) && !os(xrOS)
        // Mirror system-brightness changes into the HUD.
        brightnessObservation = UIScreen.main.observe(\.brightness, options: .new) { [weak self] _, change in
            guard KSOptions.enableBrightnessGestures else { return }
            if let self, let value = change.newValue {
                self.appearView()
                self.progressView.setProgress(Float(value), type: 0)
            }
        }
        #endif
        // Undocumented system notification fired on hardware volume changes.
        let name = NSNotification.Name(rawValue: "AVSystemController_SystemVolumeDidChangeNotification")
        NotificationCenter.default.addObserver(self, selector: #selector(volumeIsChanged(notification:)), name: name, object: nil)
        progressView.alpha = 0.0
    }

    /// Re-parents the HUD into `view`.
    public func move(to view: UIView) {
        progressView.move(to: view)
    }

    @objc private func volumeIsChanged(notification: NSNotification) {
        guard KSOptions.enableVolumeGestures else { return }
        // React only to explicit user volume changes, not programmatic ones.
        if let changeReason = notification.userInfo?["AVSystemController_AudioVolumeChangeReasonNotificationParameter"] as? String, changeReason == "ExplicitVolumeChange" {
            if let volume = notification.userInfo?["AVSystemController_AudioVolumeNotificationParameter"] as? CGFloat {
                appearView()
                progressView.setProgress(Float(volume), type: 1)
            }
        }
    }

    /// Shows the HUD and schedules it to start fading after 3 seconds.
    private func appearView() {
        if progressView.alpha == 0.0 {
            progressView.alpha = 1.0
            DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 3) { [weak self] () in
                self?.disAppearView()
            }
        }
    }

    // Fades the HUD out; skipped if something re-showed or hid it meanwhile.
    private func disAppearView() {
        if progressView.alpha == 1.0 {
            UIView.animate(withDuration: 0.8) { [weak self] () in
                self?.progressView.alpha = 0.0
            }
        }
    }

    deinit {
        brightnessObservation?.invalidate()
    }
}
|
||||
|
||||
/// Minimal contract for the brightness/volume HUD overlay view.
public protocol BrightnessVolumeViewProtocol {
    /// Renders `progress`. `type` 0 = brightness, `type` 1 = volume.
    func setProgress(_ progress: Float, type: UInt)
    /// Re-parents the overlay into `view` and installs its constraints.
    func move(to view: UIView)
}
|
||||
|
||||
/// Blurred square HUD: an icon, a title label and a 16-segment level bar.
private final class SystemView: UIVisualEffectView {
    private let stackView = UIStackView()
    private let imageView = UIImageView()
    private let titleLabel = UILabel()
    private lazy var brightnessImage = UIImage(systemName: "sun.max")
    private lazy var volumeImage = UIImage(systemName: "speaker.wave.3.fill")
    private convenience init() {
        self.init(effect: UIBlurEffect(style: .extraLight))
        clipsToBounds = true
        // NOTE(review): `cornerRadius` as a direct property looks like a
        // project-provided UIView extension — confirm.
        cornerRadius = 10
        imageView.image = brightnessImage
        contentView.addSubview(imageView)
        titleLabel.font = .systemFont(ofSize: 16)
        titleLabel.textColor = UIColor(red: 0.25, green: 0.22, blue: 0.21, alpha: 1)
        titleLabel.textAlignment = .center
        titleLabel.text = "亮度"
        contentView.addSubview(titleLabel)
        // Dark track that holds the 16 white level segments.
        let longView = UIView()
        longView.backgroundColor = titleLabel.textColor
        contentView.addSubview(longView)
        stackView.alignment = .center
        stackView.distribution = .fillEqually
        stackView.axis = .horizontal
        stackView.spacing = 1
        longView.addSubview(stackView)
        for _ in 0 ..< 16 {
            let tipView = UIView()
            tipView.backgroundColor = .white
            stackView.addArrangedSubview(tipView)
            tipView.translatesAutoresizingMaskIntoConstraints = false
            NSLayoutConstraint.activate([
                tipView.heightAnchor.constraint(equalTo: stackView.heightAnchor),
            ])
        }
        translatesAutoresizingMaskIntoConstraints = false
        imageView.translatesAutoresizingMaskIntoConstraints = false
        titleLabel.translatesAutoresizingMaskIntoConstraints = false
        longView.translatesAutoresizingMaskIntoConstraints = false
        stackView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            imageView.widthAnchor.constraint(equalToConstant: 79),
            imageView.heightAnchor.constraint(equalToConstant: 76),
            imageView.centerYAnchor.constraint(equalTo: centerYAnchor),
            imageView.centerXAnchor.constraint(equalTo: centerXAnchor),
            titleLabel.topAnchor.constraint(equalTo: topAnchor, constant: 5),
            titleLabel.widthAnchor.constraint(equalTo: widthAnchor),
            titleLabel.centerXAnchor.constraint(equalTo: centerXAnchor),
            titleLabel.heightAnchor.constraint(equalToConstant: 30),
            longView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 13),
            longView.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -13),
            longView.heightAnchor.constraint(equalToConstant: 7),
            longView.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -16),
            stackView.leadingAnchor.constraint(equalTo: longView.leadingAnchor, constant: 1),
            stackView.trailingAnchor.constraint(equalTo: longView.trailingAnchor, constant: -1),
            stackView.topAnchor.constraint(equalTo: longView.topAnchor, constant: 1),
            stackView.bottomAnchor.constraint(equalTo: longView.bottomAnchor, constant: -1),
        ])
    }
}
|
||||
|
||||
// MARK: - BrightnessVolumeViewProtocol

extension SystemView: BrightnessVolumeViewProtocol {
    /// Updates icon/title for the indicator type (0 = brightness, 1 = volume)
    /// and lights up the level segments proportional to `progress`.
    public func setProgress(_ progress: Float, type: UInt) {
        let isBrightness = type == 0
        imageView.image = isBrightness ? brightnessImage : volumeImage
        titleLabel.text = isBrightness
            ? NSLocalizedString("brightness", comment: "")
            : NSLocalizedString("volume", comment: "")
        let segments = stackView.arrangedSubviews
        let level = Int(progress * Float(segments.count))
        for (index, segment) in segments.enumerated() {
            segment.alpha = (level > 0 && index <= level) ? 1 : 0
        }
    }

    /// Re-parents the HUD into `view` (if needed), centered at 155×155.
    public func move(to view: UIView) {
        guard superview != view else { return }
        removeFromSuperview()
        view.addSubview(self)
        translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            centerXAnchor.constraint(equalTo: view.centerXAnchor),
            centerYAnchor.constraint(equalTo: view.centerYAnchor),
            heightAnchor.constraint(equalToConstant: 155),
            widthAnchor.constraint(equalToConstant: 155),
        ])
    }
}
|
||||
|
||||
/// Default HUD: a slim vertical bar (a rotated `UIProgressView`) with an icon
/// underneath; `BrightnessVolume` docks it at the trailing edge.
private final class ProgressView: UIView {
    private lazy var brightnessImage = UIImage(systemName: "sun.max")
    private lazy var volumeImage = UIImage(systemName: "speaker.fill")
    private lazy var brightnessOffImage = UIImage(systemName: "sun.min")
    private lazy var volumeOffImage = UIImage(systemName: "speaker.slash.fill")
    private let progressView = UIProgressView()
    private let imageView = UIImageView()

    override init(frame _: CGRect) {
        super.init(frame: .zero)
        addSubview(progressView)
        addSubview(imageView)
        progressView.progressTintColor = UIColor.white
        progressView.trackTintColor = UIColor.white.withAlphaComponent(0.5)
        progressView.translatesAutoresizingMaskIntoConstraints = false
        // Rotate the horizontal bar to stand vertically.
        // NOTE(review): `centerRotate(byDegrees:)` appears to be a project
        // UIView helper — confirm.
        progressView.centerRotate(byDegrees: -90)
        imageView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            progressView.widthAnchor.constraint(equalToConstant: 115),
            progressView.heightAnchor.constraint(equalToConstant: 2),
            progressView.centerXAnchor.constraint(equalTo: centerXAnchor),
            progressView.topAnchor.constraint(equalTo: topAnchor, constant: 57),
            imageView.leadingAnchor.constraint(equalTo: leadingAnchor),
            imageView.trailingAnchor.constraint(equalTo: trailingAnchor),
            imageView.bottomAnchor.constraint(equalTo: bottomAnchor),
        ])
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
|
||||
|
||||
// MARK: - BrightnessVolumeViewProtocol

extension ProgressView: BrightnessVolumeViewProtocol {
    /// Shows `progress` on the bar and swaps to the "off" glyph at zero.
    func setProgress(_ progress: Float, type: UInt) {
        progressView.setProgress(progress, animated: false)
        let isBrightness = type == 0
        if progress == 0 {
            imageView.image = isBrightness ? brightnessOffImage : volumeOffImage
        } else {
            imageView.image = isBrightness ? brightnessImage : volumeImage
        }
    }

    /// Re-parents the HUD (if needed) to hug the trailing safe edge of `view`.
    func move(to view: UIView) {
        guard superview != view else { return }
        removeFromSuperview()
        view.addSubview(self)
        translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            trailingAnchor.constraint(equalTo: view.safeTrailingAnchor, constant: -10),
            centerYAnchor.constraint(equalTo: view.centerYAnchor),
            heightAnchor.constraint(equalToConstant: 150),
            widthAnchor.constraint(equalToConstant: 24),
        ])
    }
}
|
||||
#endif
|
||||
474
KSPlayer-main/Sources/KSPlayer/Video/IOSVideoPlayerView.swift
Normal file
474
KSPlayer-main/Sources/KSPlayer/Video/IOSVideoPlayerView.swift
Normal file
@@ -0,0 +1,474 @@
|
||||
//
|
||||
// IOSVideoPlayerView.swift
|
||||
// Pods
|
||||
//
|
||||
// Created by kintan on 2018/10/31.
|
||||
//
|
||||
#if canImport(UIKit) && canImport(CallKit)
|
||||
import AVKit
|
||||
import Combine
|
||||
import CoreServices
|
||||
import MediaPlayer
|
||||
import UIKit
|
||||
|
||||
open class IOSVideoPlayerView: VideoPlayerView {
|
||||
private weak var originalSuperView: UIView?
|
||||
private var originalframeConstraints: [NSLayoutConstraint]?
|
||||
private var originalFrame = CGRect.zero
|
||||
private var originalOrientations: UIInterfaceOrientationMask?
|
||||
private weak var fullScreenDelegate: PlayerViewFullScreenDelegate?
|
||||
private var isVolume = false
|
||||
private let volumeView = BrightnessVolume()
|
||||
public var volumeViewSlider = UXSlider()
|
||||
public var backButton = UIButton()
|
||||
public var airplayStatusView: UIView = AirplayStatusView()
|
||||
#if !os(xrOS)
|
||||
public var routeButton = AVRoutePickerView()
|
||||
#endif
|
||||
private let routeDetector = AVRouteDetector()
|
||||
/// Image view to show video cover
|
||||
public var maskImageView = UIImageView()
|
||||
public var landscapeButton: UIControl = UIButton()
|
||||
override open var isMaskShow: Bool {
|
||||
didSet {
|
||||
fullScreenDelegate?.player(isMaskShow: isMaskShow, isFullScreen: landscapeButton.isSelected)
|
||||
}
|
||||
}
|
||||
|
||||
#if !os(xrOS)
|
||||
private var brightness: CGFloat = UIScreen.main.brightness {
|
||||
didSet {
|
||||
UIScreen.main.brightness = brightness
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
override open func customizeUIComponents() {
|
||||
super.customizeUIComponents()
|
||||
if UIDevice.current.userInterfaceIdiom == .phone {
|
||||
subtitleLabel.font = .systemFont(ofSize: 14)
|
||||
}
|
||||
insertSubview(maskImageView, at: 0)
|
||||
maskImageView.contentMode = .scaleAspectFit
|
||||
toolBar.addArrangedSubview(landscapeButton)
|
||||
landscapeButton.tag = PlayerButtonType.landscape.rawValue
|
||||
landscapeButton.addTarget(self, action: #selector(onButtonPressed(_:)), for: .touchUpInside)
|
||||
landscapeButton.tintColor = .white
|
||||
if let landscapeButton = landscapeButton as? UIButton {
|
||||
landscapeButton.setImage(UIImage(systemName: "arrow.up.left.and.arrow.down.right"), for: .normal)
|
||||
landscapeButton.setImage(UIImage(systemName: "arrow.down.right.and.arrow.up.left"), for: .selected)
|
||||
}
|
||||
backButton.tag = PlayerButtonType.back.rawValue
|
||||
backButton.setImage(UIImage(systemName: "chevron.left"), for: .normal)
|
||||
backButton.addTarget(self, action: #selector(onButtonPressed(_:)), for: .touchUpInside)
|
||||
backButton.tintColor = .white
|
||||
navigationBar.insertArrangedSubview(backButton, at: 0)
|
||||
|
||||
addSubview(airplayStatusView)
|
||||
volumeView.move(to: self)
|
||||
#if !targetEnvironment(macCatalyst)
|
||||
let tmp = MPVolumeView(frame: CGRect(x: -100, y: -100, width: 0, height: 0))
|
||||
if let first = (tmp.subviews.first { $0 is UISlider }) as? UISlider {
|
||||
volumeViewSlider = first
|
||||
}
|
||||
#endif
|
||||
backButton.translatesAutoresizingMaskIntoConstraints = false
|
||||
landscapeButton.translatesAutoresizingMaskIntoConstraints = false
|
||||
maskImageView.translatesAutoresizingMaskIntoConstraints = false
|
||||
NSLayoutConstraint.activate([
|
||||
maskImageView.topAnchor.constraint(equalTo: topAnchor),
|
||||
maskImageView.leadingAnchor.constraint(equalTo: leadingAnchor),
|
||||
maskImageView.bottomAnchor.constraint(equalTo: bottomAnchor),
|
||||
maskImageView.trailingAnchor.constraint(equalTo: trailingAnchor),
|
||||
backButton.widthAnchor.constraint(equalToConstant: 25),
|
||||
landscapeButton.widthAnchor.constraint(equalToConstant: 30),
|
||||
airplayStatusView.centerXAnchor.constraint(equalTo: centerXAnchor),
|
||||
airplayStatusView.centerYAnchor.constraint(equalTo: centerYAnchor),
|
||||
])
|
||||
#if !os(xrOS)
|
||||
routeButton.isHidden = true
|
||||
navigationBar.addArrangedSubview(routeButton)
|
||||
routeButton.translatesAutoresizingMaskIntoConstraints = false
|
||||
NSLayoutConstraint.activate([
|
||||
routeButton.widthAnchor.constraint(equalToConstant: 25),
|
||||
])
|
||||
#endif
|
||||
addNotification()
|
||||
}
|
||||
|
||||
override open func resetPlayer() {
|
||||
super.resetPlayer()
|
||||
maskImageView.alpha = 1
|
||||
maskImageView.image = nil
|
||||
panGesture.isEnabled = false
|
||||
#if !os(xrOS)
|
||||
routeButton.isHidden = !routeDetector.multipleRoutesDetected
|
||||
#endif
|
||||
}
|
||||
|
||||
override open func onButtonPressed(type: PlayerButtonType, button: UIButton) {
|
||||
if type == .back, viewController is PlayerFullScreenViewController {
|
||||
updateUI(isFullScreen: false)
|
||||
return
|
||||
}
|
||||
super.onButtonPressed(type: type, button: button)
|
||||
if type == .lock {
|
||||
button.isSelected.toggle()
|
||||
isMaskShow = !button.isSelected
|
||||
button.alpha = 1.0
|
||||
} else if type == .landscape {
|
||||
updateUI(isFullScreen: !landscapeButton.isSelected)
|
||||
}
|
||||
}
|
||||
|
||||
open func isHorizonal() -> Bool {
|
||||
playerLayer?.player.naturalSize.isHorizonal ?? true
|
||||
}
|
||||
|
||||
open func updateUI(isFullScreen: Bool) {
|
||||
guard let viewController else {
|
||||
return
|
||||
}
|
||||
landscapeButton.isSelected = isFullScreen
|
||||
let isHorizonal = isHorizonal()
|
||||
viewController.navigationController?.interactivePopGestureRecognizer?.isEnabled = !isFullScreen
|
||||
if isFullScreen {
|
||||
if viewController is PlayerFullScreenViewController {
|
||||
return
|
||||
}
|
||||
originalSuperView = superview
|
||||
originalframeConstraints = frameConstraints
|
||||
if let originalframeConstraints {
|
||||
NSLayoutConstraint.deactivate(originalframeConstraints)
|
||||
}
|
||||
originalFrame = frame
|
||||
originalOrientations = viewController.supportedInterfaceOrientations
|
||||
let fullVC = PlayerFullScreenViewController(isHorizonal: isHorizonal)
|
||||
fullScreenDelegate = fullVC
|
||||
fullVC.view.addSubview(self)
|
||||
translatesAutoresizingMaskIntoConstraints = false
|
||||
NSLayoutConstraint.activate([
|
||||
topAnchor.constraint(equalTo: fullVC.view.readableTopAnchor),
|
||||
leadingAnchor.constraint(equalTo: fullVC.view.leadingAnchor),
|
||||
trailingAnchor.constraint(equalTo: fullVC.view.trailingAnchor),
|
||||
bottomAnchor.constraint(equalTo: fullVC.view.bottomAnchor),
|
||||
])
|
||||
fullVC.modalPresentationStyle = .fullScreen
|
||||
fullVC.modalPresentationCapturesStatusBarAppearance = true
|
||||
fullVC.transitioningDelegate = self
|
||||
viewController.present(fullVC, animated: true) {
|
||||
KSOptions.supportedInterfaceOrientations = fullVC.supportedInterfaceOrientations
|
||||
}
|
||||
} else {
|
||||
guard viewController is PlayerFullScreenViewController else {
|
||||
return
|
||||
}
|
||||
let presentingVC = viewController.presentingViewController ?? viewController
|
||||
if let originalOrientations {
|
||||
KSOptions.supportedInterfaceOrientations = originalOrientations
|
||||
}
|
||||
presentingVC.dismiss(animated: true) {
|
||||
self.originalSuperView?.addSubview(self)
|
||||
if let constraints = self.originalframeConstraints, !constraints.isEmpty {
|
||||
NSLayoutConstraint.activate(constraints)
|
||||
} else {
|
||||
self.translatesAutoresizingMaskIntoConstraints = true
|
||||
self.frame = self.originalFrame
|
||||
}
|
||||
}
|
||||
}
|
||||
let isLandscape = isFullScreen && isHorizonal
|
||||
updateUI(isLandscape: isLandscape)
|
||||
}
|
||||
|
||||
open func updateUI(isLandscape: Bool) {
|
||||
if isLandscape {
|
||||
topMaskView.isHidden = KSOptions.topBarShowInCase == .none
|
||||
} else {
|
||||
topMaskView.isHidden = KSOptions.topBarShowInCase != .always
|
||||
}
|
||||
toolBar.playbackRateButton.isHidden = false
|
||||
toolBar.srtButton.isHidden = srtControl.subtitleInfos.isEmpty
|
||||
if UIDevice.current.userInterfaceIdiom == .phone {
|
||||
if isLandscape {
|
||||
landscapeButton.isHidden = true
|
||||
toolBar.srtButton.isHidden = srtControl.subtitleInfos.isEmpty
|
||||
} else {
|
||||
toolBar.srtButton.isHidden = true
|
||||
if let image = maskImageView.image {
|
||||
landscapeButton.isHidden = image.size.width < image.size.height
|
||||
} else {
|
||||
landscapeButton.isHidden = false
|
||||
}
|
||||
}
|
||||
toolBar.playbackRateButton.isHidden = !isLandscape
|
||||
} else {
|
||||
landscapeButton.isHidden = true
|
||||
}
|
||||
lockButton.isHidden = !isLandscape
|
||||
judgePanGesture()
|
||||
}
|
||||
|
||||
override open func player(layer: KSPlayerLayer, state: KSPlayerState) {
|
||||
super.player(layer: layer, state: state)
|
||||
if state == .readyToPlay {
|
||||
UIView.animate(withDuration: 0.3) {
|
||||
self.maskImageView.alpha = 0.0
|
||||
}
|
||||
}
|
||||
judgePanGesture()
|
||||
}
|
||||
|
||||
override open func player(layer: KSPlayerLayer, currentTime: TimeInterval, totalTime: TimeInterval) {
|
||||
airplayStatusView.isHidden = !layer.player.isExternalPlaybackActive
|
||||
super.player(layer: layer, currentTime: currentTime, totalTime: totalTime)
|
||||
}
|
||||
|
||||
override open func set(resource: KSPlayerResource, definitionIndex: Int = 0, isSetUrl: Bool = true) {
|
||||
super.set(resource: resource, definitionIndex: definitionIndex, isSetUrl: isSetUrl)
|
||||
maskImageView.image(url: resource.cover)
|
||||
}
|
||||
|
||||
/// Switches to another definition (quality) of the current resource.
/// A snapshot of the current frame is installed as a mask first, so the
/// switch is not visually jarring while the new stream loads.
override open func change(definitionIndex: Int) {
    Task {
        // Capture a thumbnail of the current frame before tearing down playback.
        let image = await playerLayer?.player.thumbnailImageAtCurrentTime()
        if let image {
            self.maskImageView.image = UIImage(cgImage: image)
            self.maskImageView.alpha = 1
        }
        // Deliberately invoked inside the Task, AFTER the snapshot is in place.
        super.change(definitionIndex: definitionIndex)
    }
}
|
||||
|
||||
/// Starts a pan. Vertical pans on the right half of the view adjust volume,
/// on the left half brightness; horizontal pans fall through to seeking.
override open func panGestureBegan(location point: CGPoint, direction: KSPanDirection) {
    guard direction == .vertical else {
        super.panGestureBegan(location: point, direction: direction)
        return
    }
    isVolume = point.x > bounds.size.width / 2
    if isVolume {
        // Seed the accumulator with the current system volume.
        tmpPanValue = volumeViewSlider.value
    }
}
|
||||
|
||||
/// Continues a pan: applies the accumulated delta to volume or brightness
/// for vertical pans; defers to the superclass for horizontal seeking.
override open func panGestureChanged(velocity point: CGPoint, direction: KSPanDirection) {
    if direction == .vertical {
        if isVolume {
            if KSOptions.enableVolumeGestures {
                tmpPanValue += panValue(velocity: point, direction: direction, currentTime: Float(toolBar.currentTime), totalTime: Float(totalTime))
                // Clamp to the slider's valid 0...1 range.
                tmpPanValue = max(min(tmpPanValue, 1), 0)
                volumeViewSlider.value = tmpPanValue
            }
        } else if KSOptions.enableBrightnessGestures {
            #if !os(xrOS)
            // Screen brightness is not adjustable on visionOS.
            brightness += CGFloat(panValue(velocity: point, direction: direction, currentTime: Float(toolBar.currentTime), totalTime: Float(totalTime)))
            #endif
        }
    } else {
        super.panGestureChanged(velocity: point, direction: direction)
    }
}
|
||||
|
||||
/// Enables or disables the pan gesture depending on full-screen state,
/// device idiom and current playback state.
open func judgePanGesture() {
    let treatAsFullScreen = landscapeButton.isSelected || UIDevice.current.userInterfaceIdiom == .pad
    panGesture.isEnabled = treatAsFullScreen
        ? isPlayed && !replayButton.isSelected
        : toolBar.playButton.isSelected
}
|
||||
}
|
||||
|
||||
// MARK: - UIViewControllerTransitioningDelegate

extension IOSVideoPlayerView: UIViewControllerTransitioningDelegate {
    /// Animates presentation by moving the actual player view between containers.
    public func animationController(forPresented _: UIViewController, presenting _: UIViewController, source _: UIViewController) -> UIViewControllerAnimatedTransitioning? {
        guard let originalSuperView, let animationView = playerLayer?.player.view else {
            return nil
        }
        return PlayerTransitionAnimator(containerView: originalSuperView, animationView: animationView)
    }

    /// Animates dismissal with the same animator, in reverse.
    public func animationController(forDismissed _: UIViewController) -> UIViewControllerAnimatedTransitioning? {
        guard let originalSuperView, let animationView = playerLayer?.player.view else {
            return nil
        }
        return PlayerTransitionAnimator(containerView: originalSuperView, animationView: animationView, isDismiss: true)
    }
}
|
||||
|
||||
// MARK: - private functions
|
||||
|
||||
extension IOSVideoPlayerView {
    /// Subscribes to system notifications the player reacts to.
    private func addNotification() {
        // NotificationCenter.default.addObserver(self, selector: #selector(orientationChanged), name: UIApplication.didChangeStatusBarOrientationNotification, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(routesAvailableDidChange), name: .AVRouteDetectorMultipleRoutesDetectedDidChange, object: nil)
    }

    /// Shows the route (AirPlay) button only while multiple playback routes exist.
    @objc private func routesAvailableDidChange(notification _: Notification) {
        #if !os(xrOS)
        routeButton.isHidden = !routeDetector.multipleRoutesDetected
        #endif
    }

    /// Re-applies full-screen layout on rotation, but only for
    /// horizontally-oriented (landscape) content.
    @objc private func orientationChanged(notification _: Notification) {
        guard isHorizonal() else {
            return
        }
        updateUI(isFullScreen: UIApplication.isLandscape)
    }
}
|
||||
|
||||
/// Overlay shown while video is being cast via AirPlay: an AirPlay glyph
/// with a short status message underneath. Hidden by default.
public class AirplayStatusView: UIView {
    override public init(frame: CGRect) {
        super.init(frame: frame)
        let iconView = UIImageView(image: UIImage(systemName: "airplayvideo"))
        let messageLabel = UILabel()
        messageLabel.backgroundColor = .clear
        messageLabel.textColor = .white
        messageLabel.font = .systemFont(ofSize: 14)
        messageLabel.text = NSLocalizedString("AirPlay 投放中", comment: "")
        messageLabel.textAlignment = .center
        addSubview(iconView)
        addSubview(messageLabel)
        translatesAutoresizingMaskIntoConstraints = false
        iconView.translatesAutoresizingMaskIntoConstraints = false
        messageLabel.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            widthAnchor.constraint(equalToConstant: 100),
            heightAnchor.constraint(equalToConstant: 115),
            iconView.topAnchor.constraint(equalTo: topAnchor),
            iconView.centerXAnchor.constraint(equalTo: centerXAnchor),
            iconView.widthAnchor.constraint(equalToConstant: 100),
            iconView.heightAnchor.constraint(equalToConstant: 100),
            messageLabel.bottomAnchor.constraint(equalTo: bottomAnchor),
            messageLabel.leadingAnchor.constraint(equalTo: leadingAnchor),
            messageLabel.trailingAnchor.constraint(equalTo: trailingAnchor),
            messageLabel.heightAnchor.constraint(equalToConstant: 15),
        ])
        // Stays hidden until external playback becomes active.
        isHidden = true
    }

    @available(*, unavailable)
    public required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
|
||||
|
||||
public extension KSOptions {
    /// Orientation mask the app should return from
    /// `application(_:supportedInterfaceOrientationsFor:)`.
    /// The full-screen controller mutates this to force landscape while presented.
    static var supportedInterfaceOrientations = UIInterfaceOrientationMask.portrait
}
|
||||
|
||||
extension UIApplication {
    /// Whether the active window scene is currently in a landscape orientation.
    static var isLandscape: Bool {
        // `UIApplication.windows` is deprecated since iOS 15; query the
        // connected scenes for a window scene instead.
        let windowScene = UIApplication.shared.connectedScenes
            .compactMap { $0 as? UIWindowScene }
            .first
        return windowScene?.interfaceOrientation.isLandscape ?? false
    }
}
|
||||
|
||||
// MARK: - menu
|
||||
|
||||
extension IOSVideoPlayerView {
    /// Participates in the responder chain so menu key commands reach the view.
    override open var canBecomeFirstResponder: Bool {
        true
    }

    /// Only the "Open File" command is handled by this view.
    /// (The original returned `true` unconditionally — the selector check was
    /// dead code — which also enabled unrelated editing actions; restrict the
    /// answer to the supported selector.)
    override open func canPerformAction(_ action: Selector, withSender _: Any?) -> Bool {
        action == #selector(IOSVideoPlayerView.openFileAction)
    }

    /// Presents a document picker for media and subtitle files.
    @objc fileprivate func openFileAction(_: AnyObject) {
        let documentPicker = UIDocumentPickerViewController(documentTypes: [kUTTypeAudio, kUTTypeMovie, kUTTypePlainText] as [String], in: .open)
        documentPicker.delegate = self
        viewController?.present(documentPicker, animated: true, completion: nil)
    }
}
|
||||
|
||||
extension IOSVideoPlayerView: UIDocumentPickerDelegate {
    /// Plays a picked media file, or loads any other picked file as a subtitle.
    public func documentPicker(_: UIDocumentPickerViewController, didPickDocumentsAt urls: [URL]) {
        guard let url = urls.first else { return }
        if url.isMovie || url.isAudio {
            set(url: url, options: KSOptions())
        } else {
            srtControl.selectedSubtitleInfo = URLSubtitleInfo(url: url)
        }
    }
}
|
||||
|
||||
#endif
|
||||
|
||||
#if os(iOS)
|
||||
@MainActor
/// Installs the player's custom entries into the app's main menu
/// (iPad/Catalyst). Construct it from `AppDelegate.buildMenu(with:)`.
public class MenuController {
    public init(with builder: UIMenuBuilder) {
        builder.remove(menu: .format)
        builder.insertChild(MenuController.openFileMenu(), atStartOfMenu: .file)
        // builder.insertChild(MenuController.openURLMenu(), atStartOfMenu: .file)
        // builder.insertChild(MenuController.navigationMenu(), atStartOfMenu: .file)
    }

    /// Builds the ⌘O "Open File" entry, routed to `IOSVideoPlayerView.openFileAction(_:)`.
    class func openFileMenu() -> UIMenu {
        let openCommand = UIKeyCommand(input: "O", modifierFlags: .command, action: #selector(IOSVideoPlayerView.openFileAction(_:)))
        openCommand.title = NSLocalizedString("Open File", comment: "")
        let openMenu = UIMenu(title: "",
                              image: nil,
                              identifier: UIMenu.Identifier("com.example.apple-samplecode.menus.openFileMenu"),
                              options: .displayInline,
                              children: [openCommand])
        return openMenu
    }

    // class func openURLMenu() -> UIMenu {
    //     let openCommand = UIKeyCommand(input: "O", modifierFlags: [.command, .shift], action: #selector(IOSVideoPlayerView.openURLAction(_:)))
    //     openCommand.title = NSLocalizedString("Open URL", comment: "")
    //     let openMenu = UIMenu(title: "",
    //                           image: nil,
    //                           identifier: UIMenu.Identifier("com.example.apple-samplecode.menus.openURLMenu"),
    //                           options: .displayInline,
    //                           children: [openCommand])
    //     return openMenu
    // }
    // class func navigationMenu() -> UIMenu {
    //     let arrowKeyChildrenCommands = Arrows.allCases.map { arrow in
    //         UIKeyCommand(title: arrow.localizedString(),
    //                      image: nil,
    //                      action: #selector(IOSVideoPlayerView.navigationMenuAction(_:)),
    //                      input: arrow.command,
    //                      modifierFlags: .command)
    //     }
    //     return UIMenu(title: NSLocalizedString("NavigationTitle", comment: ""),
    //                   image: nil,
    //                   identifier: UIMenu.Identifier("com.example.apple-samplecode.menus.navigationMenu"),
    //                   options: [],
    //                   children: arrowKeyChildrenCommands)
    // }

    /// Arrow-key commands for the (currently disabled) navigation menu.
    enum Arrows: String, CaseIterable {
        case rightArrow
        case leftArrow
        case upArrow
        case downArrow
        func localizedString() -> String {
            NSLocalizedString("\(rawValue)", comment: "")
        }

        /// The `UIKeyCommand` input string for this arrow.
        @MainActor
        var command: String {
            switch self {
            case .rightArrow:
                return UIKeyCommand.inputRightArrow
            case .leftArrow:
                return UIKeyCommand.inputLeftArrow
            case .upArrow:
                return UIKeyCommand.inputUpArrow
            case .downArrow:
                return UIKeyCommand.inputDownArrow
            }
        }
    }
}
|
||||
#endif
|
||||
99
KSPlayer-main/Sources/KSPlayer/Video/KSMenu.swift
Normal file
99
KSPlayer-main/Sources/KSPlayer/Video/KSMenu.swift
Normal file
@@ -0,0 +1,99 @@
|
||||
//
|
||||
// KSMenu.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by Alanko5 on 15/12/2022.
|
||||
//
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
|
||||
extension UIMenu {
    /// Marks the child action whose title matches `actionTitle` as selected
    /// (`.on`) and clears every other action's state. Returns `self` so the
    /// result can be reassigned to `button.menu`.
    func updateActionState(actionTitle: String? = nil) -> UIMenu {
        for case let action as UIAction in children {
            action.state = action.title == actionTitle ? .on : .off
        }
        return self
    }

    /// Builds a single-selection menu from `list`.
    ///
    /// Returns nil when there are not enough entries to make a choice
    /// meaningful (fewer than two, or fewer than one when a "Disabled" entry
    /// is added). The entry matching `current` (by title) starts checked.
    @available(tvOS 15.0, *)
    convenience init?<U>(title: String, current: U?, list: [U], addDisabled: Bool = false, titleFunc: (U) -> String, completition: @escaping (String, U?) -> Void) {
        guard list.count >= (addDisabled ? 1 : 2) else {
            return nil
        }
        var actions = list.map { element -> UIAction in
            let action = UIAction(title: titleFunc(element)) { action in
                completition(action.title, element)
            }
            if let current, titleFunc(element) == titleFunc(current) {
                action.state = .on
            }
            return action
        }
        if addDisabled {
            actions.insert(UIAction(title: "Disabled") { action in
                completition(action.title, nil)
            }, at: 0)
        }

        self.init(title: title, children: actions)
    }
}
|
||||
|
||||
#if !os(tvOS)
|
||||
extension UIButton {
    /// Attaches a selection menu built from `list` to the button, and keeps
    /// the checked item in sync after every selection.
    @available(iOS 14.0, *)
    func setMenu<U>(title: String, current: U?, list: [U], addDisabled: Bool = false, titleFunc: (U) -> String, completition handler: @escaping (U?) -> Void) {
        menu = UIMenu(title: title, current: current, list: list, addDisabled: addDisabled, titleFunc: titleFunc) { [weak self] title, value in
            guard let self else { return }
            handler(value)
            // Re-check the freshly selected item in the stored menu.
            self.menu = self.menu?.updateActionState(actionTitle: title)
        }
    }
}
|
||||
#endif
|
||||
|
||||
#if canImport(UIKit)
|
||||
|
||||
#else
|
||||
public typealias UIMenu = NSMenu
|
||||
|
||||
/// AppKit stand-in for UIKit's `UIAction`: an `NSMenuItem` that invokes a
/// closure when the item is selected.
public final class UIAction: NSMenuItem {
    private let handler: (UIAction) -> Void
    init(title: String, handler: @escaping (UIAction) -> Void) {
        self.handler = handler
        super.init(title: title, action: #selector(menuPressed), keyEquivalent: "")
        state = .off
        // The item is its own target so the selector can reach `handler`.
        target = self
    }

    @objc private func menuPressed() {
        handler(self)
    }

    @available(*, unavailable)
    required init(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
|
||||
|
||||
extension UIMenu {
    /// Mirrors UIKit's `UIMenu.children` on AppKit by exposing the menu items.
    var children: [NSMenuItem] {
        items
    }

    /// Mirrors UIKit's `UIMenu(title:children:)` initializer on AppKit.
    convenience init(title: String, children: [UIAction]) {
        self.init(title: title)
        children.forEach(addItem)
    }
}
|
||||
#endif
|
||||
147
KSPlayer-main/Sources/KSPlayer/Video/KSPlayerItem.swift
Normal file
147
KSPlayer-main/Sources/KSPlayer/Video/KSPlayerItem.swift
Normal file
@@ -0,0 +1,147 @@
|
||||
//
|
||||
// KSPlayerItem.swift
|
||||
// Pods
|
||||
//
|
||||
// Created by kintan on 16/5/21.
|
||||
//
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Foundation
|
||||
import MediaPlayer
|
||||
|
||||
/// A playable item: one or more definitions (qualities) of the same video,
/// plus presentation metadata such as cover art and subtitles.
public class KSPlayerResource: Equatable, Hashable {
    public static func == (lhs: KSPlayerResource, rhs: KSPlayerResource) -> Bool {
        lhs.definitions == rhs.definitions
    }

    public let name: String
    public let definitions: [KSPlayerResourceDefinition]
    public let cover: URL?
    public let subtitleDataSouce: SubtitleDataSouce?
    public var nowPlayingInfo: KSNowPlayableMetadata?
    public let extinf: [String: String]?

    /// Convenience initializer for a single-definition resource.
    ///
    /// - Parameters:
    ///   - url: The video URL.
    ///   - options: Options for the underlying asset/player.
    ///   - name: Display name of the video.
    ///   - cover: Cover image shown before playback starts, hidden once playing.
    ///   - subtitleURLs: Optional external subtitle URLs.
    ///   - extinf: Raw EXTINF attributes from an M3U playlist, if any.
    public convenience init(url: URL, options: KSOptions = KSOptions(), name: String = "", cover: URL? = nil, subtitleURLs: [URL]? = nil, extinf: [String: String]? = nil) {
        let definition = KSPlayerResourceDefinition(url: url, definition: "", options: options)
        // Wrap the subtitle URLs in a data source only when any were supplied.
        let subtitleDataSouce = subtitleURLs.map { URLSubtitleDataSouce(urls: $0) }
        self.init(name: name, definitions: [definition], cover: cover, subtitleDataSouce: subtitleDataSouce, extinf: extinf)
    }

    /// Designated initializer for a resource with multiple definitions.
    ///
    /// - Parameters:
    ///   - name: Display name of the video.
    ///   - definitions: The selectable video definitions.
    ///   - cover: Optional cover image URL.
    ///   - subtitleDataSouce: Optional subtitle provider.
    ///   - extinf: Raw EXTINF attributes from an M3U playlist, if any.
    public init(name: String, definitions: [KSPlayerResourceDefinition], cover: URL? = nil, subtitleDataSouce: SubtitleDataSouce? = nil, extinf: [String: String]? = nil) {
        self.name = name
        self.cover = cover
        self.subtitleDataSouce = subtitleDataSouce
        self.definitions = definitions
        self.extinf = extinf
        nowPlayingInfo = KSNowPlayableMetadata(title: name)
    }

    public func hash(into hasher: inout Hasher) {
        hasher.combine(definitions)
    }
}
|
||||
|
||||
extension KSPlayerResource: Identifiable {
    // The resource acts as its own identifier; uniqueness follows the
    // Hashable conformance above (driven by `definitions`).
    public var id: KSPlayerResource { self }
}
|
||||
|
||||
/// One selectable variant (definition/quality) of a video.
/// Equality and hashing are driven by the URL alone.
public struct KSPlayerResourceDefinition: Hashable {
    public static func == (lhs: KSPlayerResourceDefinition, rhs: KSPlayerResourceDefinition) -> Bool {
        lhs.url == rhs.url
    }

    public let url: URL
    public let definition: String
    public let options: KSOptions
    /// Uses the URL's last path component as the definition label.
    public init(url: URL) {
        self.init(url: url, definition: url.lastPathComponent)
    }

    /// Video resource item with a definition label and asset options.
    ///
    /// - Parameters:
    ///   - url: The video URL.
    ///   - definition: Human-readable label for this variant (e.g. "720p").
    ///   - options: Options used when initializing the underlying asset.
    public init(url: URL, definition: String, options: KSOptions = KSOptions()) {
        self.url = url
        self.definition = definition
        self.options = options
    }

    // Hashing mirrors `==`: the URL alone identifies a definition.
    public func hash(into hasher: inout Hasher) {
        hasher.combine(url)
    }
}
|
||||
|
||||
extension KSPlayerResourceDefinition: Identifiable {
    // Value-typed identity: the definition itself (URL-based Hashable) is the ID.
    public var id: Self { self }
}
|
||||
|
||||
/// Metadata bundle translated into an `MPNowPlayingInfoCenter` dictionary.
public struct KSNowPlayableMetadata {
    private let mediaType: MPNowPlayingInfoMediaType?
    private let isLiveStream: Bool?
    private let title: String
    private let artist: String?
    private let artwork: MPMediaItemArtwork?
    private let albumArtist: String?
    private let albumTitle: String?
    /// The `MPNowPlayingInfoCenter`-compatible dictionary. Fields that are
    /// nil are simply absent from the result.
    var nowPlayingInfo: [String: Any] {
        var info = [String: Any]()
        info[MPNowPlayingInfoPropertyMediaType] = mediaType?.rawValue
        info[MPNowPlayingInfoPropertyIsLiveStream] = isLiveStream
        info[MPMediaItemPropertyTitle] = title
        info[MPMediaItemPropertyArtist] = artist
        if #available(OSX 10.13.2, *) {
            info[MPMediaItemPropertyArtwork] = artwork
        }
        info[MPMediaItemPropertyAlbumArtist] = albumArtist
        info[MPMediaItemPropertyAlbumTitle] = albumTitle
        return info
    }

    public init(mediaType: MPNowPlayingInfoMediaType? = nil, isLiveStream: Bool? = nil, title: String, artist: String? = nil,
                artwork: MPMediaItemArtwork? = nil, albumArtist: String? = nil, albumTitle: String? = nil)
    {
        self.mediaType = mediaType
        self.isLiveStream = isLiveStream
        self.title = title
        self.artist = artist
        self.artwork = artwork
        self.albumArtist = albumArtist
        self.albumTitle = albumTitle
    }

    /// Convenience that wraps a plain image into `MPMediaItemArtwork` and
    /// delegates to the designated initializer.
    public init(mediaType: MPNowPlayingInfoMediaType? = nil, isLiveStream: Bool? = nil, title: String, artist: String? = nil, image: UIImage, albumArtist: String? = nil, albumTitle: String? = nil) {
        self.init(mediaType: mediaType,
                  isLiveStream: isLiveStream,
                  title: title,
                  artist: artist,
                  artwork: MPMediaItemArtwork(boundsSize: image.size) { _ in image },
                  albumArtist: albumArtist,
                  albumTitle: albumTitle)
    }
}
|
||||
209
KSPlayer-main/Sources/KSPlayer/Video/MacVideoPlayerView.swift
Normal file
209
KSPlayer-main/Sources/KSPlayer/Video/MacVideoPlayerView.swift
Normal file
@@ -0,0 +1,209 @@
|
||||
//
|
||||
// MacVideoPlayerView.swift
|
||||
// Pods
|
||||
//
|
||||
// Created by kintan on 2018/10/31.
|
||||
//
|
||||
#if !canImport(UIKit)
|
||||
|
||||
import AppKit
|
||||
import AVFoundation
|
||||
|
||||
public extension NSPasteboard.PasteboardType {
    // Legacy pasteboard identifiers used when files/URLs are dragged in.
    static let nsURL = NSPasteboard.PasteboardType("NSURL")
    static let nsFilenames = NSPasteboard.PasteboardType("NSFilenamesPboardType")
}
|
||||
|
||||
public extension NSDraggingInfo {
    /// Extracts the first dragged file path as a file URL, if any.
    /// NOTE(review): only the legacy `.nsFilenames` representation is read;
    /// drags carrying only `.nsURL` (which the view registers for) fall
    /// through to nil — confirm this is intended.
    @MainActor
    func getUrl() -> URL? {
        guard let types = draggingPasteboard.types else { return nil }

        if types.contains(.nsFilenames) {
            guard let paths = draggingPasteboard.propertyList(forType: .nsFilenames) as? [String] else { return nil }
            let urls = paths.map { URL(fileURLWithPath: $0) }
            return urls.first
        }
        return nil
    }
}
|
||||
|
||||
/// macOS flavor of the player view: accepts drag-and-drop of media files
/// and locks the window's aspect ratio to the video once it is ready.
open class MacVideoPlayerView: VideoPlayerView {
    override open func customizeUIComponents() {
        super.customizeUIComponents()
        // Accept file, URL and plain-string drags onto the player surface.
        registerForDraggedTypes([.nsFilenames, .nsURL, .string])
    }

    override open func player(layer: KSPlayerLayer, state: KSPlayerState) {
        super.player(layer: layer, state: state)
        guard state == .readyToPlay else { return }
        // Match the window's aspect ratio to the video's natural size.
        window?.aspectRatio = layer.player.naturalSize
    }
}
|
||||
|
||||
extension MacVideoPlayerView {
    /// Rebuilds the tracking area so mouse enter/move/exit events always
    /// cover the current bounds.
    override open func updateTrackingAreas() {
        for trackingArea in trackingAreas {
            removeTrackingArea(trackingArea)
        }
        let trackingArea = NSTrackingArea(rect: bounds, options: [.mouseEnteredAndExited, .mouseMoved, .activeInKeyWindow], owner: self, userInfo: nil)
        addTrackingArea(trackingArea)
    }

    // Show the control mask while the mouse is over the player; hide on exit.
    override open func mouseEntered(with _: NSEvent) {
        isMaskShow = true
    }

    override open func mouseMoved(with _: NSEvent) {
        isMaskShow = true
    }

    override open func mouseExited(with _: NSEvent) {
        isMaskShow = false
    }

    /// Trackpad scrolling: horizontal scroll seeks; vertical scroll changes
    /// volume. The gesture axis is locked at `.began` and the accumulated
    /// value is committed at `.ended`.
    override open func scrollWheel(with event: NSEvent) {
        if event.phase.contains(.began) {
            if event.scrollingDeltaX != 0 {
                scrollDirection = .horizontal
                tmpPanValue = toolBar.timeSlider.value
            } else if event.scrollingDeltaY != 0 {
                scrollDirection = .vertical
                tmpPanValue = 1
            }
        } else if event.phase.contains(.changed) {
            let delta = scrollDirection == .horizontal ? event.scrollingDeltaX : event.scrollingDeltaY
            if scrollDirection == .horizontal {
                // Scale the raw delta to seconds relative to the full duration.
                tmpPanValue += Float(delta / 10000) * Float(totalTime)
                showSeekToView(second: Double(tmpPanValue), isAdd: delta > 0)
            } else {
                if KSOptions.enableVolumeGestures {
                    tmpPanValue -= Float(delta / 1000)
                    // Clamp to the valid 0...1 volume range.
                    tmpPanValue = max(min(tmpPanValue, 1), 0)
                }
            }
        } else if event.phase.contains(.ended) {
            if scrollDirection == .horizontal {
                slider(value: Double(tmpPanValue), event: .touchUpInside)
                hideSeekToView()
            } else {
                if KSOptions.enableVolumeGestures {
                    playerLayer?.player.playbackVolume = tmpPanValue
                }
            }
        }
    }

    // Receive keyDown events.
    override open var acceptsFirstResponder: Bool {
        true
    }

    /// Keyboard control: ←/→ seek by 1% of the duration; space toggles play/pause.
    override open func keyDown(with event: NSEvent) {
        if let specialKey = event.specialKey {
            if specialKey == .rightArrow {
                slider(value: Double(toolBar.timeSlider.value) + 0.01 * totalTime, event: .touchUpInside)
            } else if specialKey == .leftArrow {
                slider(value: Double(toolBar.timeSlider.value) - 0.01 * totalTime, event: .touchUpInside)
            }
        } else if let character = event.characters?.first {
            if character == " " {
                onButtonPressed(toolBar.playButton)
            }
        }
    }

    override open func draggingEntered(_: NSDraggingInfo) -> NSDragOperation {
        .copy
    }

    /// Dropped media files start playback; anything else is loaded as a subtitle.
    override open func performDragOperation(_ sender: NSDraggingInfo) -> Bool {
        if let url = sender.getUrl() {
            if url.isMovie || url.isAudio {
                set(resource: KSPlayerResource(url: url, options: KSOptions()))
                return true
            } else {
                srtControl.selectedSubtitleInfo = URLSubtitleInfo(url: url)
                return true
            }
        }
        return false
    }
}
|
||||
|
||||
/// AppKit replacement for UIKit's activity indicator: a spinning "loading"
/// image with a percentage label underneath.
class UIActivityIndicatorView: UIView {
    private let loadingView = NSView()
    private let progressLabel = UILabel()
    /// Buffer/download progress in 0...1; mirrored into the percentage label.
    public var progress: Double = 0 {
        didSet {
            // A leftover debug `print` was removed here.
            progressLabel.stringValue = "\(Int(progress * 100))%"
        }
    }

    override init(frame frameRect: CGRect) {
        super.init(frame: frameRect)
        wantsLayer = true
        backingLayer?.backgroundColor = UIColor(white: 0, alpha: 0.2).cgColor
        setupLoadingView()
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Builds the spinner image and the progress label, then starts spinning.
    private func setupLoadingView() {
        loadingView.wantsLayer = true
        addSubview(loadingView)
        let imageView = NSImageView()
        imageView.image = KSOptions.image(named: "loading")
        loadingView.addSubview(imageView)
        imageView.imageScaling = .scaleAxesIndependently
        imageView.translatesAutoresizingMaskIntoConstraints = false
        loadingView.translatesAutoresizingMaskIntoConstraints = false
        translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            widthAnchor.constraint(equalToConstant: 110),
            heightAnchor.constraint(equalToConstant: 110),
            loadingView.centerXAnchor.constraint(equalTo: centerXAnchor),
            loadingView.centerYAnchor.constraint(equalTo: centerYAnchor),
            loadingView.widthAnchor.constraint(equalTo: widthAnchor),
            loadingView.heightAnchor.constraint(equalTo: heightAnchor),
            imageView.bottomAnchor.constraint(equalTo: loadingView.bottomAnchor),
            imageView.leadingAnchor.constraint(equalTo: loadingView.leadingAnchor),
            // NOTE(review): height is pinned to widthAnchor and width to
            // heightAnchor; both are 110 so the result is square — confirm
            // the crossed anchors are intentional.
            imageView.heightAnchor.constraint(equalTo: widthAnchor),
            imageView.widthAnchor.constraint(equalTo: heightAnchor),
        ])
        progressLabel.alignment = .center
        progressLabel.font = NSFont.systemFont(ofSize: 18, weight: .medium)
        addSubview(progressLabel)
        progressLabel.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            progressLabel.centerXAnchor.constraint(equalTo: centerXAnchor),
            progressLabel.topAnchor.constraint(equalTo: loadingView.bottomAnchor, constant: 20),
            progressLabel.widthAnchor.constraint(equalToConstant: 100),
            progressLabel.heightAnchor.constraint(equalToConstant: 22),
        ])
        startAnimating()
    }
}
|
||||
|
||||
extension UIActivityIndicatorView: LoadingIndector {
    /// Spins the loading image indefinitely around its center.
    func startAnimating() {
        // Avoid the original's force-unwrap of `loadingView.layer!`: bail out
        // gracefully if the view is not layer-backed yet.
        guard let layer = loadingView.backingLayer else { return }
        layer.position = CGPoint(x: layer.frame.midX, y: layer.frame.midY)
        layer.anchorPoint = CGPoint(x: 0.5, y: 0.5)
        let rotationAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
        rotationAnimation.duration = 1.0
        rotationAnimation.repeatCount = MAXFLOAT
        rotationAnimation.fromValue = 0.0
        rotationAnimation.toValue = Float.pi * -2
        layer.add(rotationAnimation, forKey: "loading")
    }

    /// Removes the spin animation.
    func stopAnimating() {
        loadingView.backingLayer?.removeAnimation(forKey: "loading")
    }
}
|
||||
#endif
|
||||
@@ -0,0 +1,69 @@
|
||||
//
|
||||
// PlayerFullScreenViewController.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2021/8/20.
|
||||
//
|
||||
#if canImport(UIKit) && !os(tvOS)
|
||||
|
||||
import UIKit
|
||||
|
||||
/// Lets the full-screen container react to playback-controls (mask)
/// visibility changes, e.g. to hide the status bar alongside them.
protocol PlayerViewFullScreenDelegate: AnyObject {
    func player(isMaskShow: Bool, isFullScreen: Bool)
}
|
||||
|
||||
/// Hosts the player view while it is presented full screen, forcing the
/// appropriate interface orientation and hiding system chrome.
class PlayerFullScreenViewController: UIViewController {
    // Whether the video is landscape-shaped; decides the forced orientation.
    private let isHorizonal: Bool
    // Backing store for `prefersStatusBarHidden`.
    // NOTE(review): "Hiden" is a typo for "Hidden"; kept unchanged because the
    // sibling extension reads it too.
    private var statusHiden = false
    init(isHorizonal: Bool) {
        self.isHorizonal = isHorizonal
        super.init(nibName: nil, bundle: nil)
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Force the app-level orientation mask while full screen is presented.
        KSOptions.supportedInterfaceOrientations = isHorizonal ? .landscapeRight : .portrait
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        navigationController?.navigationBar.isHidden = true
        // Keep the screen awake during playback.
        UIApplication.shared.isIdleTimerDisabled = true
    }

    override var shouldAutorotate: Bool {
        KSOptions.supportedInterfaceOrientations == .all
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        .all
    }

    override var prefersHomeIndicatorAutoHidden: Bool {
        true
    }

    override var preferredStatusBarStyle: UIStatusBarStyle {
        .lightContent
    }

    override var prefersStatusBarHidden: Bool {
        statusHiden
    }
}
|
||||
|
||||
extension PlayerFullScreenViewController: PlayerViewFullScreenDelegate {
    /// Hides/shows the status bar together with the playback controls.
    func player(isMaskShow: Bool, isFullScreen: Bool) {
        if isFullScreen {
            statusHiden = !isMaskShow
            // The original called setNeedsFocusUpdate(), which does not refresh
            // status-bar appearance; `prefersStatusBarHidden` is only re-queried
            // after setNeedsStatusBarAppearanceUpdate().
            setNeedsStatusBarAppearanceUpdate()
        }
    }
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,71 @@
|
||||
//
|
||||
// PlayerTransitionAnimator.swift
|
||||
// KSPlayer
|
||||
//
|
||||
// Created by kintan on 2021/8/20.
|
||||
//
|
||||
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
|
||||
/// Animates the player view between its inline container and a full-screen
/// presentation by detaching and moving/scaling the actual player view,
/// then re-inserting it where it came from when the animation completes.
class PlayerTransitionAnimator: NSObject, UIViewControllerAnimatedTransitioning {
    private let isDismiss: Bool
    private let containerView: UIView
    private let animationView: UIView
    // The inline container's center in window coordinates, captured up front.
    private let fromCenter: CGPoint
    init(containerView: UIView, animationView: UIView, isDismiss: Bool = false) {
        self.containerView = containerView
        self.animationView = animationView
        self.isDismiss = isDismiss
        fromCenter = containerView.superview?.convert(containerView.center, to: nil) ?? .zero
        super.init()
    }

    func transitionDuration(using _: UIViewControllerContextTransitioning?) -> TimeInterval {
        0.3
    }

    func animateTransition(using transitionContext: UIViewControllerContextTransitioning) {
        // Remember where the player view lives so it can be restored afterwards
        // (same superview, same z-order, same constraints).
        let animationSuperView = animationView.superview
        let animationViewIndex = animationSuperView?.subviews.firstIndex(of: animationView) ?? 0
        let initSize = animationView.frame.size
        let animationFrameConstraints = animationView.frameConstraints
        guard let presentedView = transitionContext.view(forKey: isDismiss ? .from : .to) else {
            return
        }
        if isDismiss {
            containerView.layoutIfNeeded()
            presentedView.bounds = containerView.bounds
            presentedView.removeFromSuperview()
        } else {
            if let viewController = transitionContext.viewController(forKey: .to) {
                presentedView.frame = transitionContext.finalFrame(for: viewController)
            }
        }
        presentedView.layoutIfNeeded()
        // Animate the player view itself inside the transition container.
        transitionContext.containerView.addSubview(animationView)
        animationView.translatesAutoresizingMaskIntoConstraints = true
        guard let transform = transitionContext.viewController(forKey: .from)?.view.transform else {
            return
        }
        // Start scaled down to the inline size, honoring any rotation transform.
        animationView.transform = CGAffineTransform(scaleX: initSize.width / animationView.frame.size.width, y: initSize.height / animationView.frame.size.height).concatenating(transform)
        let toCenter = transitionContext.containerView.center
        // With a rotated source view the captured center's axes are swapped.
        let fromCenter = transform == .identity ? fromCenter : fromCenter.reverse
        animationView.center = isDismiss ? toCenter : fromCenter
        UIView.animate(withDuration: transitionDuration(using: transitionContext), delay: 0, options: .curveEaseInOut) {
            self.animationView.transform = .identity
            self.animationView.center = self.isDismiss ? fromCenter : toCenter
        } completion: { _ in
            // Put the player view back where it came from and restore layout.
            animationSuperView?.insertSubview(self.animationView, at: animationViewIndex)
            if !animationFrameConstraints.isEmpty {
                self.animationView.translatesAutoresizingMaskIntoConstraints = false
                NSLayoutConstraint.activate(animationFrameConstraints)
            }
            if !self.isDismiss {
                transitionContext.containerView.addSubview(presentedView)
            }
            transitionContext.completeTransition(true)
        }
    }
}
|
||||
#endif
|
||||
75
KSPlayer-main/Sources/KSPlayer/Video/SeekView.swift
Normal file
75
KSPlayer-main/Sources/KSPlayer/Video/SeekView.swift
Normal file
@@ -0,0 +1,75 @@
|
||||
//
|
||||
// SeekView.swift
|
||||
// KSPlayer-iOS
|
||||
//
|
||||
// Created by kintan on 2018/11/14.
|
||||
//
|
||||
#if canImport(UIKit)
|
||||
import UIKit
|
||||
#else
|
||||
import AppKit
|
||||
#endif
|
||||
/// Anything able to display a seek-target message (e.g. "00:42") during scrubbing.
public protocol SeekViewProtocol {
    func set(text: String, isAdd: Bool)
}
|
||||
|
||||
/// Toast shown while scrubbing: a forward/backward glyph plus the target time.
class SeekView: UIView {
    private let seekToViewImage = UIImageView()
    private let seekToLabel = UILabel()
    override public init(frame: CGRect) {
        super.init(frame: frame)
        addSubview(seekToViewImage)
        addSubview(seekToLabel)
        seekToLabel.font = .systemFont(ofSize: 13)
        seekToLabel.textColor = UIColor(red: 0.9098, green: 0.9098, blue: 0.9098, alpha: 1.0)
        backgroundColor = UIColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.7)
        cornerRadius = 4
        clipsToBounds = true
        // Hidden until a seek gesture starts.
        isHidden = true
        if #available(macOS 11.0, *) {
            seekToViewImage.image = UIImage(systemName: "forward.fill")
        }
        translatesAutoresizingMaskIntoConstraints = false
        seekToViewImage.translatesAutoresizingMaskIntoConstraints = false
        seekToLabel.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            seekToViewImage.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 15),
            seekToViewImage.centerYAnchor.constraint(equalTo: centerYAnchor),
            seekToViewImage.widthAnchor.constraint(equalToConstant: 25),
            seekToViewImage.heightAnchor.constraint(equalToConstant: 15),
            seekToLabel.leadingAnchor.constraint(equalTo: seekToViewImage.trailingAnchor, constant: 10),
            seekToLabel.centerYAnchor.constraint(equalTo: centerYAnchor),
        ])
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    #if canImport(AppKit)
    // AppKit views have no backgroundColor property; bridge it via the layer.
    var backgroundColor: UIColor? {
        get {
            if let layer, let cgColor = layer.backgroundColor {
                return UIColor(cgColor: cgColor)
            } else {
                return nil
            }
        }
        set {
            backingLayer?.backgroundColor = newValue?.cgColor
        }
    }
    #endif
}
|
||||
|
||||
// MARK: - SeekViewProtocol

extension SeekView: SeekViewProtocol {
    /// Updates the seek label and orients the arrow for the seek direction.
    /// - Parameters:
    ///   - text: The time string to display.
    ///   - isAdd: `true` for a forward seek (arrow at 0°), `false` for a
    ///     backward seek (arrow rotated 180°).
    public func set(text: String, isAdd: Bool) {
        seekToLabel.text = text
        // Bind the backing layer once instead of mixing optional chaining
        // with force-unwraps of the same property, which read as a crash
        // hazard even though Swift skips the RHS when the chain is nil.
        if !isAdd, let layer = seekToViewImage.backingLayer {
            // Re-anchor at the visual center so the 180° rotation spins the
            // arrow in place rather than around the default anchor point.
            layer.position = CGPoint(x: layer.frame.midX, y: layer.frame.midY)
            layer.anchorPoint = CGPoint(x: 0.5, y: 0.5)
        }
        // `centerRotate(byDegrees:)` is a KSPlayer cross-platform view helper.
        seekToViewImage.centerRotate(byDegrees: isAdd ? 0.0 : 180)
    }
}
|
||||
1054
KSPlayer-main/Sources/KSPlayer/Video/VideoPlayerView.swift
Normal file
1054
KSPlayer-main/Sources/KSPlayer/Video/VideoPlayerView.swift
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user