Initial commit: SimVision tvOS streaming app

Features:
- VOD library with movie grouping and version detection
- TV show library with season/episode organization
- TMDB integration for trending shows and recently aired episodes
- Recent releases section with TMDB release date sorting
- Watch history tracking with continue watching
- Playlist caching (12-hour TTL) for offline support
- M3U playlist parsing with XStream API support
- Authentication with credential storage

Technical:
- SwiftUI for tvOS
- Actor-based services for thread safety (an illustrative cache sketch follows the commit metadata below)
- Persistent caching for playlists, TMDB data, and watch history
- KSPlayer integration for video playback

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-21 22:12:08 -06:00
commit 872354b834
283 changed files with 338296 additions and 0 deletions
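
The Technical notes above mention actor-based services and a 12-hour playlist cache, but the app-level types themselves are not part of this excerpt. The following is a minimal sketch of how such a cache could look; `Playlist`, `PlaylistCache`, and the `fetch` closure are assumed names for illustration, not the app's actual API.

```
import Foundation

// Hypothetical payload type; the real app's playlist model is not shown in this commit.
struct Playlist: Codable {
    let channels: [String]
    let fetchedAt: Date
}

// An actor serializes access, so concurrent tasks can share one cache instance safely.
actor PlaylistCache {
    private let ttl: TimeInterval = 12 * 60 * 60 // 12-hour TTL, as described above
    private var cached: Playlist?

    // Return the cached playlist while it is fresh; otherwise fetch and store a new one.
    func playlist(fetch: @Sendable () async throws -> Playlist) async throws -> Playlist {
        if let cached, Date().timeIntervalSince(cached.fetchedAt) < ttl {
            return cached
        }
        let fresh = try await fetch()
        cached = fresh
        return fresh
    }
}
```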

@@ -0,0 +1,592 @@
import AVFoundation
import AVKit
#if canImport(UIKit)
import UIKit
#else
import AppKit
public typealias UIImage = NSImage
#endif
import Combine
import CoreGraphics
public final class KSAVPlayerView: UIView {
public let player = AVQueuePlayer()
override public init(frame: CGRect) {
super.init(frame: frame)
#if !canImport(UIKit)
layer = AVPlayerLayer()
#endif
playerLayer.player = player
player.automaticallyWaitsToMinimizeStalling = false
}
@available(*, unavailable)
public required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override public var contentMode: UIViewContentMode {
get {
switch playerLayer.videoGravity {
case .resize:
return .scaleToFill
case .resizeAspect:
return .scaleAspectFit
case .resizeAspectFill:
return .scaleAspectFill
default:
return .scaleAspectFit
}
}
set {
switch newValue {
case .scaleToFill:
playerLayer.videoGravity = .resize
case .scaleAspectFit:
playerLayer.videoGravity = .resizeAspect
case .scaleAspectFill:
playerLayer.videoGravity = .resizeAspectFill
case .center:
playerLayer.videoGravity = .resizeAspect
default:
break
}
}
}
#if canImport(UIKit)
override public class var layerClass: AnyClass { AVPlayerLayer.self }
#endif
fileprivate var playerLayer: AVPlayerLayer {
// swiftlint:disable force_cast
layer as! AVPlayerLayer
// swiftlint:enable force_cast
}
}
@MainActor
public class KSAVPlayer {
private var cancellable: AnyCancellable?
private var options: KSOptions {
didSet {
player.currentItem?.preferredForwardBufferDuration = options.preferredForwardBufferDuration
cancellable = options.$preferredForwardBufferDuration.sink { [weak self] newValue in
self?.player.currentItem?.preferredForwardBufferDuration = newValue
}
}
}
private let playerView = KSAVPlayerView()
private var urlAsset: AVURLAsset
private var shouldSeekTo = TimeInterval(0)
private var playerLooper: AVPlayerLooper?
private var statusObservation: NSKeyValueObservation?
private var loadedTimeRangesObservation: NSKeyValueObservation?
private var bufferEmptyObservation: NSKeyValueObservation?
private var likelyToKeepUpObservation: NSKeyValueObservation?
private var bufferFullObservation: NSKeyValueObservation?
private var itemObservation: NSKeyValueObservation?
private var loopCountObservation: NSKeyValueObservation?
private var loopStatusObservation: NSKeyValueObservation?
private var mediaPlayerTracks = [AVMediaPlayerTrack]()
private var error: Error? {
didSet {
if let error {
delegate?.finish(player: self, error: error)
}
}
}
private lazy var _pipController: Any? = {
if #available(tvOS 14.0, *) {
let pip = KSPictureInPictureController(playerLayer: playerView.playerLayer)
return pip
} else {
return nil
}
}()
@available(tvOS 14.0, *)
public var pipController: KSPictureInPictureController? {
_pipController as? KSPictureInPictureController
}
public var naturalSize: CGSize = .zero
public let dynamicInfo: DynamicInfo? = nil
@available(macOS 12.0, iOS 15.0, tvOS 15.0, *)
public var playbackCoordinator: AVPlaybackCoordinator {
playerView.player.playbackCoordinator
}
public private(set) var bufferingProgress = 0 {
didSet {
delegate?.changeBuffering(player: self, progress: bufferingProgress)
}
}
public weak var delegate: MediaPlayerDelegate?
public private(set) var duration: TimeInterval = 0
public private(set) var fileSize: Double = 0
public private(set) var playableTime: TimeInterval = 0
public let chapters: [Chapter] = []
public var playbackRate: Float = 1 {
didSet {
if playbackState == .playing {
player.rate = playbackRate
}
}
}
public var playbackVolume: Float = 1.0 {
didSet {
if player.volume != playbackVolume {
player.volume = playbackVolume
}
}
}
public private(set) var loadState = MediaLoadState.idle {
didSet {
if loadState != oldValue {
playOrPause()
if loadState == .loading || loadState == .idle {
bufferingProgress = 0
}
}
}
}
public private(set) var playbackState = MediaPlaybackState.idle {
didSet {
if playbackState != oldValue {
playOrPause()
if playbackState == .finished {
delegate?.finish(player: self, error: nil)
}
}
}
}
public private(set) var isReadyToPlay = false {
didSet {
if isReadyToPlay != oldValue {
if isReadyToPlay {
options.readyTime = CACurrentMediaTime()
delegate?.readyToPlay(player: self)
}
}
}
}
#if os(xrOS)
public var allowsExternalPlayback = false
public var usesExternalPlaybackWhileExternalScreenIsActive = false
public let isExternalPlaybackActive = false
#else
public var allowsExternalPlayback: Bool {
get {
player.allowsExternalPlayback
}
set {
player.allowsExternalPlayback = newValue
}
}
#if os(macOS)
public var usesExternalPlaybackWhileExternalScreenIsActive = false
#else
public var usesExternalPlaybackWhileExternalScreenIsActive: Bool {
get {
player.usesExternalPlaybackWhileExternalScreenIsActive
}
set {
player.usesExternalPlaybackWhileExternalScreenIsActive = newValue
}
}
#endif
public var isExternalPlaybackActive: Bool {
player.isExternalPlaybackActive
}
#endif
public required init(url: URL, options: KSOptions) {
KSOptions.setAudioSession()
urlAsset = AVURLAsset(url: url, options: options.avOptions)
self.options = options
itemObservation = player.observe(\.currentItem) { [weak self] player, _ in
guard let self else { return }
self.observer(playerItem: player.currentItem)
}
}
}
extension KSAVPlayer {
public var player: AVQueuePlayer { playerView.player }
public var playerLayer: AVPlayerLayer { playerView.playerLayer }
@objc private func moviePlayDidEnd(notification _: Notification) {
if !options.isLoopPlay {
playbackState = .finished
}
}
@objc private func playerItemFailedToPlayToEndTime(notification: Notification) {
var playError: Error?
if let userInfo = notification.userInfo {
if let error = userInfo["error"] as? Error {
playError = error
} else if let error = userInfo[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError {
playError = error
} else if let errorCode = (userInfo["error"] as? NSNumber)?.intValue {
playError = NSError(domain: "AVMoviePlayer", code: errorCode, userInfo: nil)
}
}
delegate?.finish(player: self, error: playError)
}
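// When the item becomes ready, capture its tracks, natural size and duration, estimate the
// file size from the tracks' data rates, and mark the player ready; on failure surface item.error.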
private func updateStatus(item: AVPlayerItem) {
if item.status == .readyToPlay {
options.findTime = CACurrentMediaTime()
mediaPlayerTracks = item.tracks.map {
AVMediaPlayerTrack(track: $0)
}
let playableVideo = mediaPlayerTracks.first {
$0.mediaType == .video && $0.isPlayable
}
if let playableVideo {
naturalSize = playableVideo.naturalSize
} else {
error = NSError(errorCode: .videoTracksUnplayable)
return
}
// Keep only the first audio track enabled; disable the rest.
item.tracks.filter { $0.assetTrack?.mediaType.rawValue == AVMediaType.audio.rawValue }.dropFirst().forEach { $0.isEnabled = false }
duration = item.duration.seconds
let estimatedDataRates = item.tracks.compactMap { $0.assetTrack?.estimatedDataRate }
fileSize = Double(estimatedDataRates.reduce(0, +)) * duration / 8
isReadyToPlay = true
} else if item.status == .failed {
error = item.error
}
}
private func updatePlayableDuration(item: AVPlayerItem) {
let first = item.loadedTimeRanges.first { CMTimeRangeContainsTime($0.timeRangeValue, time: item.currentTime()) }
if let first {
playableTime = first.timeRangeValue.end.seconds
guard playableTime > 0 else { return }
let loadedTime = playableTime - currentPlaybackTime
guard loadedTime > 0 else { return }
bufferingProgress = Int(min(loadedTime * 100 / item.preferredForwardBufferDuration, 100))
if bufferingProgress >= 100 {
loadState = .playable
}
}
}
private func playOrPause() {
if playbackState == .playing {
if loadState == .playable {
player.play()
player.rate = playbackRate
}
} else {
player.pause()
}
delegate?.changeLoadState(player: self)
}
private func replaceCurrentItem(playerItem: AVPlayerItem?) {
player.currentItem?.cancelPendingSeeks()
if options.isLoopPlay {
loopCountObservation?.invalidate()
loopStatusObservation?.invalidate()
playerLooper?.disableLooping()
guard let playerItem else {
playerLooper = nil
return
}
playerLooper = AVPlayerLooper(player: player, templateItem: playerItem)
loopCountObservation = playerLooper?.observe(\.loopCount) { [weak self] playerLooper, _ in
guard let self else { return }
self.delegate?.playBack(player: self, loopCount: playerLooper.loopCount)
}
loopStatusObservation = playerLooper?.observe(\.status) { [weak self] playerLooper, _ in
guard let self else { return }
if playerLooper.status == .failed {
self.error = playerLooper.error
}
}
} else {
player.replaceCurrentItem(with: playerItem)
}
}
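// Re-arm end-of-playback notifications and KVO observers for the new item, after tearing
// down the ones that pointed at the previous item.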
private func observer(playerItem: AVPlayerItem?) {
NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
NotificationCenter.default.removeObserver(self, name: .AVPlayerItemFailedToPlayToEndTime, object: playerItem)
statusObservation?.invalidate()
loadedTimeRangesObservation?.invalidate()
bufferEmptyObservation?.invalidate()
likelyToKeepUpObservation?.invalidate()
bufferFullObservation?.invalidate()
guard let playerItem else { return }
NotificationCenter.default.addObserver(self, selector: #selector(moviePlayDidEnd), name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemFailedToPlayToEndTime), name: .AVPlayerItemFailedToPlayToEndTime, object: playerItem)
statusObservation = playerItem.observe(\.status) { [weak self] item, _ in
guard let self else { return }
self.updateStatus(item: item)
}
loadedTimeRangesObservation = playerItem.observe(\.loadedTimeRanges) { [weak self] item, _ in
guard let self else { return }
// Loaded ranges changed; recompute the playable duration.
self.updatePlayableDuration(item: item)
}
let changeHandler: (AVPlayerItem, NSKeyValueObservedChange<Bool>) -> Void = { [weak self] _, _ in
guard let self else { return }
// A buffer flag changed; derive the load state from the buffer status.
if playerItem.isPlaybackBufferEmpty {
self.loadState = .loading
} else if playerItem.isPlaybackLikelyToKeepUp || playerItem.isPlaybackBufferFull {
self.loadState = .playable
}
}
bufferEmptyObservation = playerItem.observe(\.isPlaybackBufferEmpty, changeHandler: changeHandler)
likelyToKeepUpObservation = playerItem.observe(\.isPlaybackLikelyToKeepUp, changeHandler: changeHandler)
bufferFullObservation = playerItem.observe(\.isPlaybackBufferFull, changeHandler: changeHandler)
}
}
extension KSAVPlayer: MediaPlayerProtocol {
public var subtitleDataSouce: SubtitleDataSouce? { nil }
public var isPlaying: Bool { player.rate > 0 ? true : playbackState == .playing }
public var view: UIView? { playerView }
public var currentPlaybackTime: TimeInterval {
get {
if shouldSeekTo > 0 {
return TimeInterval(shouldSeekTo)
} else {
// Report 0 until the item is ready to play.
return isReadyToPlay ? player.currentTime().seconds : 0
}
}
set {
seek(time: newValue) { _ in
}
}
}
public var numberOfBytesTransferred: Int64 {
guard let playerItem = player.currentItem, let accesslog = playerItem.accessLog(), let event = accesslog.events.first else {
return 0
}
return event.numberOfBytesTransferred
}
public func thumbnailImageAtCurrentTime() async -> CGImage? {
guard let playerItem = player.currentItem, isReadyToPlay else {
return nil
}
return await withCheckedContinuation { continuation in
urlAsset.thumbnailImage(currentTime: playerItem.currentTime()) { result in
continuation.resume(returning: result)
}
}
}
public func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void)) {
let time = max(time, 0)
shouldSeekTo = time
playbackState = .seeking
runOnMainThread { [weak self] in
self?.bufferingProgress = 0
}
let tolerance: CMTime = options.isAccurateSeek ? .zero : .positiveInfinity
player.seek(to: CMTime(seconds: time), toleranceBefore: tolerance, toleranceAfter: tolerance) {
[weak self] finished in
guard let self else { return }
self.shouldSeekTo = 0
completion(finished)
}
}
public func prepareToPlay() {
KSLog("prepareToPlay \(self)")
options.prepareTime = CACurrentMediaTime()
runOnMainThread { [weak self] in
guard let self else { return }
self.bufferingProgress = 0
let playerItem = AVPlayerItem(asset: self.urlAsset)
self.options.openTime = CACurrentMediaTime()
self.replaceCurrentItem(playerItem: playerItem)
self.player.actionAtItemEnd = .pause
self.player.volume = self.playbackVolume
}
}
public func play() {
KSLog("play \(self)")
playbackState = .playing
}
public func pause() {
KSLog("pause \(self)")
playbackState = .paused
}
public func shutdown() {
KSLog("shutdown \(self)")
isReadyToPlay = false
playbackState = .stopped
loadState = .idle
urlAsset.cancelLoading()
replaceCurrentItem(playerItem: nil)
}
public func replace(url: URL, options: KSOptions) {
KSLog("replaceUrl \(self)")
shutdown()
urlAsset = AVURLAsset(url: url, options: options.avOptions)
self.options = options
}
public var contentMode: UIViewContentMode {
get {
playerView.contentMode
}
set {
playerView.contentMode = newValue
}
}
public func enterBackground() {
playerView.playerLayer.player = nil
}
public func enterForeground() {
playerView.playerLayer.player = playerView.player
}
public var seekable: Bool {
!(player.currentItem?.seekableTimeRanges.isEmpty ?? true)
}
public var isMuted: Bool {
get {
player.isMuted
}
set {
player.isMuted = newValue
}
}
public func tracks(mediaType: AVFoundation.AVMediaType) -> [MediaPlayerTrack] {
player.currentItem?.tracks.filter { $0.assetTrack?.mediaType == mediaType }.map { AVMediaPlayerTrack(track: $0) } ?? []
}
public func select(track: some MediaPlayerTrack) {
player.currentItem?.tracks.filter { $0.assetTrack?.mediaType == track.mediaType }.forEach { $0.isEnabled = false }
track.isEnabled = true
}
}
extension AVFoundation.AVMediaType {
var mediaCharacteristic: AVMediaCharacteristic {
switch self {
case .video:
return .visual
case .audio:
return .audible
case .subtitle:
return .legible
default:
return .easyToRead
}
}
}
extension AVAssetTrack {
func toMediaPlayerTrack() {}
}
class AVMediaPlayerTrack: MediaPlayerTrack {
let formatDescription: CMFormatDescription?
let description: String
private let track: AVPlayerItemTrack
var nominalFrameRate: Float
let trackID: Int32
let rotation: Int16 = 0
let bitDepth: Int32
let bitRate: Int64
let name: String
let languageCode: String?
let mediaType: AVFoundation.AVMediaType
let isImageSubtitle = false
var dovi: DOVIDecoderConfigurationRecord?
let fieldOrder: FFmpegFieldOrder = .unknown
var isPlayable: Bool
@MainActor
var isEnabled: Bool {
get {
track.isEnabled
}
set {
track.isEnabled = newValue
}
}
init(track: AVPlayerItemTrack) {
self.track = track
trackID = track.assetTrack?.trackID ?? 0
mediaType = track.assetTrack?.mediaType ?? .video
name = track.assetTrack?.languageCode ?? ""
languageCode = track.assetTrack?.languageCode
nominalFrameRate = track.assetTrack?.nominalFrameRate ?? 24.0
bitRate = Int64(track.assetTrack?.estimatedDataRate ?? 0)
#if os(xrOS)
isPlayable = false
#else
isPlayable = track.assetTrack?.isPlayable ?? false
#endif
// swiftlint:disable force_cast
if let first = track.assetTrack?.formatDescriptions.first {
formatDescription = first as! CMFormatDescription
} else {
formatDescription = nil
}
bitDepth = formatDescription?.bitDepth ?? 0
// swiftlint:enable force_cast
description = (formatDescription?.mediaSubType ?? .boxed).rawValue.string
#if os(xrOS)
Task {
isPlayable = await (try? track.assetTrack?.load(.isPlayable)) ?? false
}
#endif
}
func load() {}
}
public extension AVAsset {
func createImageGenerator() -> AVAssetImageGenerator {
let imageGenerator = AVAssetImageGenerator(asset: self)
imageGenerator.requestedTimeToleranceBefore = .zero
imageGenerator.requestedTimeToleranceAfter = .zero
return imageGenerator
}
func thumbnailImage(currentTime: CMTime, handler: @escaping (CGImage?) -> Void) {
let imageGenerator = createImageGenerator()
imageGenerator.requestedTimeToleranceBefore = .zero
imageGenerator.requestedTimeToleranceAfter = .zero
imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: currentTime)]) { _, cgImage, _, _, _ in
if let cgImage {
handler(cgImage)
} else {
handler(nil)
}
}
}
}

@@ -0,0 +1,679 @@
//
// KSOptions.swift
// KSPlayer-tvOS
//
// Created by kintan on 2018/3/9.
//
import AVFoundation
#if os(tvOS) || os(xrOS)
import DisplayCriteria
#endif
import OSLog
#if canImport(UIKit)
import UIKit
#endif
open class KSOptions {
/// Preferred amount of media, in seconds, to buffer ahead of the playhead.
@Published
public var preferredForwardBufferDuration = KSOptions.preferredForwardBufferDuration
/// Maximum buffer duration in seconds.
public var maxBufferDuration = KSOptions.maxBufferDuration
/// Fast start: begin playback as soon as a minimal amount of data is buffered.
public var isSecondOpen = KSOptions.isSecondOpen
/// Whether seeks are frame-accurate rather than keyframe-aligned.
public var isAccurateSeek = KSOptions.isAccurateSeek
/// Applies to short videos only
public var isLoopPlay = KSOptions.isLoopPlay
/// Whether playback resumes automatically after a seek completes.
public var isSeekedAutoPlay = KSOptions.isSeekedAutoPlay
/*
AVSEEK_FLAG_BACKWARD: 1
AVSEEK_FLAG_BYTE: 2
AVSEEK_FLAG_ANY: 4
AVSEEK_FLAG_FRAME: 8
*/
public var seekFlags = Int32(1)
// ffmpeg only cache http
// ff_tempfile: Cannot open temporary file
public var cache = false
// record stream
public var outputURL: URL?
public var avOptions = [String: Any]()
public var formatContextOptions = [String: Any]()
public var decoderOptions = [String: Any]()
public var probesize: Int64?
public var maxAnalyzeDuration: Int64?
public var lowres = UInt8(0)
public var nobuffer = false
public var codecLowDelay = false
public var startPlayTime: TimeInterval = 0
public var startPlayRate: Float = 1.0
public var registerRemoteControll: Bool = true // register MPRemoteCommandCenter handlers
public var referer: String? {
didSet {
if let referer {
formatContextOptions["referer"] = "Referer: \(referer)"
} else {
formatContextOptions["referer"] = nil
}
}
}
public var userAgent: String? = "KSPlayer" {
didSet {
formatContextOptions["user_agent"] = userAgent
}
}
// audio
public var audioFilters = [String]()
public var syncDecodeAudio = false
// subtitle
public var autoSelectEmbedSubtitle = true
public var isSeekImageSubtitle = false
// video
public var display = DisplayEnum.plane
public var videoDelay = 0.0 // s
public var autoDeInterlace = false
public var autoRotate = true
public var destinationDynamicRange: DynamicRange?
public var videoAdaptable = true
public var videoFilters = [String]()
public var syncDecodeVideo = false
public var hardwareDecode = KSOptions.hardwareDecode
public var asynchronousDecompression = KSOptions.asynchronousDecompression
public var videoDisable = false
public var canStartPictureInPictureAutomaticallyFromInline = KSOptions.canStartPictureInPictureAutomaticallyFromInline
public var automaticWindowResize = true
@Published
public var videoInterlacingType: VideoInterlacingType?
private var videoClockDelayCount = 0
public internal(set) var formatName = ""
public internal(set) var prepareTime = 0.0
public internal(set) var dnsStartTime = 0.0
public internal(set) var tcpStartTime = 0.0
public internal(set) var tcpConnectedTime = 0.0
public internal(set) var openTime = 0.0
public internal(set) var findTime = 0.0
public internal(set) var readyTime = 0.0
public internal(set) var readAudioTime = 0.0
public internal(set) var readVideoTime = 0.0
public internal(set) var decodeAudioTime = 0.0
public internal(set) var decodeVideoTime = 0.0
public init() {
formatContextOptions["user_agent"] = userAgent
// protocols.texi http.c
// FieldOrder
formatContextOptions["scan_all_pmts"] = 1
// ts
formatContextOptions["reconnect"] = 1
formatContextOptions["reconnect_streamed"] = 1
// http keep-alive (multiple_requests); left disabled here
// formatContextOptions["multiple_requests"] = 1
// ts
// formatContextOptions["auto_convert"] = 0
// formatContextOptions["fps_probe_size"] = 3
// formatContextOptions["rw_timeout"] = 10_000_000
// formatContextOptions["max_analyze_duration"] = 300 * 1000
// m3u8http
// formatContextOptions["protocol_whitelist"] = "file,http,https,tcp,tls,crypto,async,cache,data,httpproxy"
// ipv6
// formatContextOptions["reconnect_at_eof"] = 1
// tcp Failed to resolve hostname
// formatContextOptions["reconnect_on_network_error"] = 1
// There is total different meaning for 'listen_timeout' option in rtmp
// set 'listen_timeout' = -1 for rtmp/rtsp
// formatContextOptions["listen_timeout"] = 3
decoderOptions["threads"] = "auto"
decoderOptions["refcounted_frames"] = "1"
}
/**
You can add HTTP headers or other options mentioned in https://developer.apple.com/reference/avfoundation/avurlasset/initialization_options
To add an HTTP header, set the options like this:
```
options.appendHeader(["Referer": "https://www.xxx.com"])
```
*/
public func appendHeader(_ header: [String: String]) {
var oldValue = avOptions["AVURLAssetHTTPHeaderFieldsKey"] as? [String: String] ?? [String: String]()
oldValue.merge(header) { _, new in new }
avOptions["AVURLAssetHTTPHeaderFieldsKey"] = oldValue
var str = formatContextOptions["headers"] as? String ?? ""
for (key, value) in header {
str.append("\(key):\(value)\r\n")
}
formatContextOptions["headers"] = str
}
public func setCookie(_ cookies: [HTTPCookie]) {
avOptions[AVURLAssetHTTPCookiesKey] = cookies
let cookieStr = cookies.map { cookie in "\(cookie.name)=\(cookie.value)" }.joined(separator: "; ")
appendHeader(["Cookie": cookieStr])
}
/// Decide from the buffered packet/frame capacities whether playback can start or resume.
open func playable(capacitys: [CapacityProtocol], isFirst: Bool, isSeek: Bool) -> LoadingState {
let packetCount = capacitys.map(\.packetCount).min() ?? 0
let frameCount = capacitys.map(\.frameCount).min() ?? 0
let isEndOfFile = capacitys.allSatisfy(\.isEndOfFile)
let loadedTime = capacitys.map(\.loadedTime).min() ?? 0
let progress = preferredForwardBufferDuration == 0 ? 100 : loadedTime * 100.0 / preferredForwardBufferDuration
let isPlayable = capacitys.allSatisfy { capacity in
if capacity.isEndOfFile && capacity.packetCount == 0 {
return true
}
guard capacity.frameCount >= 2 else {
return false
}
if capacity.isEndOfFile {
return true
}
if (syncDecodeVideo && capacity.mediaType == .video) || (syncDecodeAudio && capacity.mediaType == .audio) {
return true
}
if isFirst || isSeek {
// On first open or after a seek, audio (or fast-start mode) needs little or no extra buffering.
if capacity.mediaType == .audio || isSecondOpen {
if isFirst {
return true
} else {
return capacity.loadedTime >= self.preferredForwardBufferDuration / 2
}
}
}
return capacity.loadedTime >= self.preferredForwardBufferDuration
}
return LoadingState(loadedTime: loadedTime, progress: progress, packetCount: packetCount,
frameCount: frameCount, isEndOfFile: isEndOfFile, isPlayable: isPlayable,
isFirst: isFirst, isSeek: isSeek)
}
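/// Bitrate adaptation: once at least half of maxBufferDuration has elapsed since the last
/// switch, return (current, target) bit rates to step up when plenty of frames are buffered
/// or step down when the buffer is running low; return nil to keep the current stream.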
open func adaptable(state: VideoAdaptationState?) -> (Int64, Int64)? {
guard let state, let last = state.bitRateStates.last, CACurrentMediaTime() - last.time > maxBufferDuration / 2, let index = state.bitRates.firstIndex(of: last.bitRate) else {
return nil
}
let isUp = state.loadedCount > Int(Double(state.fps) * maxBufferDuration / 2)
if isUp != state.isPlayable {
return nil
}
if isUp {
if index < state.bitRates.endIndex - 1 {
return (last.bitRate, state.bitRates[index + 1])
}
} else {
if index > state.bitRates.startIndex {
return (last.bitRate, state.bitRates[index - 1])
}
}
return nil
}
/// Wanted video stream index, or nil for automatic selection.
/// - Parameter tracks: the available video tracks
/// - Returns: the index of the wanted track
open func wantedVideo(tracks _: [MediaPlayerTrack]) -> Int? {
nil
}
/// Wanted audio stream index, or nil for automatic selection.
/// - Parameter tracks: the available audio tracks
/// - Returns: the index of the wanted track
open func wantedAudio(tracks _: [MediaPlayerTrack]) -> Int? {
nil
}
open func videoFrameMaxCount(fps _: Float, naturalSize _: CGSize, isLive: Bool) -> UInt8 {
isLive ? 4 : 16
}
open func audioFrameMaxCount(fps: Float, channelCount: Int) -> UInt8 {
let count = (Int(fps) * channelCount) >> 2
if count >= UInt8.max {
return UInt8.max
} else {
return UInt8(count)
}
}
/// customize dar
/// - Parameters:
/// - sar: SAR(Sample Aspect Ratio)
/// - par: PAR(Pixel Aspect Ratio)
/// - Returns: DAR(Display Aspect Ratio)
open func customizeDar(sar _: CGSize, par _: CGSize) -> CGSize? {
nil
}
// iOS PiP and HDR10+ rely on AVSampleBufferDisplayLayer; use it only for flat (plane) display.
open func isUseDisplayLayer() -> Bool {
display == .plane
}
open func urlIO(log: String) {
if log.starts(with: "Original list of addresses"), dnsStartTime == 0 {
dnsStartTime = CACurrentMediaTime()
} else if log.starts(with: "Starting connection attempt to"), tcpStartTime == 0 {
tcpStartTime = CACurrentMediaTime()
} else if log.starts(with: "Successfully connected to"), tcpConnectedTime == 0 {
tcpConnectedTime = CACurrentMediaTime()
}
}
private var idetTypeMap = [VideoInterlacingType: UInt]()
open func filter(log: String) {
if log.starts(with: "Repeated Field:"), autoDeInterlace {
for str in log.split(separator: ",") {
let map = str.split(separator: ":")
if map.count >= 2 {
if String(map[0].trimmingCharacters(in: .whitespaces)) == "Multi frame" {
if let type = VideoInterlacingType(rawValue: map[1].trimmingCharacters(in: .whitespacesAndNewlines)) {
idetTypeMap[type] = (idetTypeMap[type] ?? 0) + 1
let tff = idetTypeMap[.tff] ?? 0
let bff = idetTypeMap[.bff] ?? 0
let progressive = idetTypeMap[.progressive] ?? 0
let undetermined = idetTypeMap[.undetermined] ?? 0
if progressive - tff - bff > 100 {
videoInterlacingType = .progressive
autoDeInterlace = false
} else if bff - progressive > 100 {
videoInterlacingType = .bff
autoDeInterlace = false
} else if tff - progressive > 100 {
videoInterlacingType = .tff
autoDeInterlace = false
} else if undetermined - progressive - tff - bff > 100 {
videoInterlacingType = .undetermined
autoDeInterlace = false
}
}
}
}
}
}
}
open func sei(string: String) {
KSLog("sei \(string)")
}
/**
Inspect the track's field order; for interlaced content (tt/bb/tb/bt) fall back to software
decoding and append a yadif deinterlacing filter to videoFilters.
*/
open func process(assetTrack: some MediaPlayerTrack) {
if assetTrack.mediaType == .video {
if [FFmpegFieldOrder.bb, .bt, .tt, .tb].contains(assetTrack.fieldOrder) {
// TODO: yadif_videotoolbox can crash, so force software decoding and plain yadif for now
hardwareDecode = false
asynchronousDecompression = false
let yadif = hardwareDecode ? "yadif_videotoolbox" : "yadif"
var yadifMode = KSOptions.yadifMode
// if let assetTrack = assetTrack as? FFmpegAssetTrack {
// if assetTrack.realFrameRate.num == 2 * assetTrack.avgFrameRate.num, assetTrack.realFrameRate.den == assetTrack.avgFrameRate.den {
// if yadifMode == 1 {
// yadifMode = 0
// } else if yadifMode == 3 {
// yadifMode = 2
// }
// }
// }
if KSOptions.deInterlaceAddIdet {
videoFilters.append("idet")
}
videoFilters.append("\(yadif)=mode=\(yadifMode):parity=-1:deint=1")
if yadifMode == 1 || yadifMode == 3 {
assetTrack.nominalFrameRate = assetTrack.nominalFrameRate * 2
}
}
}
}
@MainActor
open func updateVideo(refreshRate: Float, isDovi: Bool, formatDescription: CMFormatDescription?) {
#if os(tvOS) || os(xrOS)
/**
Setting preferredDisplayCriteria starts a display mode switch (isDisplayModeSwitchInProgress
becomes true), and the switch takes roughly 3 seconds to settle after it is triggered.
*/
guard let displayManager = UIApplication.shared.windows.first?.avDisplayManager,
displayManager.isDisplayCriteriaMatchingEnabled
else {
return
}
if let dynamicRange = isDovi ? .dolbyVision : formatDescription?.dynamicRange {
displayManager.preferredDisplayCriteria = AVDisplayCriteria(refreshRate: refreshRate, videoDynamicRange: dynamicRange.rawValue)
}
#endif
}
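/// Compare the next video frame's timestamp with the master clock. A frame still at least half
/// a frame-duration ahead is kept (.remain); as the video falls further behind, the result
/// escalates from .next through dropping frames to flushing or seeking the video track.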
open func videoClockSync(main: KSClock, nextVideoTime: TimeInterval, fps: Double, frameCount: Int) -> (Double, ClockProcessType) {
let desire = main.getTime() - videoDelay
let diff = nextVideoTime - desire
// print("[video] video diff \(diff) nextVideoTime \(nextVideoTime) main \(main.time.seconds)")
if diff >= 1 / fps / 2 {
videoClockDelayCount = 0
return (diff, .remain)
} else {
if diff < -4 / fps {
videoClockDelayCount += 1
let log = "[video] video delay=\(diff), clock=\(desire), delay count=\(videoClockDelayCount), frameCount=\(frameCount)"
if frameCount == 1 {
if diff < -1, videoClockDelayCount % 10 == 0 {
KSLog("\(log) drop gop Packet")
return (diff, .dropGOPPacket)
} else if videoClockDelayCount % 5 == 0 {
KSLog("\(log) drop next frame")
return (diff, .dropNextFrame)
} else {
return (diff, .next)
}
} else {
if diff < -8, videoClockDelayCount % 100 == 0 {
KSLog("\(log) seek video track")
return (diff, .seek)
}
if diff < -1, videoClockDelayCount % 10 == 0 {
KSLog("\(log) flush video track")
return (diff, .flush)
}
if videoClockDelayCount % 2 == 0 {
KSLog("\(log) drop next frame")
return (diff, .dropNextFrame)
} else {
return (diff, .next)
}
}
} else {
videoClockDelayCount = 0
return (diff, .next)
}
}
}
open func availableDynamicRange(_ contentRange: DynamicRange?) -> DynamicRange? {
#if canImport(UIKit)
let availableHDRModes = AVPlayer.availableHDRModes
if let preferedDynamicRange = destinationDynamicRange {
// value of 0 indicates that no HDR modes are supported.
if availableHDRModes == AVPlayer.HDRMode(rawValue: 0) {
return .sdr
} else if availableHDRModes.contains(preferedDynamicRange.hdrMode) {
return preferedDynamicRange
} else if let contentRange,
availableHDRModes.contains(contentRange.hdrMode)
{
return contentRange
} else if preferedDynamicRange != .sdr { // trying update to HDR mode
return availableHDRModes.dynamicRange
}
}
return contentRange
#else
return destinationDynamicRange ?? contentRange
#endif
}
open func playerLayerDeinit() {
#if os(tvOS) || os(xrOS)
runOnMainThread {
UIApplication.shared.windows.first?.avDisplayManager.preferredDisplayCriteria = nil
}
#endif
}
open func liveAdaptivePlaybackRate(loadingState _: LoadingState) -> Float? {
nil
// if loadingState.isFirst {
// return nil
// }
// if loadingState.loadedTime > preferredForwardBufferDuration + 5 {
// return 1.2
// } else if loadingState.loadedTime < preferredForwardBufferDuration / 2 {
// return 0.8
// } else {
// return 1
// }
}
open func process(url _: URL) -> AbstractAVIOContext? {
nil
}
}
public enum VideoInterlacingType: String {
case tff
case bff
case progressive
case undetermined
}
public extension KSOptions {
static var firstPlayerType: MediaPlayerProtocol.Type = KSAVPlayer.self
static var secondPlayerType: MediaPlayerProtocol.Type? = KSMEPlayer.self
/// Default preferred forward buffer duration, in seconds.
static var preferredForwardBufferDuration = 3.0
/// Default maximum buffer duration, in seconds.
static var maxBufferDuration = 30.0
/// Fast start: begin playback as soon as a minimal amount of data is buffered.
static var isSecondOpen = false
/// Whether seeks are frame-accurate rather than keyframe-aligned.
static var isAccurateSeek = false
/// Applies to short videos only
static var isLoopPlay = false
/// Whether playback starts automatically; defaults to true.
static var isAutoPlay = true
/// Whether playback resumes automatically after a seek completes.
static var isSeekedAutoPlay = true
static var hardwareDecode = true
// With asynchronous decompression, the decoded AVFrame pts may not match the AVPacket pts.
static var asynchronousDecompression = false
static var isPipPopViewController = false
static var canStartPictureInPictureAutomaticallyFromInline = true
static var preferredFrame = true
static var useSystemHTTPProxy = true
/// Minimum level that is logged.
static var logLevel = LogLevel.warning
static var logger: LogHandler = OSLog(lable: "KSPlayer")
internal static func deviceCpuCount() -> Int {
var ncpu = UInt(0)
var len: size_t = MemoryLayout.size(ofValue: ncpu)
sysctlbyname("hw.ncpu", &ncpu, &len, nil, 0)
return Int(ncpu)
}
static func setAudioSession() {
#if os(macOS)
// try? AVAudioSession.sharedInstance().setRouteSharingPolicy(.longFormAudio)
#else
var category = AVAudioSession.sharedInstance().category
if category != .playAndRecord {
category = .playback
}
#if os(tvOS)
try? AVAudioSession.sharedInstance().setCategory(category, mode: .moviePlayback, policy: .longFormAudio)
#else
try? AVAudioSession.sharedInstance().setCategory(category, mode: .moviePlayback, policy: .longFormVideo)
#endif
try? AVAudioSession.sharedInstance().setActive(true)
#endif
}
#if !os(macOS)
static func isSpatialAudioEnabled(channelCount _: AVAudioChannelCount) -> Bool {
if #available(tvOS 15.0, iOS 15.0, *) {
let isSpatialAudioEnabled = AVAudioSession.sharedInstance().currentRoute.outputs.contains { $0.isSpatialAudioEnabled }
try? AVAudioSession.sharedInstance().setSupportsMultichannelContent(isSpatialAudioEnabled)
return isSpatialAudioEnabled
} else {
return false
}
}
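/// Choose how many output channels to use: multichannel sources are clamped to what the
/// current route/device can output unless spatial audio (or the AVSampleBufferAudioRenderer
/// path on tvOS) can take the full layout; mono and stereo sources always map to 2 channels.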
static func outputNumberOfChannels(channelCount: AVAudioChannelCount) -> AVAudioChannelCount {
let maximumOutputNumberOfChannels = AVAudioChannelCount(AVAudioSession.sharedInstance().maximumOutputNumberOfChannels)
let preferredOutputNumberOfChannels = AVAudioChannelCount(AVAudioSession.sharedInstance().preferredOutputNumberOfChannels)
let isSpatialAudioEnabled = isSpatialAudioEnabled(channelCount: channelCount)
let isUseAudioRenderer = KSOptions.audioPlayerType == AudioRendererPlayer.self
KSLog("[audio] maximumOutputNumberOfChannels: \(maximumOutputNumberOfChannels), preferredOutputNumberOfChannels: \(preferredOutputNumberOfChannels), isSpatialAudioEnabled: \(isSpatialAudioEnabled), isUseAudioRenderer: \(isUseAudioRenderer) ")
let maxRouteChannelsCount = AVAudioSession.sharedInstance().currentRoute.outputs.compactMap {
$0.channels?.count
}.max() ?? 2
KSLog("[audio] currentRoute max channels: \(maxRouteChannelsCount)")
var channelCount = channelCount
if channelCount > 2 {
let minChannels = min(maximumOutputNumberOfChannels, channelCount)
#if os(tvOS) || targetEnvironment(simulator)
if !(isUseAudioRenderer && isSpatialAudioEnabled) {
// Without spatial audio via the audio renderer, clamp to the device's maximum output channel count.
// channelCount = AVAudioChannelCount(min(AVAudioSession.sharedInstance().outputNumberOfChannels, maxRouteChannelsCount))
channelCount = minChannels
}
#else
// iOS
if !isSpatialAudioEnabled {
channelCount = minChannels
}
#endif
} else {
channelCount = 2
}
// setPreferredOutputNumberOfChannels,
KSLog("[audio] outputNumberOfChannels: \(AVAudioSession.sharedInstance().outputNumberOfChannels) output channelCount: \(channelCount)")
return channelCount
}
#endif
}
public enum LogLevel: Int32, CustomStringConvertible {
case panic = 0
case fatal = 8
case error = 16
case warning = 24
case info = 32
case verbose = 40
case debug = 48
case trace = 56
public var description: String {
switch self {
case .panic:
return "panic"
case .fatal:
return "fault"
case .error:
return "error"
case .warning:
return "warning"
case .info:
return "info"
case .verbose:
return "verbose"
case .debug:
return "debug"
case .trace:
return "trace"
}
}
}
public extension LogLevel {
var logType: OSLogType {
switch self {
case .panic, .fatal:
return .fault
case .error:
return .error
case .warning:
return .debug
case .info, .verbose, .debug:
return .info
case .trace:
return .default
}
}
}
public protocol LogHandler {
@inlinable
func log(level: LogLevel, message: CustomStringConvertible, file: String, function: String, line: UInt)
}
public class OSLog: LogHandler {
public let label: String
public init(lable: String) {
label = lable
}
@inlinable
public func log(level: LogLevel, message: CustomStringConvertible, file: String, function: String, line: UInt) {
os_log(level.logType, "%@ %@: %@:%d %@ | %@", level.description, label, file, line, function, message.description)
}
}
public class FileLog: LogHandler {
public let fileHandle: FileHandle
public let formatter = DateFormatter()
public init(fileHandle: FileHandle) {
self.fileHandle = fileHandle
formatter.dateFormat = "MM-dd HH:mm:ss.SSSSSS"
}
@inlinable
public func log(level: LogLevel, message: CustomStringConvertible, file: String, function: String, line: UInt) {
let string = String(format: "%@ %@ %@:%d %@ | %@\n", formatter.string(from: Date()), level.description, file, line, function, message.description)
if let data = string.data(using: .utf8) {
fileHandle.write(data)
}
}
}
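// Example wiring (illustrative, not part of this file; logFileURL is an assumed placeholder):
// if let handle = try? FileHandle(forWritingTo: logFileURL) {
//     KSOptions.logger = FileLog(fileHandle: handle)
//     KSOptions.logLevel = .debug
// }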
@inlinable
public func KSLog(_ error: @autoclosure () -> Error, file: String = #file, function: String = #function, line: UInt = #line) {
KSLog(level: .error, error().localizedDescription, file: file, function: function, line: line)
}
@inlinable
public func KSLog(level: LogLevel = .warning, _ message: @autoclosure () -> CustomStringConvertible, file: String = #file, function: String = #function, line: UInt = #line) {
if level.rawValue <= KSOptions.logLevel.rawValue {
let fileName = (file as NSString).lastPathComponent
KSOptions.logger.log(level: level, message: message(), file: fileName, function: function, line: line)
}
}
@inlinable
public func KSLog(level: LogLevel = .warning, dso: UnsafeRawPointer = #dsohandle, _ message: StaticString, _ args: CVarArg...) {
if level.rawValue <= KSOptions.logLevel.rawValue {
os_log(level.logType, dso: dso, message, args)
}
}
public extension Array {
func toDictionary<Key: Hashable>(with selectKey: (Element) -> Key) -> [Key: Element] {
var dict = [Key: Element]()
forEach { element in
dict[selectKey(element)] = element
}
return dict
}
}
public struct KSClock {
public private(set) var lastMediaTime = CACurrentMediaTime()
public internal(set) var position = Int64(0)
public internal(set) var time = CMTime.zero {
didSet {
lastMediaTime = CACurrentMediaTime()
}
}
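// Extrapolate the current time: the last reported media time plus the wall-clock time elapsed since it was set.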
func getTime() -> TimeInterval {
time.seconds + CACurrentMediaTime() - lastMediaTime
}
}

@@ -0,0 +1,110 @@
//
// KSPictureInPictureController.swift
// KSPlayer
//
// Created by kintan on 2023/1/28.
//
import AVKit
@available(tvOS 14.0, *)
public class KSPictureInPictureController: AVPictureInPictureController {
private static var pipController: KSPictureInPictureController?
private var originalViewController: UIViewController?
private var view: KSPlayerLayer?
private weak var viewController: UIViewController?
private weak var presentingViewController: UIViewController?
#if canImport(UIKit)
private weak var navigationController: UINavigationController?
#endif
func stop(restoreUserInterface: Bool) {
stopPictureInPicture()
delegate = nil
guard KSOptions.isPipPopViewController else {
return
}
KSPictureInPictureController.pipController = nil
if restoreUserInterface {
#if canImport(UIKit)
runOnMainThread { [weak self] in
guard let self, let viewController, let originalViewController else { return }
if let nav = viewController as? UINavigationController,
nav.viewControllers.isEmpty || (nav.viewControllers.count == 1 && nav.viewControllers[0] != originalViewController)
{
nav.viewControllers = [originalViewController]
}
if let navigationController {
var viewControllers = navigationController.viewControllers
if viewControllers.count > 1, let last = viewControllers.last, type(of: last) == type(of: viewController) {
viewControllers[viewControllers.count - 1] = viewController
navigationController.viewControllers = viewControllers
}
if viewControllers.firstIndex(of: viewController) == nil {
// With SwiftUI navigation the popped view can end up empty, so push the player's view controller back.
navigationController.pushViewController(viewController, animated: true)
}
} else {
presentingViewController?.present(originalViewController, animated: true)
}
}
#endif
view?.player.isMuted = false
view?.play()
}
originalViewController = nil
view = nil
}
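// Starting PiP either sends the app to the background (default) or, when
// KSOptions.isPipPopViewController is set, pops/dismisses the hosting view controller so only
// the PiP window keeps playing; stop(restoreUserInterface:) restores that controller afterwards.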
func start(view: KSPlayerLayer) {
startPictureInPicture()
delegate = view
guard KSOptions.isPipPopViewController else {
#if canImport(UIKit)
// Send the app to the background so the PiP window can take over.
runOnMainThread {
UIControl().sendAction(#selector(URLSessionTask.suspend), to: UIApplication.shared, for: nil)
}
#endif
return
}
self.view = view
#if canImport(UIKit)
runOnMainThread { [weak self] in
guard let self, let viewController = view.player.view?.viewController else { return }
originalViewController = viewController
if let navigationController = viewController.navigationController, navigationController.viewControllers.count == 1 {
self.viewController = navigationController
} else {
self.viewController = viewController
}
navigationController = self.viewController?.navigationController
if let pre = KSPictureInPictureController.pipController {
view.player.isMuted = true
pre.view?.isPipActive = false
} else {
if let navigationController {
navigationController.popViewController(animated: true)
#if os(iOS)
if navigationController.tabBarController != nil, navigationController.viewControllers.count == 1 {
DispatchQueue.main.async { [weak self] in
self?.navigationController?.setToolbarHidden(false, animated: true)
}
}
#endif
} else {
presentingViewController = originalViewController?.presentingViewController
originalViewController?.dismiss(animated: true)
}
}
}
#endif
KSPictureInPictureController.pipController = self
}
static func mute() {
pipController?.view?.player.isMuted = true
}
}

@@ -0,0 +1,707 @@
//
// KSPlayerLayerView.swift
// Pods
//
// Created by kintan on 16/4/28.
//
//
import AVFoundation
import AVKit
import MediaPlayer
#if canImport(UIKit)
import UIKit
#else
import AppKit
#endif
/**
Player state enum
- initialized: player created, URL not yet prepared
- preparing: preparing to play
- readyToPlay: player ready to play
- buffering: player buffering
- bufferFinished: buffering finished
- paused: playback paused
- playedToTheEnd: played to the end
- error: error while playing
*/
public enum KSPlayerState: CustomStringConvertible {
case initialized
case preparing
case readyToPlay
case buffering
case bufferFinished
case paused
case playedToTheEnd
case error
public var description: String {
switch self {
case .initialized:
return "initialized"
case .preparing:
return "preparing"
case .readyToPlay:
return "readyToPlay"
case .buffering:
return "buffering"
case .bufferFinished:
return "bufferFinished"
case .paused:
return "paused"
case .playedToTheEnd:
return "playedToTheEnd"
case .error:
return "error"
}
}
public var isPlaying: Bool { self == .buffering || self == .bufferFinished }
}
@MainActor
public protocol KSPlayerLayerDelegate: AnyObject {
func player(layer: KSPlayerLayer, state: KSPlayerState)
func player(layer: KSPlayerLayer, currentTime: TimeInterval, totalTime: TimeInterval)
func player(layer: KSPlayerLayer, finish error: Error?)
func player(layer: KSPlayerLayer, bufferedCount: Int, consumeTime: TimeInterval)
}
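// A delegate is typically a view controller or SwiftUI coordinator (assumed, not shown here)
// that creates the layer roughly as:
//     let layer = KSPlayerLayer(url: url, options: KSOptions(), delegate: self)
// and then drives its UI from the state/time callbacks above.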
open class KSPlayerLayer: NSObject {
public weak var delegate: KSPlayerLayerDelegate?
@Published
public var bufferingProgress: Int = 0
@Published
public var loopCount: Int = 0
@Published
public var isPipActive = false {
didSet {
if #available(tvOS 14.0, *) {
guard let pipController = player.pipController else {
return
}
if isPipActive {
// Start PiP asynchronously on the main queue.
DispatchQueue.main.async { [weak self] in
guard let self else { return }
pipController.start(view: self)
}
} else {
pipController.stop(restoreUserInterface: true)
}
}
}
}
public private(set) var options: KSOptions
public var player: MediaPlayerProtocol {
didSet {
KSLog("player is \(player)")
state = .initialized
runOnMainThread { [weak self] in
guard let self else { return }
if let oldView = oldValue.view, let superview = oldView.superview, let view = player.view {
#if canImport(UIKit)
superview.insertSubview(view, belowSubview: oldView)
#else
superview.addSubview(view, positioned: .below, relativeTo: oldView)
#endif
view.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
view.topAnchor.constraint(equalTo: superview.topAnchor),
view.leadingAnchor.constraint(equalTo: superview.leadingAnchor),
view.bottomAnchor.constraint(equalTo: superview.bottomAnchor),
view.trailingAnchor.constraint(equalTo: superview.trailingAnchor),
])
}
oldValue.view?.removeFromSuperview()
}
player.playbackRate = oldValue.playbackRate
player.playbackVolume = oldValue.playbackVolume
player.delegate = self
player.contentMode = .scaleAspectFit
if isAutoPlay {
prepareToPlay()
}
}
}
public private(set) var url: URL {
didSet {
let firstPlayerType: MediaPlayerProtocol.Type
if isWirelessRouteActive {
// When AirPlay is active, use KSAVPlayer.
firstPlayerType = KSAVPlayer.self
} else if options.display != .plane {
// Non-plane (AR/panorama) display requires KSMEPlayer.
// swiftlint:disable force_cast
firstPlayerType = NSClassFromString("KSPlayer.KSMEPlayer") as! MediaPlayerProtocol.Type
// swiftlint:enable force_cast
} else {
firstPlayerType = KSOptions.firstPlayerType
}
if type(of: player) == firstPlayerType {
if url == oldValue {
if isAutoPlay {
play()
}
} else {
stop()
player.replace(url: url, options: options)
if isAutoPlay {
prepareToPlay()
}
}
} else {
stop()
player = firstPlayerType.init(url: url, options: options)
}
}
}
/// Current player state.
public private(set) var state = KSPlayerState.initialized {
willSet {
if state != newValue {
runOnMainThread { [weak self] in
guard let self else { return }
KSLog("playerStateDidChange - \(newValue)")
self.delegate?.player(layer: self, state: newValue)
}
}
}
}
private lazy var timer: Timer = .scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
guard let self, self.player.isReadyToPlay else {
return
}
self.delegate?.player(layer: self, currentTime: self.player.currentPlaybackTime, totalTime: self.player.duration)
if self.player.playbackState == .playing, self.player.loadState == .playable, self.state == .buffering {
// Playback resumed while we were buffering, so buffering has finished.
self.state = .bufferFinished
}
if self.player.isPlaying {
MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPNowPlayingInfoPropertyElapsedPlaybackTime] = self.player.currentPlaybackTime
}
}
private var urls = [URL]()
private var isAutoPlay: Bool
private var isWirelessRouteActive = false
private var bufferedCount = 0
private var shouldSeekTo: TimeInterval = 0
private var startTime: TimeInterval = 0
public init(url: URL, isAutoPlay: Bool = KSOptions.isAutoPlay, options: KSOptions, delegate: KSPlayerLayerDelegate? = nil) {
self.url = url
self.options = options
self.delegate = delegate
let firstPlayerType: MediaPlayerProtocol.Type
if options.display != .plane {
// Non-plane (AR/panorama) display requires KSMEPlayer.
// swiftlint:disable force_cast
firstPlayerType = NSClassFromString("KSPlayer.KSMEPlayer") as! MediaPlayerProtocol.Type
// swiftlint:enable force_cast
} else {
firstPlayerType = KSOptions.firstPlayerType
}
player = firstPlayerType.init(url: url, options: options)
self.isAutoPlay = isAutoPlay
super.init()
player.playbackRate = options.startPlayRate
if options.registerRemoteControll {
registerRemoteControllEvent()
}
player.delegate = self
player.contentMode = .scaleAspectFit
if isAutoPlay {
prepareToPlay()
}
#if canImport(UIKit)
runOnMainThread { [weak self] in
guard let self else { return }
NotificationCenter.default.addObserver(self, selector: #selector(enterBackground), name: UIApplication.didEnterBackgroundNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(enterForeground), name: UIApplication.willEnterForegroundNotification, object: nil)
}
#if !os(xrOS)
NotificationCenter.default.addObserver(self, selector: #selector(wirelessRouteActiveDidChange(notification:)), name: .MPVolumeViewWirelessRouteActiveDidChange, object: nil)
#endif
#endif
#if !os(macOS)
NotificationCenter.default.addObserver(self, selector: #selector(audioInterrupted), name: AVAudioSession.interruptionNotification, object: nil)
#endif
}
@available(*, unavailable)
public required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
if #available(iOS 15.0, tvOS 15.0, macOS 12.0, *) {
player.pipController?.contentSource = nil
}
NotificationCenter.default.removeObserver(self)
MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
MPRemoteCommandCenter.shared().playCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().pauseCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().togglePlayPauseCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().stopCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().nextTrackCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().previousTrackCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().changeRepeatModeCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().changePlaybackRateCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().skipForwardCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().skipBackwardCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().changePlaybackPositionCommand.removeTarget(nil)
MPRemoteCommandCenter.shared().enableLanguageOptionCommand.removeTarget(nil)
options.playerLayerDeinit()
}
public func set(url: URL, options: KSOptions) {
self.options = options
runOnMainThread {
self.url = url
}
}
public func set(urls: [URL], options: KSOptions) {
self.options = options
self.urls.removeAll()
self.urls.append(contentsOf: urls)
if let first = urls.first {
runOnMainThread {
self.url = first
}
}
}
open func play() {
runOnMainThread {
UIApplication.shared.isIdleTimerDisabled = true
}
isAutoPlay = true
if state == .error || state == .initialized {
prepareToPlay()
}
if player.isReadyToPlay {
if state == .playedToTheEnd {
player.seek(time: 0) { [weak self] finished in
guard let self else { return }
if finished {
self.player.play()
}
}
} else {
player.play()
}
timer.fireDate = Date.distantPast
}
state = player.loadState == .playable ? .bufferFinished : .buffering
MPNowPlayingInfoCenter.default().playbackState = .playing
if #available(tvOS 14.0, *) {
KSPictureInPictureController.mute()
}
}
open func pause() {
isAutoPlay = false
player.pause()
timer.fireDate = Date.distantFuture
state = .paused
MPNowPlayingInfoCenter.default().playbackState = .paused
runOnMainThread {
UIApplication.shared.isIdleTimerDisabled = false
}
}
public func stop() {
KSLog("stop Player")
state = .initialized
player.shutdown()
bufferedCount = 0
shouldSeekTo = 0
player.playbackRate = 1
player.playbackVolume = 1
MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
runOnMainThread {
UIApplication.shared.isIdleTimerDisabled = false
}
}
open func seek(time: TimeInterval, autoPlay: Bool, completion: @escaping ((Bool) -> Void)) {
if time.isInfinite || time.isNaN {
completion(false)
return
}
if player.isReadyToPlay, player.seekable {
player.seek(time: time) { [weak self] finished in
guard let self else { return }
if finished, autoPlay {
self.play()
}
completion(finished)
}
} else {
isAutoPlay = autoPlay
shouldSeekTo = time
completion(false)
}
}
}
// MARK: - MediaPlayerDelegate
extension KSPlayerLayer: MediaPlayerDelegate {
public func readyToPlay(player: some MediaPlayerProtocol) {
state = .readyToPlay
#if os(macOS)
runOnMainThread { [weak self] in
guard let self else { return }
if let window = player.view?.window {
window.isMovableByWindowBackground = true
if options.automaticWindowResize {
let naturalSize = player.naturalSize
if naturalSize.width > 0, naturalSize.height > 0 {
window.aspectRatio = naturalSize
var frame = window.frame
frame.size.height = frame.width * naturalSize.height / naturalSize.width
window.setFrame(frame, display: true)
}
}
}
}
#endif
#if !os(macOS) && !os(tvOS)
if #available(iOS 14.2, *) {
if options.canStartPictureInPictureAutomaticallyFromInline {
player.pipController?.canStartPictureInPictureAutomaticallyFromInline = true
}
}
#endif
updateNowPlayingInfo()
if isAutoPlay {
if shouldSeekTo > 0 {
seek(time: shouldSeekTo, autoPlay: true) { [weak self] _ in
guard let self else { return }
self.shouldSeekTo = 0
}
} else {
play()
}
}
}
public func changeLoadState(player: some MediaPlayerProtocol) {
guard player.playbackState != .seeking else { return }
if player.loadState == .playable, startTime > 0 {
let diff = CACurrentMediaTime() - startTime
runOnMainThread { [weak self] in
guard let self else { return }
delegate?.player(layer: self, bufferedCount: bufferedCount, consumeTime: diff)
}
if bufferedCount == 0 {
var dic = ["firstTime": diff]
if options.tcpConnectedTime > 0 {
dic["initTime"] = options.dnsStartTime - startTime
dic["dnsTime"] = options.tcpStartTime - options.dnsStartTime
dic["tcpTime"] = options.tcpConnectedTime - options.tcpStartTime
dic["openTime"] = options.openTime - options.tcpConnectedTime
dic["findTime"] = options.findTime - options.openTime
} else {
dic["openTime"] = options.openTime - startTime
}
dic["findTime"] = options.findTime - options.openTime
dic["readyTime"] = options.readyTime - options.findTime
dic["readVideoTime"] = options.readVideoTime - options.readyTime
dic["readAudioTime"] = options.readAudioTime - options.readyTime
dic["decodeVideoTime"] = options.decodeVideoTime - options.readVideoTime
dic["decodeAudioTime"] = options.decodeAudioTime - options.readAudioTime
KSLog(dic)
}
bufferedCount += 1
startTime = 0
}
guard state.isPlaying else { return }
if player.loadState == .playable {
state = .bufferFinished
} else {
if state == .bufferFinished {
startTime = CACurrentMediaTime()
}
state = .buffering
}
}
public func changeBuffering(player _: some MediaPlayerProtocol, progress: Int) {
bufferingProgress = progress
}
public func playBack(player _: some MediaPlayerProtocol, loopCount: Int) {
self.loopCount = loopCount
}
public func finish(player: some MediaPlayerProtocol, error: Error?) {
if let error {
if type(of: player) != KSOptions.secondPlayerType, let secondPlayerType = KSOptions.secondPlayerType {
self.player = secondPlayerType.init(url: url, options: options)
return
}
state = .error
KSLog(error as CustomStringConvertible)
} else {
let duration = player.duration
runOnMainThread { [weak self] in
guard let self else { return }
delegate?.player(layer: self, currentTime: duration, totalTime: duration)
}
state = .playedToTheEnd
}
timer.fireDate = Date.distantFuture
bufferedCount = 1
runOnMainThread { [weak self] in
guard let self else { return }
delegate?.player(layer: self, finish: error)
}
if error == nil {
nextPlayer()
}
}
}
// MARK: - AVPictureInPictureControllerDelegate
@available(tvOS 14.0, *)
extension KSPlayerLayer: AVPictureInPictureControllerDelegate {
public func pictureInPictureControllerDidStopPictureInPicture(_: AVPictureInPictureController) {
player.pipController?.stop(restoreUserInterface: false)
}
public func pictureInPictureController(_: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler _: @escaping (Bool) -> Void) {
isPipActive = false
}
}
// MARK: - private functions
extension KSPlayerLayer {
open func prepareToPlay() {
state = .preparing
startTime = CACurrentMediaTime()
bufferedCount = 0
player.prepareToPlay()
}
private func updateNowPlayingInfo() {
if MPNowPlayingInfoCenter.default().nowPlayingInfo == nil {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [MPMediaItemPropertyPlaybackDuration: player.duration]
} else {
MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyPlaybackDuration] = player.duration
}
if MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyTitle] == nil, let title = player.dynamicInfo?.metadata["title"] {
MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyTitle] = title
}
if MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyArtist] == nil, let artist = player.dynamicInfo?.metadata["artist"] {
MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPMediaItemPropertyArtist] = artist
}
var current: [MPNowPlayingInfoLanguageOption] = []
var langs: [MPNowPlayingInfoLanguageOptionGroup] = []
for track in player.tracks(mediaType: .audio) {
if let lang = track.language {
let audioLang = MPNowPlayingInfoLanguageOption(type: .audible, languageTag: lang, characteristics: nil, displayName: track.name, identifier: track.name)
let audioGroup = MPNowPlayingInfoLanguageOptionGroup(languageOptions: [audioLang], defaultLanguageOption: nil, allowEmptySelection: false)
langs.append(audioGroup)
if track.isEnabled {
current.append(audioLang)
}
}
}
if !langs.isEmpty {
MPRemoteCommandCenter.shared().enableLanguageOptionCommand.isEnabled = true
}
MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPNowPlayingInfoPropertyAvailableLanguageOptions] = langs
MPNowPlayingInfoCenter.default().nowPlayingInfo?[MPNowPlayingInfoPropertyCurrentLanguageOptions] = current
}
private func nextPlayer() {
if urls.count > 1, let index = urls.firstIndex(of: url), index < urls.count - 1 {
isAutoPlay = true
url = urls[index + 1]
}
}
private func previousPlayer() {
if urls.count > 1, let index = urls.firstIndex(of: url), index > 0 {
isAutoPlay = true
url = urls[index - 1]
}
}
func seek(time: TimeInterval) {
seek(time: time, autoPlay: options.isSeekedAutoPlay) { _ in
}
}
public func registerRemoteControllEvent() {
let remoteCommand = MPRemoteCommandCenter.shared()
remoteCommand.playCommand.addTarget { [weak self] _ in
guard let self else {
return .commandFailed
}
self.play()
return .success
}
remoteCommand.pauseCommand.addTarget { [weak self] _ in
guard let self else {
return .commandFailed
}
self.pause()
return .success
}
remoteCommand.togglePlayPauseCommand.addTarget { [weak self] _ in
guard let self else {
return .commandFailed
}
if self.state.isPlaying {
self.pause()
} else {
self.play()
}
return .success
}
remoteCommand.stopCommand.addTarget { [weak self] _ in
guard let self else {
return .commandFailed
}
self.player.shutdown()
return .success
}
remoteCommand.nextTrackCommand.addTarget { [weak self] _ in
guard let self else {
return .commandFailed
}
self.nextPlayer()
return .success
}
remoteCommand.previousTrackCommand.addTarget { [weak self] _ in
guard let self else {
return .commandFailed
}
self.previousPlayer()
return .success
}
remoteCommand.changeRepeatModeCommand.addTarget { [weak self] event in
guard let self, let event = event as? MPChangeRepeatModeCommandEvent else {
return .commandFailed
}
self.options.isLoopPlay = event.repeatType != .off
return .success
}
remoteCommand.changeShuffleModeCommand.isEnabled = false
// remoteCommand.changeShuffleModeCommand.addTarget {})
remoteCommand.changePlaybackRateCommand.supportedPlaybackRates = [0.5, 1, 1.5, 2]
remoteCommand.changePlaybackRateCommand.addTarget { [weak self] event in
guard let self, let event = event as? MPChangePlaybackRateCommandEvent else {
return .commandFailed
}
self.player.playbackRate = event.playbackRate
return .success
}
remoteCommand.skipForwardCommand.preferredIntervals = [15]
remoteCommand.skipForwardCommand.addTarget { [weak self] event in
guard let self, let event = event as? MPSkipIntervalCommandEvent else {
return .commandFailed
}
self.seek(time: self.player.currentPlaybackTime + event.interval)
return .success
}
remoteCommand.skipBackwardCommand.preferredIntervals = [15]
remoteCommand.skipBackwardCommand.addTarget { [weak self] event in
guard let self, let event = event as? MPSkipIntervalCommandEvent else {
return .commandFailed
}
self.seek(time: self.player.currentPlaybackTime - event.interval)
return .success
}
remoteCommand.changePlaybackPositionCommand.addTarget { [weak self] event in
guard let self, let event = event as? MPChangePlaybackPositionCommandEvent else {
return .commandFailed
}
self.seek(time: event.positionTime)
return .success
}
remoteCommand.enableLanguageOptionCommand.addTarget { [weak self] event in
guard let self, let event = event as? MPChangeLanguageOptionCommandEvent else {
return .commandFailed
}
let selectLang = event.languageOption
if selectLang.languageOptionType == .audible,
let trackToSelect = self.player.tracks(mediaType: .audio).first(where: { $0.name == selectLang.displayName })
{
self.player.select(track: trackToSelect)
}
return .success
}
}
@objc private func enterBackground() {
guard state.isPlaying, !player.isExternalPlaybackActive else {
return
}
if #available(tvOS 14.0, *), player.pipController?.isPictureInPictureActive == true {
return
}
if KSOptions.canBackgroundPlay {
player.enterBackground()
return
}
pause()
}
@objc private func enterForeground() {
if KSOptions.canBackgroundPlay {
player.enterForeground()
}
}
#if canImport(UIKit) && !os(xrOS)
@MainActor
@objc private func wirelessRouteActiveDidChange(notification: Notification) {
guard let volumeView = notification.object as? MPVolumeView, isWirelessRouteActive != volumeView.isWirelessRouteActive else { return }
if volumeView.isWirelessRouteActive {
if !player.allowsExternalPlayback {
isWirelessRouteActive = true
}
player.usesExternalPlaybackWhileExternalScreenIsActive = true
}
isWirelessRouteActive = volumeView.isWirelessRouteActive
}
#endif
#if !os(macOS)
@objc private func audioInterrupted(notification: Notification) {
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue)
else {
return
}
switch type {
case .began:
pause()
case .ended:
// An interruption ended. Resume playback, if appropriate.
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
play()
}
default:
break
}
}
#endif
}

View File

@@ -0,0 +1,336 @@
//
// KSVideoPlayer.swift
// KSPlayer
//
// Created by kintan on 2023/2/11.
//
import Foundation
import SwiftUI
#if canImport(UIKit)
import UIKit
#else
import AppKit
public typealias UIViewRepresentable = NSViewRepresentable
#endif
public struct KSVideoPlayer {
public private(set) var coordinator: Coordinator
public let url: URL
public let options: KSOptions
public init(coordinator: Coordinator, url: URL, options: KSOptions) {
self.coordinator = coordinator
self.url = url
self.options = options
}
}
extension KSVideoPlayer: UIViewRepresentable {
public func makeCoordinator() -> Coordinator {
coordinator
}
#if canImport(UIKit)
public typealias UIViewType = UIView
public func makeUIView(context: Context) -> UIViewType {
context.coordinator.makeView(url: url, options: options)
}
public func updateUIView(_ view: UIViewType, context: Context) {
updateView(view, context: context)
}
// On iOS/tvOS, onDisappear is called before dismantleUIView.
public static func dismantleUIView(_: UIViewType, coordinator: Coordinator) {
coordinator.resetPlayer()
}
#else
public typealias NSViewType = UIView
public func makeNSView(context: Context) -> NSViewType {
context.coordinator.makeView(url: url, options: options)
}
public func updateNSView(_ view: NSViewType, context: Context) {
updateView(view, context: context)
}
// On macOS, onDisappear is called before dismantleNSView.
public static func dismantleNSView(_ view: NSViewType, coordinator: Coordinator) {
coordinator.resetPlayer()
view.window?.aspectRatio = CGSize(width: 16, height: 9)
}
#endif
@MainActor
private func updateView(_: UIView, context: Context) {
if context.coordinator.playerLayer?.url != url {
_ = context.coordinator.makeView(url: url, options: options)
}
}
@MainActor
public final class Coordinator: ObservableObject {
public var state: KSPlayerState {
playerLayer?.state ?? .initialized
}
@Published
public var isMuted: Bool = false {
didSet {
playerLayer?.player.isMuted = isMuted
}
}
@Published
public var playbackVolume: Float = 1.0 {
didSet {
playerLayer?.player.playbackVolume = playbackVolume
}
}
@Published
public var isScaleAspectFill = false {
didSet {
playerLayer?.player.contentMode = isScaleAspectFill ? .scaleAspectFill : .scaleAspectFit
}
}
@Published
public var playbackRate: Float = 1.0 {
didSet {
playerLayer?.player.playbackRate = playbackRate
}
}
@Published
@MainActor
public var isMaskShow = true {
didSet {
if isMaskShow != oldValue {
mask(show: isMaskShow)
}
}
}
public var subtitleModel = SubtitleModel()
public var timemodel = ControllerTimeModel()
// In a SplitView, re-entering a view can call makeUIView before the previous dismantleUIView, leaving playerLayer nil; prefer clearing playerLayer in onDisappear.
public var playerLayer: KSPlayerLayer? {
didSet {
oldValue?.delegate = nil
oldValue?.pause()
}
}
private var delayHide: DispatchWorkItem?
public var onPlay: ((TimeInterval, TimeInterval) -> Void)?
public var onFinish: ((KSPlayerLayer, Error?) -> Void)?
public var onStateChanged: ((KSPlayerLayer, KSPlayerState) -> Void)?
public var onBufferChanged: ((Int, TimeInterval) -> Void)?
#if canImport(UIKit)
fileprivate var onSwipe: ((UISwipeGestureRecognizer.Direction) -> Void)?
@objc fileprivate func swipeGestureAction(_ recognizer: UISwipeGestureRecognizer) {
onSwipe?(recognizer.direction)
}
#endif
public init() {}
public func makeView(url: URL, options: KSOptions) -> UIView {
defer {
DispatchQueue.main.async { [weak self] in
self?.subtitleModel.url = url
}
}
if let playerLayer {
if playerLayer.url == url {
return playerLayer.player.view ?? UIView()
}
playerLayer.delegate = nil
playerLayer.set(url: url, options: options)
playerLayer.delegate = self
return playerLayer.player.view ?? UIView()
} else {
let playerLayer = KSPlayerLayer(url: url, options: options, delegate: self)
self.playerLayer = playerLayer
return playerLayer.player.view ?? UIView()
}
}
public func resetPlayer() {
onStateChanged = nil
onPlay = nil
onFinish = nil
onBufferChanged = nil
#if canImport(UIKit)
onSwipe = nil
#endif
playerLayer = nil
delayHide?.cancel()
delayHide = nil
subtitleModel.selectedSubtitleInfo?.isEnabled = false
}
public func skip(interval: Int) {
if let playerLayer {
seek(time: playerLayer.player.currentPlaybackTime + TimeInterval(interval))
}
}
public func seek(time: TimeInterval) {
playerLayer?.seek(time: TimeInterval(time))
}
@MainActor
public func mask(show: Bool, autoHide: Bool = true) {
isMaskShow = show
if show {
delayHide?.cancel()
// Only schedule auto-hide once buffering has finished and playback is running.
guard state == .bufferFinished else { return }
if autoHide {
delayHide = DispatchWorkItem { [weak self] in
guard let self else { return }
if self.state == .bufferFinished {
self.isMaskShow = false
}
}
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + KSOptions.animateDelayTimeInterval,
execute: delayHide!)
}
}
#if os(macOS)
show ? NSCursor.unhide() : NSCursor.setHiddenUntilMouseMoves(true)
if let window = playerLayer?.player.view?.window {
if !window.styleMask.contains(.fullScreen) {
window.standardWindowButton(.closeButton)?.superview?.superview?.isHidden = !show
// window.standardWindowButton(.zoomButton)?.isHidden = !show
// window.standardWindowButton(.closeButton)?.isHidden = !show
// window.standardWindowButton(.miniaturizeButton)?.isHidden = !show
// window.titleVisibility = show ? .visible : .hidden
}
}
#endif
}
}
}
extension KSVideoPlayer.Coordinator: KSPlayerLayerDelegate {
public func player(layer: KSPlayerLayer, state: KSPlayerState) {
onStateChanged?(layer, state)
if state == .readyToPlay {
playbackRate = layer.player.playbackRate
if let subtitleDataSouce = layer.player.subtitleDataSouce {
// Defer adding subtitles briefly after readyToPlay so the subtitle data source has its infos populated.
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 1) { [weak self] in
guard let self else { return }
self.subtitleModel.addSubtitle(dataSouce: subtitleDataSouce)
if self.subtitleModel.selectedSubtitleInfo == nil, layer.options.autoSelectEmbedSubtitle {
self.subtitleModel.selectedSubtitleInfo = subtitleDataSouce.infos.first { $0.isEnabled }
}
}
}
} else if state == .bufferFinished {
isMaskShow = false
} else {
isMaskShow = true
#if canImport(UIKit)
if state == .preparing, let view = layer.player.view {
let swipeDown = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
swipeDown.direction = .down
view.addGestureRecognizer(swipeDown)
let swipeLeft = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
swipeLeft.direction = .left
view.addGestureRecognizer(swipeLeft)
let swipeRight = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
swipeRight.direction = .right
view.addGestureRecognizer(swipeRight)
let swipeUp = UISwipeGestureRecognizer(target: self, action: #selector(swipeGestureAction(_:)))
swipeUp.direction = .up
view.addGestureRecognizer(swipeUp)
}
#endif
}
}
public func player(layer _: KSPlayerLayer, currentTime: TimeInterval, totalTime: TimeInterval) {
onPlay?(currentTime, totalTime)
if currentTime >= Double(Int.max) || currentTime <= Double(Int.min) || totalTime >= Double(Int.max) || totalTime <= Double(Int.min) {
return
}
let current = Int(currentTime)
let total = Int(max(0, totalTime))
if timemodel.currentTime != current {
timemodel.currentTime = current
}
if timemodel.totalTime != total {
timemodel.totalTime = total
}
_ = subtitleModel.subtitle(currentTime: currentTime)
}
public func player(layer: KSPlayerLayer, finish error: Error?) {
onFinish?(layer, error)
}
public func player(layer _: KSPlayerLayer, bufferedCount: Int, consumeTime: TimeInterval) {
onBufferChanged?(bufferedCount, consumeTime)
}
}
extension KSVideoPlayer: Equatable {
public static func == (lhs: KSVideoPlayer, rhs: KSVideoPlayer) -> Bool {
lhs.url == rhs.url
}
}
@MainActor
public extension KSVideoPlayer {
func onBufferChanged(_ handler: @escaping (Int, TimeInterval) -> Void) -> Self {
coordinator.onBufferChanged = handler
return self
}
/// Called when playback finishes, either by reaching the end or by failing with an error.
func onFinish(_ handler: @escaping (KSPlayerLayer, Error?) -> Void) -> Self {
coordinator.onFinish = handler
return self
}
func onPlay(_ handler: @escaping (TimeInterval, TimeInterval) -> Void) -> Self {
coordinator.onPlay = handler
return self
}
/// Called when the playback state changes, such as from playing to paused.
func onStateChanged(_ handler: @escaping (KSPlayerLayer, KSPlayerState) -> Void) -> Self {
coordinator.onStateChanged = handler
return self
}
#if canImport(UIKit)
func onSwipe(_ handler: @escaping (UISwipeGestureRecognizer.Direction) -> Void) -> Self {
coordinator.onSwipe = handler
return self
}
#endif
}
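// Usage sketch (illustrative client code, not part of this file; `videoURL` is a
// placeholder and KSOptions() is assumed to use its default initializer):
//
//     struct PlayerScreen: View {
//         @StateObject private var coordinator = KSVideoPlayer.Coordinator()
//         let videoURL: URL
//         var body: some View {
//             KSVideoPlayer(coordinator: coordinator, url: videoURL, options: KSOptions())
//                 .onStateChanged { _, state in print("state:", state) }
//                 .onPlay { current, total in print("progress:", current, "/", total) }
//                 .onFinish { _, error in print("finished:", error?.localizedDescription ?? "ok") }
//         }
//     }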
extension View {
func then(_ body: (inout Self) -> Void) -> Self {
var result = self
body(&result)
return result
}
}
/// Frequently updated playback-time model; keep the views observing it small to limit re-rendering.
public class ControllerTimeModel: ObservableObject {
// Stored as Int so observers only refresh on whole-second changes.
@Published
public var currentTime = 0
@Published
public var totalTime = 1
}
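// Usage sketch (illustrative, not part of this file): a small label observing the
// coordinator's time model so only the label re-renders each second.
//
//     struct TimeLabel: View {
//         @ObservedObject var model: ControllerTimeModel
//         var body: some View {
//             Text("\(model.currentTime.toString(for: .minOrHour)) / \(model.totalTime.toString(for: .minOrHour))")
//         }
//     }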

View File

@@ -0,0 +1,335 @@
//
// MediaPlayerProtocol.swift
// KSPlayer-tvOS
//
// Created by kintan on 2018/3/9.
//
import AVFoundation
import Foundation
#if canImport(UIKit)
import UIKit
#else
import AppKit
#endif
public protocol MediaPlayback: AnyObject {
var duration: TimeInterval { get }
var fileSize: Double { get }
var naturalSize: CGSize { get }
var chapters: [Chapter] { get }
var currentPlaybackTime: TimeInterval { get }
func prepareToPlay()
func shutdown()
func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void))
}
public class DynamicInfo: ObservableObject {
private let metadataBlock: () -> [String: String]
private let bytesReadBlock: () -> Int64
private let audioBitrateBlock: () -> Int
private let videoBitrateBlock: () -> Int
public var metadata: [String: String] {
metadataBlock()
}
public var bytesRead: Int64 {
bytesReadBlock()
}
public var audioBitrate: Int {
audioBitrateBlock()
}
public var videoBitrate: Int {
videoBitrateBlock()
}
@Published
public var displayFPS = 0.0
public var audioVideoSyncDiff = 0.0
public var droppedVideoFrameCount = UInt32(0)
public var droppedVideoPacketCount = UInt32(0)
init(metadata: @escaping () -> [String: String], bytesRead: @escaping () -> Int64, audioBitrate: @escaping () -> Int, videoBitrate: @escaping () -> Int) {
metadataBlock = metadata
bytesReadBlock = bytesRead
audioBitrateBlock = audioBitrate
videoBitrateBlock = videoBitrate
}
}
public struct Chapter {
public let start: TimeInterval
public let end: TimeInterval
public let title: String
}
public protocol MediaPlayerProtocol: MediaPlayback {
var delegate: MediaPlayerDelegate? { get set }
var view: UIView? { get }
var playableTime: TimeInterval { get }
var isReadyToPlay: Bool { get }
var playbackState: MediaPlaybackState { get }
var loadState: MediaLoadState { get }
var isPlaying: Bool { get }
var seekable: Bool { get }
// var numberOfBytesTransferred: Int64 { get }
var isMuted: Bool { get set }
var allowsExternalPlayback: Bool { get set }
var usesExternalPlaybackWhileExternalScreenIsActive: Bool { get set }
var isExternalPlaybackActive: Bool { get }
var playbackRate: Float { get set }
var playbackVolume: Float { get set }
var contentMode: UIViewContentMode { get set }
var subtitleDataSouce: SubtitleDataSouce? { get }
@available(macOS 12.0, iOS 15.0, tvOS 15.0, *)
var playbackCoordinator: AVPlaybackCoordinator { get }
@available(tvOS 14.0, *)
var pipController: KSPictureInPictureController? { get }
var dynamicInfo: DynamicInfo? { get }
init(url: URL, options: KSOptions)
func replace(url: URL, options: KSOptions)
func play()
func pause()
func enterBackground()
func enterForeground()
func thumbnailImageAtCurrentTime() async -> CGImage?
func tracks(mediaType: AVFoundation.AVMediaType) -> [MediaPlayerTrack]
func select(track: some MediaPlayerTrack)
}
public extension MediaPlayerProtocol {
var nominalFrameRate: Float {
tracks(mediaType: .video).first { $0.isEnabled }?.nominalFrameRate ?? 0
}
}
@MainActor
public protocol MediaPlayerDelegate: AnyObject {
func readyToPlay(player: some MediaPlayerProtocol)
func changeLoadState(player: some MediaPlayerProtocol)
// buffering progress in the range 0-100
func changeBuffering(player: some MediaPlayerProtocol, progress: Int)
func playBack(player: some MediaPlayerProtocol, loopCount: Int)
func finish(player: some MediaPlayerProtocol, error: Error?)
}
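// Sketch of a minimal delegate conformer (illustrative only; the library's own
// player layer is the real delegate):
//
//     @MainActor final class LoggingDelegate: MediaPlayerDelegate {
//         func readyToPlay(player: some MediaPlayerProtocol) { print("ready, duration:", player.duration) }
//         func changeLoadState(player: some MediaPlayerProtocol) { print("load state:", player.loadState) }
//         func changeBuffering(player: some MediaPlayerProtocol, progress: Int) { print("buffering \(progress)%") }
//         func playBack(player: some MediaPlayerProtocol, loopCount: Int) { print("loop count:", loopCount) }
//         func finish(player: some MediaPlayerProtocol, error: Error?) { print("finished:", error?.localizedDescription ?? "ok") }
//     }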
public protocol MediaPlayerTrack: AnyObject, CustomStringConvertible {
var trackID: Int32 { get }
var name: String { get }
var languageCode: String? { get }
var mediaType: AVFoundation.AVMediaType { get }
var nominalFrameRate: Float { get set }
var bitRate: Int64 { get }
var bitDepth: Int32 { get }
var isEnabled: Bool { get set }
var isImageSubtitle: Bool { get }
var rotation: Int16 { get }
var dovi: DOVIDecoderConfigurationRecord? { get }
var fieldOrder: FFmpegFieldOrder { get }
var formatDescription: CMFormatDescription? { get }
}
// public extension MediaPlayerTrack: Identifiable {
// var id: Int32 { trackID }
// }
public enum MediaPlaybackState: Int {
case idle
case playing
case paused
case seeking
case finished
case stopped
}
public enum MediaLoadState: Int {
case idle
case loading
case playable
}
// swiftlint:disable identifier_name
public struct DOVIDecoderConfigurationRecord {
public let dv_version_major: UInt8
public let dv_version_minor: UInt8
public let dv_profile: UInt8
public let dv_level: UInt8
public let rpu_present_flag: UInt8
public let el_present_flag: UInt8
public let bl_present_flag: UInt8
public let dv_bl_signal_compatibility_id: UInt8
}
public enum FFmpegFieldOrder: UInt8 {
case unknown = 0
case progressive
case tt // Top coded first, top displayed first
case bb // Bottom coded first, bottom displayed first
case tb // Top coded first, bottom displayed first
case bt // Bottom coded first, top displayed first
}
extension FFmpegFieldOrder: CustomStringConvertible {
public var description: String {
switch self {
case .unknown, .progressive:
return "progressive"
case .tt:
return "top first"
case .bb:
return "bottom first"
case .tb:
return "top coded first (swapped)"
case .bt:
return "bottom coded first (swapped)"
}
}
}
// swiftlint:enable identifier_name
public extension MediaPlayerTrack {
var language: String? {
languageCode.flatMap {
Locale.current.localizedString(forLanguageCode: $0)
}
}
var codecType: FourCharCode {
mediaSubType.rawValue
}
var dynamicRange: DynamicRange? {
if dovi != nil {
return .dolbyVision
} else {
return formatDescription?.dynamicRange
}
}
var colorSpace: CGColorSpace? {
KSOptions.colorSpace(ycbcrMatrix: yCbCrMatrix as CFString?, transferFunction: transferFunction as CFString?)
}
var mediaSubType: CMFormatDescription.MediaSubType {
formatDescription?.mediaSubType ?? .boxed
}
var audioStreamBasicDescription: AudioStreamBasicDescription? {
formatDescription?.audioStreamBasicDescription
}
var naturalSize: CGSize {
formatDescription?.naturalSize ?? .zero
}
var colorPrimaries: String? {
formatDescription?.colorPrimaries
}
var transferFunction: String? {
formatDescription?.transferFunction
}
var yCbCrMatrix: String? {
formatDescription?.yCbCrMatrix
}
}
public extension CMFormatDescription {
var dynamicRange: DynamicRange {
let contentRange: DynamicRange
if codecType.string == "dvhe" || codecType == kCMVideoCodecType_DolbyVisionHEVC {
contentRange = .dolbyVision
} else if bitDepth == 10 || transferFunction == kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ as String { /// HDR
contentRange = .hdr10
} else if transferFunction == kCVImageBufferTransferFunction_ITU_R_2100_HLG as String { /// HLG
contentRange = .hlg
} else {
contentRange = .sdr
}
return contentRange
}
var bitDepth: Int32 {
codecType.bitDepth
}
var codecType: FourCharCode {
mediaSubType.rawValue
}
var colorPrimaries: String? {
if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
return dictionary[kCVImageBufferColorPrimariesKey] as? String
} else {
return nil
}
}
var transferFunction: String? {
if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
return dictionary[kCVImageBufferTransferFunctionKey] as? String
} else {
return nil
}
}
var yCbCrMatrix: String? {
if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
return dictionary[kCVImageBufferYCbCrMatrixKey] as? String
} else {
return nil
}
}
var naturalSize: CGSize {
let aspectRatio = aspectRatio
return CGSize(width: Int(dimensions.width), height: Int(CGFloat(dimensions.height) * aspectRatio.height / aspectRatio.width))
}
var aspectRatio: CGSize {
if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
if let ratio = dictionary[kCVImageBufferPixelAspectRatioKey] as? NSDictionary,
let horizontal = (ratio[kCVImageBufferPixelAspectRatioHorizontalSpacingKey] as? NSNumber)?.intValue,
let vertical = (ratio[kCVImageBufferPixelAspectRatioVerticalSpacingKey] as? NSNumber)?.intValue,
horizontal > 0, vertical > 0
{
return CGSize(width: horizontal, height: vertical)
}
}
return CGSize(width: 1, height: 1)
}
var depth: Int32 {
if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
return dictionary[kCMFormatDescriptionExtension_Depth] as? Int32 ?? 24
} else {
return 24
}
}
var fullRangeVideo: Bool {
if let dictionary = CMFormatDescriptionGetExtensions(self) as NSDictionary? {
return dictionary[kCMFormatDescriptionExtension_FullRangeVideo] as? Bool ?? false
} else {
return false
}
}
}
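// Copies the system HTTP proxy (if one is configured) into the `http_proxy`
// environment variable so the lower-level networking stack (presumably FFmpeg's
// avio layer) can pick it up.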
func setHttpProxy() {
guard KSOptions.useSystemHTTPProxy else {
return
}
guard let proxySettings = CFNetworkCopySystemProxySettings()?.takeUnretainedValue() as? NSDictionary else {
unsetenv("http_proxy")
return
}
guard let proxyHost = proxySettings[kCFNetworkProxiesHTTPProxy] as? String, let proxyPort = proxySettings[kCFNetworkProxiesHTTPPort] as? Int else {
unsetenv("http_proxy")
return
}
let httpProxy = "http://\(proxyHost):\(proxyPort)"
setenv("http_proxy", httpProxy, 0)
}

View File

@@ -0,0 +1,378 @@
//
// PlayerDefines.swift
// KSPlayer
//
// Created by kintan on 2018/3/9.
//
import AVFoundation
import CoreMedia
import CoreServices
#if canImport(UIKit)
import UIKit
public extension KSOptions {
@MainActor
static var windowScene: UIWindowScene? {
UIApplication.shared.connectedScenes.first as? UIWindowScene
}
@MainActor
static var sceneSize: CGSize {
let window = windowScene?.windows.first
return window?.bounds.size ?? .zero
}
}
#else
import AppKit
import SwiftUI
public typealias UIView = NSView
public typealias UIPasteboard = NSPasteboard
public extension KSOptions {
static var sceneSize: CGSize {
NSScreen.main?.frame.size ?? .zero
}
}
#endif
// extension MediaPlayerTrack {
// static func == (lhs: Self, rhs: Self) -> Bool {
// lhs.trackID == rhs.trackID
// }
// }
public enum DynamicRange: Int32 {
case sdr = 0
case hdr10 = 2
case hlg = 3
case dolbyVision = 5
#if canImport(UIKit)
var hdrMode: AVPlayer.HDRMode {
switch self {
case .sdr:
return AVPlayer.HDRMode(rawValue: 0)
case .hdr10:
return .hdr10 // 2
case .hlg:
return .hlg // 1
case .dolbyVision:
return .dolbyVision // 4
}
}
#endif
public static var availableHDRModes: [DynamicRange] {
#if os(macOS)
if NSScreen.main?.maximumPotentialExtendedDynamicRangeColorComponentValue ?? 1.0 > 1.0 {
return [.hdr10]
} else {
return [.sdr]
}
#else
let availableHDRModes = AVPlayer.availableHDRModes
if availableHDRModes == AVPlayer.HDRMode(rawValue: 0) {
return [.sdr]
} else {
var modes = [DynamicRange]()
if availableHDRModes.contains(.dolbyVision) {
modes.append(.dolbyVision)
}
if availableHDRModes.contains(.hdr10) {
modes.append(.hdr10)
}
if availableHDRModes.contains(.hlg) {
modes.append(.hlg)
}
return modes
}
#endif
}
}
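// Usage sketch (illustrative): picking the best range the current display supports.
//
//     let modes = DynamicRange.availableHDRModes
//     let target: DynamicRange = modes.contains(.dolbyVision) ? .dolbyVision
//         : modes.contains(.hdr10) ? .hdr10
//         : modes.contains(.hlg) ? .hlg
//         : .sdr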
extension DynamicRange: CustomStringConvertible {
public var description: String {
switch self {
case .sdr:
return "SDR"
case .hdr10:
return "HDR10"
case .hlg:
return "HLG"
case .dolbyVision:
return "Dolby Vision"
}
}
}
extension DynamicRange {
var colorPrimaries: CFString {
switch self {
case .sdr:
return kCVImageBufferColorPrimaries_ITU_R_709_2
case .hdr10, .hlg, .dolbyVision:
return kCVImageBufferColorPrimaries_ITU_R_2020
}
}
var transferFunction: CFString {
switch self {
case .sdr:
return kCVImageBufferTransferFunction_ITU_R_709_2
case .hdr10:
return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ
case .hlg, .dolbyVision:
return kCVImageBufferTransferFunction_ITU_R_2100_HLG
}
}
var yCbCrMatrix: CFString {
switch self {
case .sdr:
return kCVImageBufferYCbCrMatrix_ITU_R_709_2
case .hdr10, .hlg, .dolbyVision:
return kCVImageBufferYCbCrMatrix_ITU_R_2020
}
}
}
@MainActor
public enum DisplayEnum {
case plane
// swiftlint:disable identifier_name
case vr
// swiftlint:enable identifier_name
case vrBox
}
public struct VideoAdaptationState {
public struct BitRateState {
let bitRate: Int64
let time: TimeInterval
}
public let bitRates: [Int64]
public let duration: TimeInterval
public internal(set) var fps: Float
public internal(set) var bitRateStates: [BitRateState]
public internal(set) var currentPlaybackTime: TimeInterval = 0
public internal(set) var isPlayable: Bool = false
public internal(set) var loadedCount: Int = 0
}
public enum ClockProcessType {
case remain
case next
case dropNextFrame
case dropNextPacket
case dropGOPPacket
case flush
case seek
}
// Buffer capacity statistics reported by a track's packet and frame queues.
public protocol CapacityProtocol {
var fps: Float { get }
var packetCount: Int { get }
var frameCount: Int { get }
var frameMaxCount: Int { get }
var isEndOfFile: Bool { get }
var mediaType: AVFoundation.AVMediaType { get }
}
extension CapacityProtocol {
var loadedTime: TimeInterval {
TimeInterval(packetCount + frameCount) / TimeInterval(fps)
}
}
public struct LoadingState {
public let loadedTime: TimeInterval
public let progress: TimeInterval
public let packetCount: Int
public let frameCount: Int
public let isEndOfFile: Bool
public let isPlayable: Bool
public let isFirst: Bool
public let isSeek: Bool
}
public let KSPlayerErrorDomain = "KSPlayerErrorDomain"
public enum KSPlayerErrorCode: Int {
case unknown
case formatCreate
case formatOpenInput
case formatOutputCreate
case formatWriteHeader
case formatFindStreamInfo
case readFrame
case codecContextCreate
case codecContextSetParam
case codecContextFindDecoder
case codesContextOpen
case codecVideoSendPacket
case codecAudioSendPacket
case codecVideoReceiveFrame
case codecAudioReceiveFrame
case auidoSwrInit
case codecSubtitleSendPacket
case videoTracksUnplayable
case subtitleUnEncoding
case subtitleUnParse
case subtitleFormatUnSupport
case subtitleParamsEmpty
}
extension KSPlayerErrorCode: CustomStringConvertible {
public var description: String {
switch self {
case .formatCreate:
return "avformat_alloc_context return nil"
case .formatOpenInput:
return "avformat can't open input"
case .formatOutputCreate:
return "avformat_alloc_output_context2 fail"
case .formatWriteHeader:
return "avformat_write_header fail"
case .formatFindStreamInfo:
return "avformat_find_stream_info return nil"
case .codecContextCreate:
return "avcodec_alloc_context3 return nil"
case .codecContextSetParam:
return "avcodec can't set parameters to context"
case .codesContextOpen:
return "codec context can't open"
case .codecVideoReceiveFrame:
return "avcodec can't receive video frame"
case .codecAudioReceiveFrame:
return "avcodec can't receive audio frame"
case .videoTracksUnplayable:
return "Video tracks are not playable."
case .codecSubtitleSendPacket:
return "avcodec can't decode subtitle"
case .subtitleUnEncoding:
return "Subtitle encoding format is not supported."
case .subtitleUnParse:
return "Subtitle parsing error"
case .subtitleFormatUnSupport:
return "Current subtitle format is not supported"
case .subtitleParamsEmpty:
return "Subtitle params are empty"
case .auidoSwrInit:
return "swr_init swrContext fail"
default:
return "unknown"
}
}
}
extension NSError {
convenience init(errorCode: KSPlayerErrorCode, userInfo: [String: Any] = [:]) {
var userInfo = userInfo
userInfo[NSLocalizedDescriptionKey] = errorCode.description
self.init(domain: KSPlayerErrorDomain, code: errorCode.rawValue, userInfo: userInfo)
}
convenience init(description: String) {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = description
self.init(domain: KSPlayerErrorDomain, code: 0, userInfo: userInfo)
}
}
#if !SWIFT_PACKAGE
extension Bundle {
static let module = Bundle(for: KSPlayerLayer.self).path(forResource: "KSPlayer_KSPlayer", ofType: "bundle").flatMap { Bundle(path: $0) } ?? Bundle.main
}
#endif
public enum TimeType {
case min
case hour
case minOrHour
case millisecond
}
public extension TimeInterval {
func toString(for type: TimeType) -> String {
Int(ceil(self)).toString(for: type)
}
}
public extension Int {
func toString(for type: TimeType) -> String {
var second = self
var min = second / 60
second -= min * 60
switch type {
case .min:
return String(format: "%02d:%02d", min, second)
case .hour:
let hour = min / 60
min -= hour * 60
return String(format: "%d:%02d:%02d", hour, min, second)
case .minOrHour:
let hour = min / 60
if hour > 0 {
min -= hour * 60
return String(format: "%d:%02d:%02d", hour, min, second)
} else {
return String(format: "%02d:%02d", min, second)
}
case .millisecond:
var time = self * 100
let millisecond = time % 100
time /= 100
let sec = time % 60
time /= 60
let min = time % 60
time /= 60
let hour = time % 60
if hour > 0 {
return String(format: "%d:%02d:%02d.%02d", hour, min, sec, millisecond)
} else {
return String(format: "%02d:%02d.%02d", min, sec, millisecond)
}
}
}
}
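// Examples of the formatting above (illustrative):
//
//     125.toString(for: .min)          // "02:05"
//     3725.toString(for: .minOrHour)   // "1:02:05"
//     3725.toString(for: .millisecond) // "1:02:05.00" (two fractional digits)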
public extension FixedWidthInteger {
var kmFormatted: String {
Double(self).kmFormatted
}
}
open class AbstractAVIOContext {
let bufferSize: Int32
let writable: Bool
public init(bufferSize: Int32 = 32 * 1024, writable: Bool = false) {
self.bufferSize = bufferSize
self.writable = writable
}
open func read(buffer _: UnsafePointer<UInt8>?, size: Int32) -> Int32 {
size
}
open func write(buffer _: UnsafePointer<UInt8>?, size: Int32) -> Int32 {
size
}
/**
#define SEEK_SET 0 /* set file offset to offset */
#define SEEK_CUR 1 /* set file offset to current plus offset */
#define SEEK_END 2 /* set file offset to EOF plus offset */
*/
open func seek(offset: Int64, whence _: Int32) -> Int64 {
offset
}
open func fileSize() -> Int64 {
-1
}
open func close() {}
deinit {}
}
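// Sketch of a custom IO source built on AbstractAVIOContext (illustrative only;
// the FFmpeg hookup lives elsewhere in the library, and whence handling here is
// simplified to SEEK_SET):
//
//     final class DataIOContext: AbstractAVIOContext {
//         private let data: Data
//         private var offset: Int64 = 0
//         init(data: Data) {
//             self.data = data
//             super.init()
//         }
//         override func read(buffer: UnsafePointer<UInt8>?, size: Int32) -> Int32 {
//             guard let buffer else { return 0 }
//             let count = min(Int(size), data.count - Int(offset))
//             guard count > 0 else { return 0 }
//             data.copyBytes(to: UnsafeMutablePointer(mutating: buffer), from: Int(offset) ..< Int(offset) + count)
//             offset += Int64(count)
//             return Int32(count)
//         }
//         override func seek(offset: Int64, whence _: Int32) -> Int64 {
//             self.offset = offset
//             return offset
//         }
//         override func fileSize() -> Int64 { Int64(data.count) }
//     }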