Files
simvision/KSPlayer-main/Sources/KSPlayer/MEPlayer/AudioRendererPlayer.swift
Michael Simard 872354b834 Initial commit: SimVision tvOS streaming app
Features:
- VOD library with movie grouping and version detection
- TV show library with season/episode organization
- TMDB integration for trending shows and recently aired episodes
- Recent releases section with TMDB release date sorting
- Watch history tracking with continue watching
- Playlist caching (12-hour TTL) for offline support
- M3U playlist parsing with XStream API support
- Authentication with credential storage

Technical:
- SwiftUI for tvOS
- Actor-based services for thread safety
- Persistent caching for playlists, TMDB data, and watch history
- KSPlayer integration for video playback

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-21 22:12:08 -06:00

144 lines
4.7 KiB
Swift
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
//
// AudioRendererPlayer.swift
// KSPlayer
//
// Created by kintan on 2022/12/2.
//
import AVFoundation
import Foundation
/// Audio output that plays decoded audio frames through an
/// `AVSampleBufferAudioRenderer`, driven by an
/// `AVSampleBufferRenderSynchronizer` that owns the playback clock and rate.
public class AudioRendererPlayer: AudioOutput {
    /// Playback speed multiplier. Applied to the synchronizer immediately,
    /// unless playback is paused (a synchronizer rate of 0 means paused, so
    /// writing the rate here would inadvertently resume playback).
    public var playbackRate: Float = 1 {
        didSet {
            if !isPaused {
                synchronizer.rate = playbackRate
            }
        }
    }

    /// Output volume, forwarded to the underlying renderer.
    public var volume: Float {
        get {
            renderer.volume
        }
        set {
            renderer.volume = newValue
        }
    }

    /// Mute state, forwarded to the underlying renderer.
    public var isMuted: Bool {
        get {
            renderer.isMuted
        }
        set {
            renderer.isMuted = newValue
        }
    }

    public weak var renderSource: OutputRenderSourceDelegate?
    // Token returned by addPeriodicTimeObserver(forInterval:queue:); it must be
    // removed from the synchronizer before being discarded, otherwise the
    // observer block keeps firing.
    private var periodicTimeObserver: Any?
    private let renderer = AVSampleBufferAudioRenderer()
    private let synchronizer = AVSampleBufferRenderSynchronizer()
    private let serializationQueue = DispatchQueue(label: "ks.player.serialization.queue")

    /// Paused state is derived from the synchronizer's rate (0 == paused).
    var isPaused: Bool {
        synchronizer.rate == 0
    }

    public required init() {
        synchronizer.addRenderer(renderer)
        if #available(macOS 11.3, iOS 14.5, tvOS 14.5, *) {
            // Start rate changes immediately instead of waiting for the
            // renderer to accumulate enough media on its own.
            synchronizer.delaysRateChangeUntilHasSufficientMediaData = false
        }
        // if #available(tvOS 15.0, iOS 15.0, macOS 12.0, *) {
        //     renderer.allowedAudioSpatializationFormats = .monoStereoAndMultichannel
        // }
    }

    /// Asks the shared audio session to match the source's channel count
    /// (no-op on macOS, which has no AVAudioSession).
    public func prepare(audioFormat: AVAudioFormat) {
        #if !os(macOS)
        try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(audioFormat.channelCount))
        KSLog("[audio] set preferredOutputNumberOfChannels: \(audioFormat.channelCount)")
        #endif
    }

    /// Starts (or resumes) playback: chooses an anchor time, sets the
    /// synchronizer rate, and installs the media-request callback and the
    /// periodic time observer that reports progress back to the render source.
    public func play() {
        let time: CMTime
        if #available(macOS 11.3, iOS 14.5, tvOS 14.5, *) {
            // After a seek the renderer may already hold enough buffered media;
            // in that case resume from the synchronizer's own clock. Otherwise
            // anchor on the next frame's timestamp from the render source.
            if renderer.hasSufficientMediaDataForReliablePlaybackStart {
                time = synchronizer.currentTime()
            } else {
                if let currentRender = renderSource?.getAudioOutputRender() {
                    time = currentRender.cmtime
                } else {
                    time = .zero
                }
            }
        } else {
            if let currentRender = renderSource?.getAudioOutputRender() {
                time = currentRender.cmtime
            } else {
                time = .zero
            }
        }
        synchronizer.setRate(playbackRate, time: time)
        renderSource?.setAudio(time: time, position: -1)
        renderer.requestMediaDataWhenReady(on: serializationQueue) { [weak self] in
            guard let self else {
                return
            }
            self.request()
        }
        // Fix: remove any observer left over from a previous play() call so
        // that repeated play() invocations do not accumulate periodic
        // observers, which would keep firing setAudio callbacks forever.
        if let periodicTimeObserver {
            synchronizer.removeTimeObserver(periodicTimeObserver)
            self.periodicTimeObserver = nil
        }
        periodicTimeObserver = synchronizer.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.01), queue: .main) { [weak self] time in
            guard let self else {
                return
            }
            self.renderSource?.setAudio(time: time, position: -1)
        }
    }

    /// Pauses playback and tears down the media-request callback and the
    /// periodic time observer installed by play().
    public func pause() {
        synchronizer.rate = 0
        renderer.stopRequestingMediaData()
        if let periodicTimeObserver {
            synchronizer.removeTimeObserver(periodicTimeObserver)
            self.periodicTimeObserver = nil
        }
    }

    /// Discards all media currently enqueued on the renderer (e.g. on seek).
    public func flush() {
        renderer.flush()
    }

    /// Pulls frames from the render source while the renderer wants data,
    /// batching several frames into one sample buffer before enqueueing.
    private func request() {
        while renderer.isReadyForMoreMediaData, !isPaused {
            guard var render = renderSource?.getAudioOutputRender() else {
                break
            }
            var array = [render]
            // Batch roughly 50 ms of audio (sampleRate / 20 samples) per
            // enqueue to cut per-buffer overhead; the "- 2" leaves headroom so
            // we don't over-pull from the source. TODO(review): confirm the
            // intended batch size against upstream KSPlayer.
            let loopCount = Int32(render.audioFormat.sampleRate) / 20 / Int32(render.numberOfSamples) - 2
            if loopCount > 0 {
                for _ in 0 ..< loopCount {
                    if let render = renderSource?.getAudioOutputRender() {
                        array.append(render)
                    }
                }
            }
            if array.count > 1 {
                render = AudioFrame(array: array)
            }
            if let sampleBuffer = render.toCMSampleBuffer() {
                let channelCount = render.audioFormat.channelCount
                // Spectral pitch correction preserves >2-channel layouts;
                // timeDomain is cheaper for mono/stereo.
                renderer.audioTimePitchAlgorithm = channelCount > 2 ? .spectral : .timeDomain
                renderer.enqueue(sampleBuffer)
                #if !os(macOS)
                // Fix: compare the preferred OUTPUT channel count — the
                // original checked preferredInputNumberOfChannels, so the
                // guard inspected the wrong property before reconfiguring
                // the output channel count.
                if AVAudioSession.sharedInstance().preferredOutputNumberOfChannels != channelCount {
                    try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(channelCount))
                }
                #endif
            }
        }
    }
}