Initial commit: SimVision tvOS streaming app

Features:
- VOD library with movie grouping and version detection
- TV show library with season/episode organization
- TMDB integration for trending shows and recently aired episodes
- Recent releases section with TMDB release date sorting
- Watch history tracking with continue watching
- Playlist caching (12-hour TTL) for offline support
- M3U playlist parsing with XStream API support (a parsing sketch follows this list)
- Authentication with credential storage
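
A minimal sketch of the M3U parsing step referenced above; the names M3UEntry and parseM3U are illustrative assumptions, not the app's actual types:

import Foundation

// Hypothetical types for illustration only; the app's real parser is not shown here.
struct M3UEntry {
    let title: String
    let url: URL
}

func parseM3U(_ text: String) -> [M3UEntry] {
    var entries: [M3UEntry] = []
    var pendingTitle: String?
    for rawLine in text.split(whereSeparator: \.isNewline) {
        let line = rawLine.trimmingCharacters(in: .whitespaces)
        if line.hasPrefix("#EXTINF:") {
            // Simplified: treat everything after the first comma as the display title.
            pendingTitle = line.split(separator: ",", maxSplits: 1).last.map { String($0) }
        } else if !line.isEmpty, !line.hasPrefix("#"), let url = URL(string: line) {
            entries.append(M3UEntry(title: pendingTitle ?? line, url: url))
            pendingTitle = nil
        }
    }
    return entries
}

A production parser would also capture attributes such as tvg-id and group-title from the #EXTINF line; those are omitted here for brevity.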

Technical:
- SwiftUI for tvOS
- Actor-based services for thread safety
- Persistent caching for playlists, TMDB data, and watch history (a caching sketch follows this list)
- KSPlayer integration for video playback
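
A minimal sketch of the actor-plus-TTL caching pattern listed above; PlaylistCache, Entry, and the file name are illustrative assumptions rather than the app's actual API:

import Foundation

// Hypothetical cache actor for illustration only.
actor PlaylistCache {
    struct Entry: Codable {
        let savedAt: Date
        let payload: Data
    }

    private let ttl: TimeInterval = 12 * 60 * 60 // 12-hour TTL, as in the commit message
    private let fileURL: URL

    init(filename: String = "playlist-cache.json") {
        let caches = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)[0]
        fileURL = caches.appendingPathComponent(filename)
    }

    func save(_ payload: Data) throws {
        let entry = Entry(savedAt: Date(), payload: payload)
        try JSONEncoder().encode(entry).write(to: fileURL, options: .atomic)
    }

    // Returns the cached payload only if it is younger than the TTL.
    func loadIfFresh() -> Data? {
        guard let data = try? Data(contentsOf: fileURL),
              let entry = try? JSONDecoder().decode(Entry.self, from: data),
              Date().timeIntervalSince(entry.savedAt) < ttl
        else { return nil }
        return entry.payload
    }
}

Because the cache is an actor, concurrent reads and writes are serialized without explicit locking, and freshness is checked lazily at read time against the 12-hour TTL.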

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
commit 872354b834
2026-01-21 22:12:08 -06:00
283 changed files with 338296 additions and 0 deletions

DisplayModel.swift

@@ -0,0 +1,299 @@
//
// DisplayModel.swift
// KSPlayer-iOS
//
// Created by kintan on 2020/1/11.
//
import Foundation
import Metal
import simd
#if canImport(UIKit)
import UIKit
#endif
extension DisplayEnum {
private static var planeDisplay = PlaneDisplayModel()
private static var vrDisplay = VRDisplayModel()
private static var vrBoxDisplay = VRBoxDisplayModel()
func set(encoder: MTLRenderCommandEncoder) {
switch self {
case .plane:
DisplayEnum.planeDisplay.set(encoder: encoder)
case .vr:
DisplayEnum.vrDisplay.set(encoder: encoder)
case .vrBox:
DisplayEnum.vrBoxDisplay.set(encoder: encoder)
}
}
func pipeline(planeCount: Int, bitDepth: Int32) -> MTLRenderPipelineState {
switch self {
case .plane:
return DisplayEnum.planeDisplay.pipeline(planeCount: planeCount, bitDepth: bitDepth)
case .vr:
return DisplayEnum.vrDisplay.pipeline(planeCount: planeCount, bitDepth: bitDepth)
case .vrBox:
return DisplayEnum.vrBoxDisplay.pipeline(planeCount: planeCount, bitDepth: bitDepth)
}
}
func touchesMoved(touch: UITouch) {
switch self {
case .vr:
DisplayEnum.vrDisplay.touchesMoved(touch: touch)
case .vrBox:
DisplayEnum.vrBoxDisplay.touchesMoved(touch: touch)
default:
break
}
}
}
private class PlaneDisplayModel {
private lazy var yuv = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture")
private lazy var yuvp010LE = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture", bitDepth: 10)
private lazy var nv12 = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture")
private lazy var p010LE = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture", bitDepth: 10)
private lazy var bgra = MetalRender.makePipelineState(fragmentFunction: "displayTexture")
let indexCount: Int
let indexType = MTLIndexType.uint16
let primitiveType = MTLPrimitiveType.triangleStrip
let indexBuffer: MTLBuffer
let posBuffer: MTLBuffer?
let uvBuffer: MTLBuffer?
fileprivate init() {
let (indices, positions, uvs) = PlaneDisplayModel.genSphere()
let device = MetalRender.device
indexCount = indices.count
indexBuffer = device.makeBuffer(bytes: indices, length: MemoryLayout<UInt16>.size * indexCount)!
posBuffer = device.makeBuffer(bytes: positions, length: MemoryLayout<simd_float4>.size * positions.count)
uvBuffer = device.makeBuffer(bytes: uvs, length: MemoryLayout<simd_float2>.size * uvs.count)
}
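// Despite sharing the genSphere name with the sphere model below, this emits a 4-vertex full-screen quad drawn as a triangle strip.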
private static func genSphere() -> ([UInt16], [simd_float4], [simd_float2]) {
let indices: [UInt16] = [0, 1, 2, 3]
let positions: [simd_float4] = [
[-1.0, -1.0, 0.0, 1.0],
[-1.0, 1.0, 0.0, 1.0],
[1.0, -1.0, 0.0, 1.0],
[1.0, 1.0, 0.0, 1.0],
]
let uvs: [simd_float2] = [
[0.0, 1.0],
[0.0, 0.0],
[1.0, 1.0],
[1.0, 0.0],
]
return (indices, positions, uvs)
}
func set(encoder: MTLRenderCommandEncoder) {
encoder.setFrontFacing(.clockwise)
encoder.setVertexBuffer(posBuffer, offset: 0, index: 0)
encoder.setVertexBuffer(uvBuffer, offset: 0, index: 1)
encoder.drawIndexedPrimitives(type: primitiveType, indexCount: indexCount, indexType: indexType, indexBuffer: indexBuffer, indexBufferOffset: 0)
}
func pipeline(planeCount: Int, bitDepth: Int32) -> MTLRenderPipelineState {
switch planeCount {
case 3:
if bitDepth == 10 {
return yuvp010LE
} else {
return yuv
}
case 2:
if bitDepth == 10 {
return p010LE
} else {
return nv12
}
case 1:
return bgra
default:
return bgra
}
}
}
@MainActor
private class SphereDisplayModel {
private lazy var yuv = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture", isSphere: true)
private lazy var yuvp010LE = MetalRender.makePipelineState(fragmentFunction: "displayYUVTexture", isSphere: true, bitDepth: 10)
private lazy var nv12 = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture", isSphere: true)
private lazy var p010LE = MetalRender.makePipelineState(fragmentFunction: "displayNV12Texture", isSphere: true, bitDepth: 10)
private lazy var bgra = MetalRender.makePipelineState(fragmentFunction: "displayTexture", isSphere: true)
private var fingerRotationX = Float(0)
private var fingerRotationY = Float(0)
fileprivate var modelViewMatrix = matrix_identity_float4x4
let indexCount: Int
let indexType = MTLIndexType.uint16
let primitiveType = MTLPrimitiveType.triangle
let indexBuffer: MTLBuffer
let posBuffer: MTLBuffer?
let uvBuffer: MTLBuffer?
@MainActor
fileprivate init() {
let (indices, positions, uvs) = SphereDisplayModel.genSphere()
let device = MetalRender.device
indexCount = indices.count
indexBuffer = device.makeBuffer(bytes: indices, length: MemoryLayout<UInt16>.size * indexCount)!
posBuffer = device.makeBuffer(bytes: positions, length: MemoryLayout<simd_float4>.size * positions.count)
uvBuffer = device.makeBuffer(bytes: uvs, length: MemoryLayout<simd_float2>.size * uvs.count)
#if canImport(UIKit) && canImport(CoreMotion)
if KSOptions.enableSensor {
MotionSensor.shared.start()
}
#endif
}
func set(encoder: MTLRenderCommandEncoder) {
encoder.setFrontFacing(.clockwise)
encoder.setVertexBuffer(posBuffer, offset: 0, index: 0)
encoder.setVertexBuffer(uvBuffer, offset: 0, index: 1)
#if canImport(UIKit) && canImport(CoreMotion)
if KSOptions.enableSensor, let matrix = MotionSensor.shared.matrix() {
modelViewMatrix = matrix
}
#endif
}
@MainActor
func touchesMoved(touch: UITouch) {
#if canImport(UIKit)
let view = touch.view
#else
let view: UIView? = nil
#endif
var distX = Float(touch.location(in: view).x - touch.previousLocation(in: view).x)
var distY = Float(touch.location(in: view).y - touch.previousLocation(in: view).y)
distX *= 0.005
distY *= 0.005
fingerRotationX -= distY * 60 / 100
fingerRotationY -= distX * 60 / 100
modelViewMatrix = matrix_identity_float4x4.rotateX(radians: fingerRotationX).rotateY(radians: fingerRotationY)
}
func reset() {
fingerRotationX = 0
fingerRotationY = 0
modelViewMatrix = matrix_identity_float4x4
}
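// Builds a unit-radius UV sphere: 200 slices by 100 parallels, with two triangles (six indices) per grid cell.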
private static func genSphere() -> ([UInt16], [simd_float4], [simd_float2]) {
let slicesCount = UInt16(200)
let parallelsCount = slicesCount / 2
let indicesCount = Int(slicesCount) * Int(parallelsCount) * 6
var indices = [UInt16](repeating: 0, count: indicesCount)
var positions = [simd_float4]()
var uvs = [simd_float2]()
var runCount = 0
let radius = Float(1.0)
let step = (2.0 * Float.pi) / Float(slicesCount)
var i = UInt16(0)
while i <= parallelsCount {
var j = UInt16(0)
while j <= slicesCount {
let vertex0 = radius * sinf(step * Float(i)) * cosf(step * Float(j))
let vertex1 = radius * cosf(step * Float(i))
let vertex2 = radius * sinf(step * Float(i)) * sinf(step * Float(j))
let vertex3 = Float(1.0)
let vertex4 = Float(j) / Float(slicesCount)
let vertex5 = Float(i) / Float(parallelsCount)
positions.append([vertex0, vertex1, vertex2, vertex3])
uvs.append([vertex4, vertex5])
if i < parallelsCount, j < slicesCount {
indices[runCount] = i * (slicesCount + 1) + j
runCount += 1
indices[runCount] = UInt16((i + 1) * (slicesCount + 1) + j)
runCount += 1
indices[runCount] = UInt16((i + 1) * (slicesCount + 1) + (j + 1))
runCount += 1
indices[runCount] = UInt16(i * (slicesCount + 1) + j)
runCount += 1
indices[runCount] = UInt16((i + 1) * (slicesCount + 1) + (j + 1))
runCount += 1
indices[runCount] = UInt16(i * (slicesCount + 1) + (j + 1))
runCount += 1
}
j += 1
}
i += 1
}
return (indices, positions, uvs)
}
func pipeline(planeCount: Int, bitDepth: Int32) -> MTLRenderPipelineState {
switch planeCount {
case 3:
if bitDepth == 10 {
return yuvp010LE
} else {
return yuv
}
case 2:
if bitDepth == 10 {
return p010LE
} else {
return nv12
}
case 1:
return bgra
default:
return bgra
}
}
}
private class VRDisplayModel: SphereDisplayModel {
private let modelViewProjectionMatrix: simd_float4x4
override required init() {
let size = KSOptions.sceneSize
let aspect = Float(size.width / size.height)
let projectionMatrix = simd_float4x4(perspective: Float.pi / 3, aspect: aspect, nearZ: 0.1, farZ: 400.0)
let viewMatrix = simd_float4x4(lookAt: SIMD3<Float>.zero, center: [0, 0, -1000], up: [0, 1, 0])
modelViewProjectionMatrix = projectionMatrix * viewMatrix
super.init()
}
override func set(encoder: MTLRenderCommandEncoder) {
super.set(encoder: encoder)
var matrix = modelViewProjectionMatrix * modelViewMatrix
let matrixBuffer = MetalRender.device.makeBuffer(bytes: &matrix, length: MemoryLayout<simd_float4x4>.size)
encoder.setVertexBuffer(matrixBuffer, offset: 0, index: 2)
encoder.drawIndexedPrimitives(type: primitiveType, indexCount: indexCount, indexType: indexType, indexBuffer: indexBuffer, indexBufferOffset: 0)
}
}
private class VRBoxDisplayModel: SphereDisplayModel {
private let modelViewProjectionMatrixLeft: simd_float4x4
private let modelViewProjectionMatrixRight: simd_float4x4
override required init() {
let size = KSOptions.sceneSize
let aspect = Float(size.width / size.height) / 2
let viewMatrixLeft = simd_float4x4(lookAt: [-0.012, 0, 0], center: [0, 0, -1000], up: [0, 1, 0])
let viewMatrixRight = simd_float4x4(lookAt: [0.012, 0, 0], center: [0, 0, -1000], up: [0, 1, 0])
let projectionMatrix = simd_float4x4(perspective: Float.pi / 3, aspect: aspect, nearZ: 0.1, farZ: 400.0)
modelViewProjectionMatrixLeft = projectionMatrix * viewMatrixLeft
modelViewProjectionMatrixRight = projectionMatrix * viewMatrixRight
super.init()
}
override func set(encoder: MTLRenderCommandEncoder) {
super.set(encoder: encoder)
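// Render the sphere once per eye, into the left and right halves of the drawable, each half using its own view-projection matrix.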
let layerSize = KSOptions.sceneSize
let width = Double(layerSize.width / 2)
[(modelViewProjectionMatrixLeft, MTLViewport(originX: 0, originY: 0, width: width, height: Double(layerSize.height), znear: 0, zfar: 0)),
(modelViewProjectionMatrixRight, MTLViewport(originX: width, originY: 0, width: width, height: Double(layerSize.height), znear: 0, zfar: 0))].forEach { modelViewProjectionMatrix, viewport in
encoder.setViewport(viewport)
var matrix = modelViewProjectionMatrix * modelViewMatrix
let matrixBuffer = MetalRender.device.makeBuffer(bytes: &matrix, length: MemoryLayout<simd_float4x4>.size)
encoder.setVertexBuffer(matrixBuffer, offset: 0, index: 2)
encoder.drawIndexedPrimitives(type: primitiveType, indexCount: indexCount, indexType: indexType, indexBuffer: indexBuffer, indexBufferOffset: 0)
}
}
}

MetalRender.swift

@@ -0,0 +1,214 @@
//
// MetalRender.swift
// KSPlayer-iOS
//
// Created by kintan on 2020/1/11.
//
import Accelerate
import CoreVideo
import Foundation
import Metal
import QuartzCore
import simd
class MetalRender {
static let device = MTLCreateSystemDefaultDevice()!
static let library: MTLLibrary = {
var library: MTLLibrary!
library = device.makeDefaultLibrary()
if library == nil {
library = try? device.makeDefaultLibrary(bundle: .module)
}
return library
}()
private let renderPassDescriptor = MTLRenderPassDescriptor()
private let commandQueue = MetalRender.device.makeCommandQueue()
private lazy var samplerState: MTLSamplerState? = {
let samplerDescriptor = MTLSamplerDescriptor()
samplerDescriptor.minFilter = .linear
samplerDescriptor.magFilter = .linear
return MetalRender.device.makeSamplerState(descriptor: samplerDescriptor)
}()
private lazy var colorConversion601VideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_601_4.pointee.videoRange.buffer
private lazy var colorConversion601FullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_601_4.pointee.buffer
private lazy var colorConversion709VideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_709_2.pointee.videoRange.buffer
private lazy var colorConversion709FullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_709_2.pointee.buffer
private lazy var colorConversionSMPTE240MVideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_SMPTE_240M_1995.videoRange.buffer
private lazy var colorConversionSMPTE240MFullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_SMPTE_240M_1995.buffer
private lazy var colorConversion2020VideoRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_2020.videoRange.buffer
private lazy var colorConversion2020FullRangeMatrixBuffer: MTLBuffer? = kvImage_YpCbCrToARGBMatrix_ITU_R_2020.buffer
private lazy var colorOffsetVideoRangeMatrixBuffer: MTLBuffer? = {
var firstColumn = SIMD3<Float>(-16.0 / 255.0, -128.0 / 255.0, -128.0 / 255.0)
let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<Float>>.size)
buffer?.label = "colorOffset"
return buffer
}()
private lazy var colorOffsetFullRangeMatrixBuffer: MTLBuffer? = {
var firstColumn = SIMD3<Float>(0, -128.0 / 255.0, -128.0 / 255.0)
let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<Float>>.size)
buffer?.label = "colorOffset"
return buffer
}()
private lazy var leftShiftMatrixBuffer: MTLBuffer? = {
var firstColumn = SIMD3<UInt8>(1, 1, 1)
let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<UInt8>>.size)
buffer?.label = "leftShift"
return buffer
}()
private lazy var leftShiftSixMatrixBuffer: MTLBuffer? = {
var firstColumn = SIMD3<UInt8>(64, 64, 64)
let buffer = MetalRender.device.makeBuffer(bytes: &firstColumn, length: MemoryLayout<SIMD3<UInt8>>.size)
buffer?.label = "leftShift"
return buffer
}()
func clear(drawable: MTLDrawable) {
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0)
renderPassDescriptor.colorAttachments[0].loadAction = .clear
guard let commandBuffer = commandQueue?.makeCommandBuffer(),
let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
else {
return
}
encoder.endEncoding()
commandBuffer.present(drawable)
commandBuffer.commit()
commandBuffer.waitUntilCompleted()
}
@MainActor
func draw(pixelBuffer: PixelBufferProtocol, display: DisplayEnum = .plane, drawable: CAMetalDrawable) {
let inputTextures = pixelBuffer.textures()
renderPassDescriptor.colorAttachments[0].texture = drawable.texture
guard !inputTextures.isEmpty, let commandBuffer = commandQueue?.makeCommandBuffer(), let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
return
}
encoder.pushDebugGroup("RenderFrame")
let state = display.pipeline(planeCount: pixelBuffer.planeCount, bitDepth: pixelBuffer.bitDepth)
encoder.setRenderPipelineState(state)
encoder.setFragmentSamplerState(samplerState, index: 0)
for (index, texture) in inputTextures.enumerated() {
texture.label = "texture\(index)"
encoder.setFragmentTexture(texture, index: index)
}
setFragmentBuffer(pixelBuffer: pixelBuffer, encoder: encoder)
display.set(encoder: encoder)
encoder.popDebugGroup()
encoder.endEncoding()
commandBuffer.present(drawable)
commandBuffer.commit()
commandBuffer.waitUntilCompleted()
}
private func setFragmentBuffer(pixelBuffer: PixelBufferProtocol, encoder: MTLRenderCommandEncoder) {
if pixelBuffer.planeCount > 1 {
let buffer: MTLBuffer?
let yCbCrMatrix = pixelBuffer.yCbCrMatrix
let isFullRangeVideo = pixelBuffer.isFullRangeVideo
if yCbCrMatrix == kCVImageBufferYCbCrMatrix_ITU_R_709_2 {
buffer = isFullRangeVideo ? colorConversion709FullRangeMatrixBuffer : colorConversion709VideoRangeMatrixBuffer
} else if yCbCrMatrix == kCVImageBufferYCbCrMatrix_SMPTE_240M_1995 {
buffer = isFullRangeVideo ? colorConversionSMPTE240MFullRangeMatrixBuffer : colorConversionSMPTE240MVideoRangeMatrixBuffer
} else if yCbCrMatrix == kCVImageBufferYCbCrMatrix_ITU_R_2020 {
buffer = isFullRangeVideo ? colorConversion2020FullRangeMatrixBuffer : colorConversion2020VideoRangeMatrixBuffer
} else {
buffer = isFullRangeVideo ? colorConversion601FullRangeMatrixBuffer : colorConversion601VideoRangeMatrixBuffer
}
encoder.setFragmentBuffer(buffer, offset: 0, index: 0)
let colorOffset = isFullRangeVideo ? colorOffsetFullRangeMatrixBuffer : colorOffsetVideoRangeMatrixBuffer
encoder.setFragmentBuffer(colorOffset, offset: 0, index: 1)
let leftShift = pixelBuffer.leftShift == 0 ? leftShiftMatrixBuffer : leftShiftSixMatrixBuffer
encoder.setFragmentBuffer(leftShift, offset: 0, index: 2)
}
}
static func makePipelineState(fragmentFunction: String, isSphere: Bool = false, bitDepth: Int32 = 8) -> MTLRenderPipelineState {
let descriptor = MTLRenderPipelineDescriptor()
descriptor.colorAttachments[0].pixelFormat = KSOptions.colorPixelFormat(bitDepth: bitDepth)
descriptor.vertexFunction = library.makeFunction(name: isSphere ? "mapSphereTexture" : "mapTexture")
descriptor.fragmentFunction = library.makeFunction(name: fragmentFunction)
let vertexDescriptor = MTLVertexDescriptor()
vertexDescriptor.attributes[0].format = .float4
vertexDescriptor.attributes[0].bufferIndex = 0
vertexDescriptor.attributes[0].offset = 0
vertexDescriptor.attributes[1].format = .float2
vertexDescriptor.attributes[1].bufferIndex = 1
vertexDescriptor.attributes[1].offset = 0
vertexDescriptor.layouts[0].stride = MemoryLayout<simd_float4>.stride
vertexDescriptor.layouts[1].stride = MemoryLayout<simd_float2>.stride
descriptor.vertexDescriptor = vertexDescriptor
// swiftlint:disable force_try
return try! library.device.makeRenderPipelineState(descriptor: descriptor)
// swiftlint:enable force_try
}
static func texture(pixelBuffer: CVPixelBuffer) -> [MTLTexture] {
guard let iosurface = CVPixelBufferGetIOSurface(pixelBuffer)?.takeUnretainedValue() else {
return []
}
let formats = KSOptions.pixelFormat(planeCount: pixelBuffer.planeCount, bitDepth: pixelBuffer.bitDepth)
return (0 ..< pixelBuffer.planeCount).compactMap { index in
let width = pixelBuffer.widthOfPlane(at: index)
let height = pixelBuffer.heightOfPlane(at: index)
let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: formats[index], width: width, height: height, mipmapped: false)
return device.makeTexture(descriptor: descriptor, iosurface: iosurface, plane: index)
}
}
static func textures(formats: [MTLPixelFormat], widths: [Int], heights: [Int], buffers: [MTLBuffer?], lineSizes: [Int]) -> [MTLTexture] {
(0 ..< formats.count).compactMap { i in
guard let buffer = buffers[i] else {
return nil
}
let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: formats[i], width: widths[i], height: heights[i], mipmapped: false)
descriptor.storageMode = buffer.storageMode
return buffer.makeTexture(descriptor: descriptor, offset: 0, bytesPerRow: lineSizes[i])
}
}
}
// swiftlint:disable identifier_name
// private let kvImage_YpCbCrToARGBMatrix_ITU_R_601_4 = vImage_YpCbCrToARGBMatrix(Kr: 0.299, Kb: 0.114)
// private let kvImage_YpCbCrToARGBMatrix_ITU_R_709_2 = vImage_YpCbCrToARGBMatrix(Kr: 0.2126, Kb: 0.0722)
private let kvImage_YpCbCrToARGBMatrix_SMPTE_240M_1995 = vImage_YpCbCrToARGBMatrix(Kr: 0.212, Kb: 0.087)
private let kvImage_YpCbCrToARGBMatrix_ITU_R_2020 = vImage_YpCbCrToARGBMatrix(Kr: 0.2627, Kb: 0.0593)
extension vImage_YpCbCrToARGBMatrix {
/**
https://en.wikipedia.org/wiki/YCbCr
@textblock
| R |   | 1   0                         2 - 2 * Kr              |   | Y' |
| G | = | 1   -Kb * (2 - 2 * Kb) / Kg   -Kr * (2 - 2 * Kr) / Kg |   | Cb |
| B |   | 1   2 - 2 * Kb                0                       |   | Cr |
@/textblock
*/
init(Kr: Float, Kb: Float) {
let Kg = 1 - Kr - Kb
self.init(Yp: 1, Cr_R: 2 - 2 * Kr, Cr_G: -Kr * (2 - 2 * Kr) / Kg, Cb_G: -Kb * (2 - 2 * Kb) / Kg, Cb_B: 2 - 2 * Kb)
}
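/// Rescales the full-range matrix for video (limited) range input, where luma occupies 219 of 255 code values (16-235) and chroma occupies 224 (16-240).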
var videoRange: vImage_YpCbCrToARGBMatrix {
vImage_YpCbCrToARGBMatrix(Yp: 255 / 219 * Yp, Cr_R: 255 / 224 * Cr_R, Cr_G: 255 / 224 * Cr_G, Cb_G: 255 / 224 * Cb_G, Cb_B: 255 / 224 * Cb_B)
}
var buffer: MTLBuffer? {
var matrix = simd_float3x3([Yp, Yp, Yp], [0.0, Cb_G, Cb_B], [Cr_R, Cr_G, 0.0])
let buffer = MetalRender.device.makeBuffer(bytes: &matrix, length: MemoryLayout<simd_float3x3>.size)
buffer?.label = "colorConversionMatrix"
return buffer
}
}
// swiftlint:enable identifier_name

MotionSensor.swift

@@ -0,0 +1,89 @@
//
// MotionSensor.swift
// KSPlayer-iOS
//
// Created by kintan on 2020/1/13.
//
#if canImport(UIKit) && canImport(CoreMotion)
import CoreMotion
import Foundation
import simd
import UIKit
@MainActor
final class MotionSensor {
static let shared = MotionSensor()
private let manager = CMMotionManager()
private let worldToInertialReferenceFrame = simd_float4x4(euler: -90, y: 0, z: 90)
private var deviceToDisplay = simd_float4x4.identity
private let defaultRadiansY: Float
private var orientation = UIInterfaceOrientation.unknown {
didSet {
if oldValue != orientation {
switch orientation {
case .portraitUpsideDown:
deviceToDisplay = simd_float4x4(euler: 0, y: 0, z: 180)
case .landscapeRight:
deviceToDisplay = simd_float4x4(euler: 0, y: 0, z: -90)
case .landscapeLeft:
deviceToDisplay = simd_float4x4(euler: 0, y: 0, z: 90)
default:
deviceToDisplay = simd_float4x4.identity
}
}
}
}
private init() {
switch KSOptions.windowScene?.interfaceOrientation {
case .landscapeRight:
defaultRadiansY = -.pi / 2
case .landscapeLeft:
defaultRadiansY = .pi / 2
default:
defaultRadiansY = 0
}
}
func ready() -> Bool {
manager.isDeviceMotionAvailable ? manager.isDeviceMotionActive : false
}
func start() {
if manager.isDeviceMotionAvailable, !manager.isDeviceMotionActive {
manager.deviceMotionUpdateInterval = 1 / 60
manager.startDeviceMotionUpdates()
}
}
func stop() {
manager.stopDeviceMotionUpdates()
}
func matrix() -> simd_float4x4? {
if var matrix = manager.deviceMotion.flatMap(simd_float4x4.init(motion:)) {
matrix = matrix.transpose
matrix *= worldToInertialReferenceFrame
orientation = KSOptions.windowScene?.interfaceOrientation ?? .portrait
matrix = deviceToDisplay * matrix
matrix = matrix.rotateY(radians: defaultRadiansY)
return matrix
}
return nil
}
}
public extension simd_float4x4 {
init(motion: CMDeviceMotion) {
self.init(rotation: motion.attitude.rotationMatrix)
}
init(rotation: CMRotationMatrix) {
self.init(SIMD4<Float>(Float(rotation.m11), Float(rotation.m12), Float(rotation.m13), 0.0),
SIMD4<Float>(Float(rotation.m21), Float(rotation.m22), Float(rotation.m23), 0.0),
SIMD4<Float>(Float(rotation.m31), Float(rotation.m32), Float(rotation.m33), -1),
SIMD4<Float>(0, 0, 0, 1))
}
}
#endif

PixelBufferProtocol.swift

@@ -0,0 +1,283 @@
//
// PixelBufferProtocol.swift
// KSPlayer-iOS
//
// Created by kintan on 2019/12/31.
//
import AVFoundation
import CoreVideo
import Foundation
import Libavutil
import simd
import VideoToolbox
#if canImport(UIKit)
import UIKit
#endif
public protocol PixelBufferProtocol: AnyObject {
var width: Int { get }
var height: Int { get }
var bitDepth: Int32 { get }
var leftShift: UInt8 { get }
var planeCount: Int { get }
var formatDescription: CMVideoFormatDescription? { get }
var aspectRatio: CGSize { get set }
var yCbCrMatrix: CFString? { get set }
var colorPrimaries: CFString? { get set }
var transferFunction: CFString? { get set }
var colorspace: CGColorSpace? { get set }
var cvPixelBuffer: CVPixelBuffer? { get }
var isFullRangeVideo: Bool { get }
func cgImage() -> CGImage?
func textures() -> [MTLTexture]
func widthOfPlane(at planeIndex: Int) -> Int
func heightOfPlane(at planeIndex: Int) -> Int
func matche(formatDescription: CMVideoFormatDescription) -> Bool
}
extension PixelBufferProtocol {
var size: CGSize { CGSize(width: width, height: height) }
}
extension CVPixelBuffer: PixelBufferProtocol {
public var leftShift: UInt8 { 0 }
public var cvPixelBuffer: CVPixelBuffer? { self }
public var width: Int { CVPixelBufferGetWidth(self) }
public var height: Int { CVPixelBufferGetHeight(self) }
public var aspectRatio: CGSize {
get {
if let ratio = CVBufferGetAttachment(self, kCVImageBufferPixelAspectRatioKey, nil)?.takeUnretainedValue() as? NSDictionary,
let horizontal = (ratio[kCVImageBufferPixelAspectRatioHorizontalSpacingKey] as? NSNumber)?.intValue,
let vertical = (ratio[kCVImageBufferPixelAspectRatioVerticalSpacingKey] as? NSNumber)?.intValue,
horizontal > 0, vertical > 0
{
return CGSize(width: horizontal, height: vertical)
} else {
return CGSize(width: 1, height: 1)
}
}
set {
if let aspectRatio = newValue.aspectRatio {
CVBufferSetAttachment(self, kCVImageBufferPixelAspectRatioKey, aspectRatio, .shouldPropagate)
}
}
}
var isPlanar: Bool { CVPixelBufferIsPlanar(self) }
public var planeCount: Int { isPlanar ? CVPixelBufferGetPlaneCount(self) : 1 }
public var formatDescription: CMVideoFormatDescription? {
var formatDescription: CMVideoFormatDescription?
let err = CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: self, formatDescriptionOut: &formatDescription)
if err != noErr {
KSLog("Error at CMVideoFormatDescriptionCreateForImageBuffer \(err)")
}
return formatDescription
}
public var isFullRangeVideo: Bool {
CVBufferGetAttachment(self, kCMFormatDescriptionExtension_FullRangeVideo, nil)?.takeUnretainedValue() as? Bool ?? false
}
public var attachmentsDic: CFDictionary? {
CVBufferGetAttachments(self, .shouldPropagate)
}
public var yCbCrMatrix: CFString? {
get {
CVBufferGetAttachment(self, kCVImageBufferYCbCrMatrixKey, nil)?.takeUnretainedValue() as? NSString
}
set {
if let newValue {
CVBufferSetAttachment(self, kCVImageBufferYCbCrMatrixKey, newValue, .shouldPropagate)
}
}
}
public var colorPrimaries: CFString? {
get {
CVBufferGetAttachment(self, kCVImageBufferColorPrimariesKey, nil)?.takeUnretainedValue() as? NSString
}
set {
if let newValue {
CVBufferSetAttachment(self, kCVImageBufferColorPrimariesKey, newValue, .shouldPropagate)
}
}
}
public var transferFunction: CFString? {
get {
CVBufferGetAttachment(self, kCVImageBufferTransferFunctionKey, nil)?.takeUnretainedValue() as? NSString
}
set {
if let newValue {
CVBufferSetAttachment(self, kCVImageBufferTransferFunctionKey, newValue, .shouldPropagate)
}
}
}
public var colorspace: CGColorSpace? {
get {
#if os(macOS)
return CVImageBufferGetColorSpace(self)?.takeUnretainedValue() ?? attachmentsDic.flatMap { CVImageBufferCreateColorSpaceFromAttachments($0)?.takeUnretainedValue() }
#else
return attachmentsDic.flatMap { CVImageBufferCreateColorSpaceFromAttachments($0)?.takeUnretainedValue() }
#endif
}
set {
if let newValue {
CVBufferSetAttachment(self, kCVImageBufferCGColorSpaceKey, newValue, .shouldPropagate)
}
}
}
public var bitDepth: Int32 {
CVPixelBufferGetPixelFormatType(self).bitDepth
}
public func cgImage() -> CGImage? {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(self, options: nil, imageOut: &cgImage)
return cgImage
}
public func widthOfPlane(at planeIndex: Int) -> Int {
CVPixelBufferGetWidthOfPlane(self, planeIndex)
}
public func heightOfPlane(at planeIndex: Int) -> Int {
CVPixelBufferGetHeightOfPlane(self, planeIndex)
}
func baseAddressOfPlane(at planeIndex: Int) -> UnsafeMutableRawPointer? {
CVPixelBufferGetBaseAddressOfPlane(self, planeIndex)
}
public func textures() -> [MTLTexture] {
MetalRender.texture(pixelBuffer: self)
}
public func matche(formatDescription: CMVideoFormatDescription) -> Bool {
CMVideoFormatDescriptionMatchesImageBuffer(formatDescription, imageBuffer: self)
}
}
class PixelBuffer: PixelBufferProtocol {
let bitDepth: Int32
let width: Int
let height: Int
let planeCount: Int
var aspectRatio: CGSize
let leftShift: UInt8
let isFullRangeVideo: Bool
var cvPixelBuffer: CVPixelBuffer? { nil }
var colorPrimaries: CFString?
var transferFunction: CFString?
var yCbCrMatrix: CFString?
var colorspace: CGColorSpace?
var formatDescription: CMVideoFormatDescription? = nil
private let format: AVPixelFormat
private let formats: [MTLPixelFormat]
private let widths: [Int]
private let heights: [Int]
private let buffers: [MTLBuffer?]
private let lineSize: [Int]
init(frame: AVFrame) {
yCbCrMatrix = frame.colorspace.ycbcrMatrix
colorPrimaries = frame.color_primaries.colorPrimaries
transferFunction = frame.color_trc.transferFunction
colorspace = KSOptions.colorSpace(ycbcrMatrix: yCbCrMatrix, transferFunction: transferFunction)
width = Int(frame.width)
height = Int(frame.height)
isFullRangeVideo = frame.color_range == AVCOL_RANGE_JPEG
aspectRatio = frame.sample_aspect_ratio.size
format = AVPixelFormat(rawValue: frame.format)
leftShift = format.leftShift
bitDepth = format.bitDepth
planeCount = Int(format.planeCount)
let desc = av_pix_fmt_desc_get(format)?.pointee
let chromaW = desc?.log2_chroma_w == 1 ? 2 : 1
let chromaH = desc?.log2_chroma_h == 1 ? 2 : 1
switch planeCount {
case 3:
widths = [width, width / chromaW, width / chromaW]
heights = [height, height / chromaH, height / chromaH]
case 2:
widths = [width, width / chromaW]
heights = [height, height / chromaH]
default:
widths = [width]
heights = [height]
}
formats = KSOptions.pixelFormat(planeCount: planeCount, bitDepth: bitDepth)
var buffers = [MTLBuffer?]()
var lineSize = [Int]()
let bytes = Array(tuple: frame.data)
let bytesPerRow = Array(tuple: frame.linesize).compactMap { Int($0) }
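// Copy each FFmpeg plane into an MTLBuffer. If the source stride already matches the device's linear-texture row alignment, copy the plane in one shot; otherwise pad it row by row.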
for i in 0 ..< planeCount {
let alignment = MetalRender.device.minimumLinearTextureAlignment(for: formats[i])
lineSize.append(bytesPerRow[i].alignment(value: alignment))
let buffer: MTLBuffer?
let size = lineSize[i]
let byteCount = bytesPerRow[i]
let height = heights[i]
if byteCount == size {
buffer = MetalRender.device.makeBuffer(bytes: bytes[i]!, length: height * size)
} else {
buffer = MetalRender.device.makeBuffer(length: heights[i] * lineSize[i])
let contents = buffer?.contents()
let source = bytes[i]!
var j = 0
// while > stride(from:to:by:) > for in
while j < height {
contents?.advanced(by: j * size).copyMemory(from: source.advanced(by: j * byteCount), byteCount: byteCount)
j += 1
}
}
buffers.append(buffer)
}
self.lineSize = lineSize
self.buffers = buffers
}
func textures() -> [MTLTexture] {
MetalRender.textures(formats: formats, widths: widths, heights: heights, buffers: buffers, lineSizes: lineSize)
}
func widthOfPlane(at planeIndex: Int) -> Int {
widths[planeIndex]
}
func heightOfPlane(at planeIndex: Int) -> Int {
heights[planeIndex]
}
func cgImage() -> CGImage? {
let image: CGImage?
if format == AV_PIX_FMT_RGB24 {
image = CGImage.make(rgbData: buffers[0]!.contents().assumingMemoryBound(to: UInt8.self), linesize: Int(lineSize[0]), width: width, height: height)
} else {
let scale = VideoSwresample(isDovi: false)
image = scale.transfer(format: format, width: Int32(width), height: Int32(height), data: buffers.map { $0?.contents().assumingMemoryBound(to: UInt8.self) }, linesize: lineSize.map { Int32($0) })?.cgImage()
scale.shutdown()
}
return image
}
public func matche(formatDescription: CMVideoFormatDescription) -> Bool {
self.formatDescription == formatDescription
}
}
extension CGSize {
var aspectRatio: NSDictionary? {
if width != 0, height != 0, width != height {
return [kCVImageBufferPixelAspectRatioHorizontalSpacingKey: width,
kCVImageBufferPixelAspectRatioVerticalSpacingKey: height]
} else {
return nil
}
}
}

Shaders.metal

@@ -0,0 +1,103 @@
//
// Shaders.metal
#include <metal_stdlib>
using namespace metal;
struct VertexIn
{
float4 pos [[attribute(0)]];
float2 uv [[attribute(1)]];
};
struct VertexOut {
float4 renderedCoordinate [[position]];
float2 textureCoordinate;
};
vertex VertexOut mapTexture(VertexIn input [[stage_in]]) {
VertexOut outVertex;
outVertex.renderedCoordinate = input.pos;
outVertex.textureCoordinate = input.uv;
return outVertex;
}
vertex VertexOut mapSphereTexture(VertexIn input [[stage_in]], constant float4x4& uniforms [[ buffer(2) ]]) {
VertexOut outVertex;
outVertex.renderedCoordinate = uniforms * input.pos;
outVertex.textureCoordinate = input.uv;
return outVertex;
}
fragment half4 displayTexture(VertexOut mappingVertex [[ stage_in ]],
texture2d<half, access::sample> texture [[ texture(0) ]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
return half4(texture.sample(s, mappingVertex.textureCoordinate));
}
fragment half4 displayYUVTexture(VertexOut in [[ stage_in ]],
texture2d<half> yTexture [[ texture(0) ]],
texture2d<half> uTexture [[ texture(1) ]],
texture2d<half> vTexture [[ texture(2) ]],
sampler textureSampler [[ sampler(0) ]],
constant float3x3& yuvToBGRMatrix [[ buffer(0) ]],
constant float3& colorOffset [[ buffer(1) ]],
constant uchar3& leftShift [[ buffer(2) ]])
{
half3 yuv;
yuv.x = yTexture.sample(textureSampler, in.textureCoordinate).r;
yuv.y = uTexture.sample(textureSampler, in.textureCoordinate).r;
yuv.z = vTexture.sample(textureSampler, in.textureCoordinate).r;
return half4(half3x3(yuvToBGRMatrix)*(yuv*half3(leftShift)+half3(colorOffset)), 1);
}
fragment half4 displayNV12Texture(VertexOut in [[ stage_in ]],
texture2d<half> lumaTexture [[ texture(0) ]],
texture2d<half> chromaTexture [[ texture(1) ]],
sampler textureSampler [[ sampler(0) ]],
constant float3x3& yuvToBGRMatrix [[ buffer(0) ]],
constant float3& colorOffset [[ buffer(1) ]],
constant uchar3& leftShift [[ buffer(2) ]])
{
half3 yuv;
yuv.x = lumaTexture.sample(textureSampler, in.textureCoordinate).r;
yuv.yz = chromaTexture.sample(textureSampler, in.textureCoordinate).rg;
return half4(half3x3(yuvToBGRMatrix)*(yuv*half3(leftShift)+half3(colorOffset)), 1);
}
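// SMPTE ST 2084 (PQ) EOTF: decodes a PQ-coded signal to linear light.
// The fractions are the standard PQ constants: m1 = 2610/16384, m2 = 2523*128/4096, c1 = 3424/4096, c2 = 2413*32/4096, c3 = 2392*32/4096.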
half3 shaderLinearize(half3 rgb) {
rgb = pow(max(rgb,0), half3(4096.0/(2523 * 128)));
rgb = max(rgb - half3(3424./4096), 0.0) / (half3(2413./4096 * 32) - half3(2392./4096 * 32) * rgb);
rgb = pow(rgb, half3(4096.0 * 4 / 2610));
return rgb;
}
half3 shaderDeLinearize(half3 rgb) {
rgb = pow(max(rgb,0), half3(2610./4096 / 4));
rgb = (half3(3424./4096) - half3(2413./4096 * 32) * rgb) / (half3(1.0) + half3(2392./4096 * 32) * rgb);
rgb = pow(rgb, half3(2523./4096 * 128));
return rgb;
}
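// Decodes an IPT-style signal (intensity plus two chroma axes): IPT -> LMS, linearize, LMS -> RGB, then de-linearize back to display code values.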
fragment half4 displayYCCTexture(VertexOut in [[ stage_in ]],
texture2d<half> lumaTexture [[ texture(0) ]],
texture2d<half> chromaTexture [[ texture(1) ]],
sampler textureSampler [[ sampler(0) ]],
constant float3x3& yuvToBGRMatrix [[ buffer(0) ]],
constant float3& colorOffset [[ buffer(1) ]],
constant uchar3& leftShift [[ buffer(2) ]])
{
half3 ipt;
ipt.x = lumaTexture.sample(textureSampler, in.textureCoordinate).r;
ipt.yz = chromaTexture.sample(textureSampler, in.textureCoordinate).rg;
// half3x3 ipt2lms = half3x3{{1, 0.1952, 0.4104}, {1, -0.2278, 0.2264}, {1, 0.0652, -1.3538}};
// half3x3 lms2rgb = half3x3{{3.238998, -0.719461, -0.002862}, {-2.272734, 1.874998, -0.268066}, {0.086733, -0.158947, 1.074494}};
half3x3 ipt2lms = half3x3{{1, 799.0/8192, 1681.0/8192}, {1, -933.0/8192, 1091.0/8192}, {1, 267.0/8192, -5545.0/8192}}; // floating-point division; integer division would truncate these coefficients to 0
half3x3 lms2rgb = half3x3{{3.43661, -0.79133, -0.0259499}, {-2.50645, 1.98360, -0.0989137}, {0.06984, -0.192271, 1.12486}};
half3 lms = ipt2lms*ipt;
lms = shaderLinearize(lms);
half3 rgb = lms2rgb*lms;
rgb = shaderDeLinearize(rgb);
return half4(rgb, 1);
}

Transforms.swift

@@ -0,0 +1,110 @@
//
// Transforms.swift
// MetalSpectrograph
//
// Created by David Conner on 9/9/15.
// Copyright © 2015 Voxxel. All rights reserved.
//
import simd
// swiftlint:disable identifier_name
extension simd_float4x4 {
static let identity = matrix_identity_float4x4
// sx 0 0 0
// 0 sy 0 0
// 0 0 sz 0
// 0 0 0 1
init(scale x: Float, y: Float, z: Float) {
self.init(diagonal: [x, y, z, 1.0])
}
// 1 0 0 tx
// 0 1 0 ty
// 0 0 1 tz
// 0 0 0 1
init(translate: SIMD3<Float>) {
self.init([1, 0.0, 0.0, translate.x],
[0.0, 1, 0.0, translate.y],
[0.0, 0.0, 1, translate.z],
[0.0, 0.0, 0, 1])
}
init(rotationX radians: Float) {
let cos = cosf(radians)
let sin = sinf(radians)
self.init([1, 0.0, 0.0, 0],
[0.0, cos, sin, 0],
[0.0, -sin, cos, 0],
[0.0, 0.0, 0, 1])
}
init(rotationY radians: Float) {
let cos = cosf(radians)
let sin = sinf(radians)
self.init([cos, 0.0, -sin, 0],
[0.0, 1, 0, 0],
[sin, 0, cos, 0],
[0.0, 0.0, 0, 1])
}
init(rotationZ radians: Float) {
let cos = cosf(radians)
let sin = sinf(radians)
self.init([cos, sin, 0.0, 0],
[-sin, cos, 0, 0],
[0.0, 0, 1, 0],
[0.0, 0.0, 0, 1])
}
public init(lookAt eye: SIMD3<Float>, center: SIMD3<Float>, up: SIMD3<Float>) {
let N = normalize(eye - center)
let U = normalize(cross(up, N))
let V = cross(N, U)
self.init(rows: [[U.x, V.x, N.x, 0.0],
[U.y, V.y, N.y, 0.0],
[U.z, V.z, N.z, 0.0],
[dot(-U, eye), dot(-V, eye), dot(-N, eye), 1.0]])
}
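// OpenGL-style perspective projection: clip-space z spans [-1, 1]; aspect is width / height.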
public init(perspective fovyRadians: Float, aspect: Float, nearZ: Float, farZ: Float) {
let cotan = 1.0 / tanf(fovyRadians / 2.0)
self.init([cotan / aspect, 0.0, 0.0, 0.0],
[0.0, cotan, 0.0, 0.0],
[0.0, 0.0, (farZ + nearZ) / (nearZ - farZ), -1],
[0.0, 0.0, (2.0 * farZ * nearZ) / (nearZ - farZ), 0])
}
public init(euler x: Float, y: Float, z: Float) {
let x = x * .pi / 180
let y = y * .pi / 180
let z = z * .pi / 180
let cx = cos(x)
let sx = sin(x)
let cy = cos(y)
let sy = sin(y)
let cz = cos(z)
let sz = sin(z)
let cxsy = cx * sy
let sxsy = sx * sy
self.init([cy * cz, -cy * sz, sy, 0.0],
[cxsy * cz + cx * sz, -cxsy * sz + cx * cz, -sx * cy, 0.0],
[-sxsy * cz + sx * sz, sxsy * sz + sx * cz, cx * cy, 0],
[0.0, 0.0, 0, 1])
}
func rotateX(radians: Float) -> simd_float4x4 {
self * simd_float4x4(rotationX: radians)
}
func rotateY(radians: Float) -> simd_float4x4 {
self * simd_float4x4(rotationY: radians)
}
func rotateZ(radians: Float) -> simd_float4x4 {
self * simd_float4x4(rotationZ: radians)
}
}
// swiftlint:enable identifier_name