ios – Swift VLC drawing to AVSampleBufferDisplayLayer with PiP

0
10
ios – Swift VLC drawing to AVSampleBufferDisplayLayer with PiP


I utilizing VLClibrary to geting picture frames in format “BGRA”. Once I get image from VLC it’s name operate render() the place picture knowledge are current in variable userData.img
I’ve two points with show video photos.

  1. In iOS 16.x video is just not show on display screen, iOS 18.x show video with out subject.
  2. Each iOS model after activate PiP, show PIP in black window with controls however animate video keep in principal view and is canopy by image PiP.
class VideoRenderer : UIView     {

    let displayLayer = AVSampleBufferDisplayLayer()
    
    non-public var pipController: AVPictureInPictureController?
    var userData = mydata_t()
    weak var delegate : VLCPlayer?
    non-public var frameIndex: Int64 = 0
    non-public var fps: Int32 = 25
    non-public let maxFrameWindow = 60
    non-public var frameTimes: [Double] = []
    non-public var timebase: CMTimebase?
    
    override class var layerClass: AnyClass {
        return AVSampleBufferDisplayLayer.self
    }
    
    
    override init(body: CGRect) {
        tremendous.init(body: body)
        setupViewAndPiP()
    }
    
    required init?(coder: NSCoder) {
        tremendous.init(coder: coder)
        setupViewAndPiP()
    }
    override func layoutSubviews() {
        tremendous.layoutSubviews()
        displayLayer.body = bounds
        print("Change view: (bounds.width)x(bounds.top)")
    }
    
    override func didMoveToWindow() {
        tremendous.didMoveToWindow()
        print("Window connected: (window != nil)")
        if window != nil {
            displayLayer.body = bounds
            if displayLayer.superlayer == nil {
                layer.addSublayer(displayLayer)
            }
        }
    }

    non-public func setupTimebase() {
        CMTimebaseCreateWithSourceClock(allocator: kCFAllocatorDefault, sourceClock: CMClockGetHostTimeClock(), timebaseOut: &timebase)
        if let tb = timebase {
            timebase = tb
            CMTimebaseSetTime(tb, time: CMTime.zero)
            CMTimebaseSetRate(tb, fee: 0.0)
            displayLayer.controlTimebase = tb
        }
    }

    non-public func setupViewAndPiP() {
        print("🔍 displayLayer.isReadyForMoreMediaData: (displayLayer.isReadyForMoreMediaData)")

        
        displayLayer.body = bounds
        displayLayer.videoGravity = .resizeAspect
        displayLayer.drawsAsynchronously = true  
//        displayLayer.backgroundColor = UIColor.black.cgColor


        layer.addSublayer(displayLayer)  

        
        setupTimebase()

        
        guard AVPictureInPictureController.isPictureInPictureSupported() else {
            print("PiP not supported on this machine")
            return
        }

        
        let contentSource = AVPictureInPictureController.ContentSource(
            sampleBufferDisplayLayer: displayLayer,
            playbackDelegate: self
        )

        pipController = AVPictureInPictureController(contentSource: contentSource)
        pipController?.delegate = self
        pipController?.requiresLinearPlayback = true
    }

    func resumeTimebase() {
        if let tb = timebase {
            CMTimebaseSetRate(tb, fee: 1.0)
        }
    }
    
    func pauseTimebase() {
        if let tb = timebase {
            CMTimebaseSetRate(tb, fee: 0.0)
        }
    }

    func startPiP() {
        if pipController?.isPictureInPicturePossible == true {
            DispatchQueue.principal.async { [weak self] in
                guard let self = self else { return }
                self.pipController?.startPictureInPicture()
            }
        }

    }
    inner func render() {
        guard 
              let controlTimebase = timebase,
              let img = userData.img,
              displayLayer.isReadyForMoreMediaData else {
            print("❌ Show layer not prepared or lacking dependencies (video knowledge, video timer)")
            return
        }
            

        let currentTime = CMTimebaseGetTime(controlTimebase)
            
        let now = CFAbsoluteTimeGetCurrent()
        let delta = now - userData.lastRenderTime
        userData.lastRenderTime = now

        // Filter out outliers
        if delta > 0.005 && delta < 1.0 {
            frameTimes.append(delta)
            
            if frameTimes.depend > 60 { // hold a max historical past
                frameTimes.removeFirst()
            }

            let avgFrameTime = frameTimes.scale back(0, +) / Double(frameTimes.depend)
            let estimatedFPS = Int32(1.0 / avgFrameTime)

            if estimatedFPS > 0 {
                fps = estimatedFPS
            }
        }
        print("📈 Estimated FPS: (fps)")

        
        let width = Int(userData.width)
        let top = Int(userData.top)
        
        var pixelBuffer: CVPixelBuffer?
        let attrs: [String: Any] = [
            kCVPixelBufferCGImageCompatibilityKey as String: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true,
            kCVPixelBufferWidthKey as String: width,
            kCVPixelBufferHeightKey as String: height,
            kCVPixelBufferBytesPerRowAlignmentKey as String: width * 4,
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA

        ]
        
        let standing = CVPixelBufferCreateWithBytes(
            kCFAllocatorDefault,
            width,
            top,
            kCVPixelFormatType_32BGRA,
            img,
            width * 4,
            nil,
            nil,
            attrs as CFDictionary,
            &pixelBuffer
        )
        
        guard standing == kCVReturnSuccess, let pb = pixelBuffer else { return }
        
        var timingInfo = CMSampleTimingInfo(
            period: .invalid,
            presentationTimeStamp: currentTime,
            decodeTimeStamp: .invalid
        )


        
        var formatDesc: CMVideoFormatDescription?
        CMVideoFormatDescriptionCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
//            codecType: kCMPixelFormat_32BGRA,
            imageBuffer: pb,
            formatDescriptionOut: &formatDesc
        )
        
        guard let format = formatDesc else { return }
        
        print("🎥 Enqueuing body with pts: (timingInfo.presentationTimeStamp.seconds)")
        
        var sampleBuffer: CMSampleBuffer?
        CMSampleBufferCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: pb,
            dataReady: true,
            makeDataReadyCallback: nil,
            refcon: nil,
            formatDescription: format,
            sampleTiming: &timingInfo,
            sampleBufferOut: &sampleBuffer
        )

        
        if let sb = sampleBuffer {
            if CMSampleBufferIsValid(sb) {
                if CMSampleBufferGetPresentationTimeStamp(sb) == .invalid {
                    print("Invalid video timestamp")
                    
                }
                
                DispatchQueue.principal.async { [weak self] in
                    guard let self = self else { return }
                    if (displayLayer.standing == .failed) {
                        displayLayer.flush()
                    }
                    displayLayer.enqueue(sb)
                }
                frameIndex += 1
            } else {
                print("Pattern buffer is invalid!!!!")
            }
        }
    }
extension VideoRenderer: AVPictureInPictureSampleBufferPlaybackDelegate {
    func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) {
        print("📏 PiP window dimension modified to: (newRenderSize.width)x(newRenderSize.top)")
    }
    
    
    func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime) async {
        print("⏩ PiP requested skip by: (CMTimeGetSeconds(skipInterval)) seconds — no-op for reside/stream playback")
    }
    
    func pictureInPictureController(_ controller: AVPictureInPictureController, setPlaying taking part in: Bool) {
        print("PiP needs to: (taking part in ? "play" : "pause")")
        delegate?.setPlaying(setPlaying: taking part in)
        // You may set off libvlc_media_player_pause() right here if wanted
    }

    func pictureInPictureControllerTimeRangeForPlayback(_ controller: AVPictureInPictureController) -> CMTimeRange {
        print("PiP -> pictureInPictureControllerTimeRangeForPlayback")
        return CMTimeRange(begin: .negativeInfinity, period: .positiveInfinity)
    }

    func pictureInPictureControllerIsPlaybackPaused(_ controller: AVPictureInPictureController) -> Bool {
        print("PiP -> pictureInPictureControllerIsPlaybackPaused - Begin")
        if let isPlaying = delegate?.isPlaying() {
            print("PiP -> pictureInPictureControllerIsPlaybackPaused - standing: (isPlaying ? "play" : "pause")")
            return isPlaying // or true should you paused VLC
        } else {
            return false
        }
    }
}

extension VideoRenderer: AVPictureInPictureControllerDelegate {
    func pictureInPictureController(_ controller: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) {
        // Deal with PiP exit (like exhibiting UI once more)
        print("PiP -> pictureInPictureController - Begin")
        completionHandler(true)
    }
    
    func pictureInPictureControllerWillStartPictureInPicture(_ controller: AVPictureInPictureController) {
        print("🎬 PiP will begin")
    }

    func pictureInPictureControllerDidStartPictureInPicture(_ controller: AVPictureInPictureController) {
        print("✅ PiP began")
    }

    func pictureInPictureControllerWillStopPictureInPicture(_ controller: AVPictureInPictureController) {
        print("🛑 PiP will cease")
    }

    func pictureInPictureControllerDidStopPictureInPicture(_ controller: AVPictureInPictureController) {
        print("✔️ PiP stopped")
    }
    func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
        print("(#operate)")
        print("pip error: (error)")
    }
}

LEAVE A REPLY

Please enter your comment!
Please enter your name here