iOS – AVAssetWriter Video Returns 0 Length


In Swift for iOS, I have a camera app that uses a long-press gesture and AVAssetWriter to record videos and save them to Firebase Storage.

When I enter the saved video's download URL from Firebase Storage into a browser, the video plays fine.

When I fetch and try to play the video from my iOS app, the AVURLAsset returns a duration of 0. Similarly, when I download the video from the browser and play it in QuickTime, the video doesn't play, because again it has a duration of zero.
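
For reference, this is roughly how I check the duration on the iOS side (a minimal sketch; downloadURL is just a placeholder for the Firebase Storage download URL, using the async load(.duration) API):

import AVFoundation

// Minimal sketch: read the duration of the uploaded video.
// `downloadURL` is a placeholder for the Firebase Storage download URL.
func checkDuration(of downloadURL: URL) async throws {
    let asset = AVURLAsset(url: downloadURL)
    let duration = try await asset.load(.duration) // iOS 15+
    print("Asset duration: \(duration.seconds) seconds") // prints 0.0 for the affected videos
}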

I believe the issue stems from a sample buffer timestamp conflict, but I'm admittedly new to AVFoundation.

If anyone can offer some guidance based on my code, it would be much appreciated.
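
For context, the writer and its inputs are created before capture starts. Below is only a simplified sketch of that setup so the delegate code makes sense; the file type, codec settings, and dimensions are placeholders rather than my exact configuration. The capture delegate where the recording happens follows after it.

import AVFoundation

// Simplified sketch of the AVAssetWriter setup (placeholder settings and dimensions).
func makeAssetWriter(outputURL: URL) throws -> AVAssetWriter {
    let writer = try AVAssetWriter(outputURL: outputURL, fileType: .mov)

    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: 1080,
        AVVideoHeightKey: 1920
    ]
    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
    videoInput.expectsMediaDataInRealTime = true

    let audioSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100
    ]
    let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
    audioInput.expectsMediaDataInRealTime = true

    writer.add(videoInput)
    writer.add(audioInput)
    return writer
}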

class CaptureDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

enum CaptureState {
    case idle, capturing, ending
}

var captureState = CaptureState.idle

private var sessionStartTime: CMTime?

var lastAudioFrameTime: CMTime = .zero
var lastVideoFrameTime: CMTime = .zero

var firstVideoSampleBuffer: CMSampleBuffer?
var firstAudioSampleBuffer: CMSampleBuffer?

var videoOutput: AVCaptureVideoDataOutput!
var audioOutput: AVCaptureAudioDataOutput!

var assetWriter: AVAssetWriter!
var videoWriterInput: AVAssetWriterInput?
var audioWriterInput: AVAssetWriterInput?

let captureQueue = DispatchQueue(label: "CaptureQueue")

var didFetchFirstVideoFrameBuffer = false
var isFinishingCapture = false

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    
    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            
    switch captureState {
        
    case .capturing:
        print("CaptureState Capturing: Received sample buffer at timestamp \(timestamp.seconds) seconds")
        handleCapturingState(output: output, sampleBuffer: sampleBuffer, timestamp: timestamp)
        
    case .ending:
        print("CaptureState Ending: Received sample buffer at timestamp \(timestamp.seconds) seconds")
        handleEndingState(output: output, sampleBuffer: sampleBuffer, timestamp: timestamp)
        
    default:
        break
    }
}

private func handleCapturingState(output: AVCaptureOutput, sampleBuffer: CMSampleBuffer, timestamp: CMTime) {
    if output == videoOutput {
        // Buffer the first video sample buffer
        if firstVideoSampleBuffer == nil {
            firstVideoSampleBuffer = sampleBuffer
            print("Buffered first video sample buffer at: \(timestamp.seconds)")
        }
    } else if output == audioOutput {
        lastAudioFrameTime = timestamp
        
        // Buffer the first audio sample buffer
        if firstAudioSampleBuffer == nil {
            firstAudioSampleBuffer = sampleBuffer
            print("Buffered first audio sample buffer at: \(timestamp.seconds)")
        }
    }
    
    // Start the session only when both first audio and video buffers are available and the writer is ready
    if firstVideoSampleBuffer != nil && firstAudioSampleBuffer != nil && assetWriter?.status == .unknown {
        
        // Determine the earliest timestamp
        let videoTimestamp = CMSampleBufferGetPresentationTimeStamp(firstVideoSampleBuffer!)
        let audioTimestamp = CMSampleBufferGetPresentationTimeStamp(firstAudioSampleBuffer!)
        let startTime = CMTimeMinimum(videoTimestamp, audioTimestamp)
        
        // Set sessionStartTime
        sessionStartTime = startTime
        print("Session begin time: (sessionStartTime!.seconds) seconds")
        
        // Start writing and the session on the serial queue
        captureQueue.async {
            self.assetWriter?.startWriting()
            self.assetWriter?.startSession(atSourceTime: startTime)
            print("Session started at timestamp: \(startTime.seconds)")
            
            // Append the first video buffer
            if self.videoWriterInput?.isReadyForMoreMediaData == true {
                let success = self.videoWriterInput?.append(self.firstVideoSampleBuffer!) ?? false
                print(success ? "Appended first video buffer at: \(videoTimestamp.seconds)" : "Failed to append first video buffer.")
            }
            
            // Append the first audio buffer
            if self.audioWriterInput?.isReadyForMoreMediaData == true {
                let success = self.audioWriterInput?.append(self.firstAudioSampleBuffer!) ?? false
                print(success ? "Appended first audio buffer at: \(audioTimestamp.seconds)" : "Failed to append first audio buffer.")
            }
            
            // Clear the first buffers to prevent reusing them
            self.firstVideoSampleBuffer = nil
            self.firstAudioSampleBuffer = nil
            
            self.didFetchFirstVideoFrameBuffer = true
        }
    }
    
    // Append subsequent buffers
    if didFetchFirstVideoFrameBuffer {
        captureQueue.async {
            if output == self.videoOutput {
                
                if self.videoWriterInput?.isReadyForMoreMediaData == true {
                                            
                    if let sessionStart = self.sessionStartTime {
                        let relativeTimestamp = CMTimeSubtract(timestamp, sessionStart)
                        self.videoWriterInput?.append(sampleBuffer)
                        self.lastVideoFrameTime = relativeTimestamp
                        print("Appended video buffer at: (relativeTimestamp.seconds) seconds")
                    } else {
                        // Fallback if sessionStartTime is nil
                        self.videoWriterInput?.append(sampleBuffer)
                        self.lastVideoFrameTime = timestamp
                        print("Appended video buffer at: (timestamp.seconds) seconds (fallback)")
                    }
                    
                    // THIS RESULTS IN VIDEO WITH A DURATION BUT AUDIO/PICTURE ARE OUT OF SYNC
                    //                        self.videoWriterInput?.append(sampleBuffer)
                    //                        self.lastVideoFrameTime = timestamp
                    //                        print("Appended video buffer at: \(timestamp.seconds)")
                }
                
            } else if output == self.audioOutput {
                if self.audioWriterInput?.isReadyForMoreMediaData == true {
                                            
                    // Calculate relative timestamp
                    if let sessionStart = self.sessionStartTime {
                        let relativeTimestamp = CMTimeSubtract(timestamp, sessionStart)
                        self.audioWriterInput?.append(sampleBuffer)
                        self.lastAudioFrameTime = relativeTimestamp
                        print("Appended audio buffer at: (relativeTimestamp.seconds) seconds")
                    } else {
                        // Fallback if sessionStartTime is nil
                        self.audioWriterInput?.append(sampleBuffer)
                        self.lastAudioFrameTime = timestamp
                        print("Appended audio buffer at: (timestamp.seconds) seconds (fallback)")
                    }
                    
                    // THIS RESULTS IN VIDEO WITH A DURATION BUT AUDIO/PICTURE ARE OUT OF SYNC
                    //                        self.audioWriterInput?.append(sampleBuffer)
                    //                        self.lastAudioFrameTime = timestamp
                    //                        print("Appended audio buffer at: \(timestamp.seconds)")
                }
            }
        }
    }
}
    
private func handleEndingState(output: AVCaptureOutput, sampleBuffer: CMSampleBuffer, timestamp: CMTime) {
    print("capture ending")
    print("assetWriter.status: \(assetWriter?.status.rawValue ?? -1)")
    
    if assetWriter?.status == .writing && !isFinishingCapture {
        // Handle video capture finalization
        let endTimeCMTime = CMTimeMaximum(lastVideoFrameTime, lastAudioFrameTime)
        print("finalizing capture at endTimeCMTime: \(endTimeCMTime.seconds) seconds")
        
        captureState = .idle
        isFinishingCapture = true
        
        finalizeVideoCapture(endTimeCMTime)
    }
}

func finalizeVideoCapture(_ endTimeCMTime: CMTime) {
    captureQueue.async {
        print("Finalizing video seize at endTimeCMTime: (endTimeCMTime.seconds) seconds")
        
        // Mark inputs as completed
        self.audioWriterInput?.markAsFinished()
        self.videoWriterInput?.markAsFinished()
        
        // Finish the session on the final buffer's timestamp
        self.assetWriter?.endSession(atSourceTime: endTimeCMTime)
        
        // End writing
        self.assetWriter?.finishWriting { [weak self] in
            guard let self = self, let capturedURL = self.assetWriter?.outputURL else { return }
            
            if self.assetWriter.status == .completed {
                print("Video writing completed successfully.")
                DispatchQueue.main.async {
                    self.saveToFirebaseStorage(outputURL: capturedURL, image: nil)
                }
            } else {
                if let error = self.assetWriter.error {
                    print("Error ending writing: (error.localizedDescription)")
                } else {
                    print("Video writing failed with out a particular error.")
                }
                // Deal with the failure accordingly
            }
        }
    }
}
}
