Swift – iOS AVMultiCamPiP video recording issue when bridging to React Native

0
7
swift – iOS AVmulticamPiP Video recording concern for react-native bridging


I am building simultaneous recording with the back and front cameras and found this sample code. I have included some snippets below; you can see the complete code at this URL (https://github.com/Lax/Learn-iOS-Swift-by-Examples/blob/master/AVCam/Swift/AVCam/CameraViewController.swift)

class MyCameraView: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
  
  // MARK: View Controller Life Cycle
  
  override func viewDidLoad() {
    super.viewDidLoad()
    
    // Allow users to double-tap to switch which camera (front/back) is shown in the PiP.
    let togglePiPDoubleTapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(togglePiP))
    togglePiPDoubleTapGestureRecognizer.numberOfTapsRequired = 2
    view.addGestureRecognizer(togglePiPDoubleTapGestureRecognizer)
    
    // Disable the UI. Enable it later, if and only if the session starts running.
    recordButton.isEnabled = false
    
    // Set up the back and front video preview views.
    // Connections are added manually later, hence "WithNoConnection".
    backCameraVideoPreviewView.videoPreviewLayer.setSessionWithNoConnection(session)
    frontCameraVideoPreviewView.videoPreviewLayer.setSessionWithNoConnection(session)
    
    // Store the back and front video preview layers so we can connect them to their inputs.
    backCameraVideoPreviewLayer = backCameraVideoPreviewView.videoPreviewLayer
    frontCameraVideoPreviewLayer = frontCameraVideoPreviewView.videoPreviewLayer
    
    // Store the location of the PiP's frame relative to the full-screen video preview.
    updateNormalizedPiPFrame()
    
    UIDevice.current.beginGeneratingDeviceOrientationNotifications()
    
    /*
     Configure the capture session.
     In general it is not safe to mutate an AVCaptureSession or any of its
     inputs, outputs, or connections from multiple threads at the same time.
     
     Don't do this on the main queue, because AVCaptureMultiCamSession.startRunning()
     is a blocking call, which can take a long time. Dispatch session setup
     to the sessionQueue so as not to block the main queue, which keeps the UI responsive.
     */
    sessionQueue.async {
      self.configureSession()
    }
    
    // Keep the screen awake while previewing/recording.
    UIApplication.shared.isIdleTimerDisabled = true
  }
  
  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    
    sessionQueue.async {
      switch self.setupResult {
      case .success:
        // Only set up observers and start the session running if setup succeeded.
        self.addObservers()
        self.session.startRunning()
        self.isSessionRunning = self.session.isRunning
        
      case .notAuthorized:
        // Camera permission denied: offer a shortcut to the app's Settings page.
        DispatchQueue.main.async {
          let changePrivacySetting = "\(Bundle.main.applicationName) doesn't have permission to use the camera, please change privacy settings"
          let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
          let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)
          
          alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
                                                  style: .cancel,
                                                  handler: nil))
          
          alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
                                                  style: .`default`,
                                                  handler: { _ in
                                                    if let settingsURL = URL(string: UIApplication.openSettingsURLString) {
                                                      UIApplication.shared.open(settingsURL,
                                                                                options: [:],
                                                                                completionHandler: nil)
                                                    }
          }))
          
          self.present(alertController, animated: true, completion: nil)
        }
        
      case .configurationFailed:
        DispatchQueue.main.async {
          let alertMsg = "Alert message when something goes wrong during capture session configuration"
          let message = NSLocalizedString("Unable to capture media", comment: alertMsg)
          let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)
          
          alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
                                                  style: .cancel,
                                                  handler: nil))
          
          self.present(alertController, animated: true, completion: nil)
        }
        
      case .multiCamNotSupported:
        DispatchQueue.main.async {
          let alertMessage = "Alert message when multi cam is not supported"
          let message = NSLocalizedString("Multi Cam Not Supported", comment: alertMessage)
          let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)
          
          self.present(alertController, animated: true, completion: nil)
        }
      }
    }
  }
  
  override func viewWillDisappear(_ animated: Bool) {
    // Stop the session and tear down observers on the session queue,
    // mirroring the setup order in viewWillAppear(_:).
    sessionQueue.async {
      if self.setupResult == .success {
        self.session.stopRunning()
        self.isSessionRunning = self.session.isRunning
        self.removeObservers()
      }
    }
    
    super.viewWillDisappear(animated)
  }
  
  @objc // Expose to Objective-C for use with #selector()
  private func didEnterBackground(notification: NSNotification) {
    // Free up resources while in the background: stop rendering and
    // drop any retained sample buffers.
    dataOutputQueue.async {
      self.renderingEnabled = false
      self.videoMixer.reset()
      self.currentPiPSampleBuffer = nil
    }
  }
  
  @objc // Expose to Objective-C for use with #selector()
  // NOTE(review): name is misspelled ("Forground"); kept as-is because the
  // #selector registration in addObservers() must reference the same symbol.
  func willEnterForground(notification: NSNotification) {
    // Resume rendering when the app returns to the foreground.
    dataOutputQueue.async {
      self.renderingEnabled = true
    }
  }
  
  // MARK: KVO and Notifications
  
  private var sessionRunningContext = 0
  
  // Retained KVO tokens; invalidated in removeObservers().
  private var keyValueObservations = [NSKeyValueObservation]()
  
  private func addObservers() {
    // Enable the record button only while the session is actually running.
    let keyValueObservation = session.observe(\.isRunning, options: .new) { _, change in
      guard let isSessionRunning = change.newValue else { return }
      
      DispatchQueue.main.async {
        self.recordButton.isEnabled = isSessionRunning
      }
    }
    keyValueObservations.append(keyValueObservation)
    
    // React to thermal/system pressure by lowering the frame rate if needed.
    let systemPressureStateObservation = observe(\.self.backCameraDeviceInput?.device.systemPressureState, options: .new) { _, change in
      guard let systemPressureState = change.newValue as? AVCaptureDevice.SystemPressureState else { return }
      self.setRecommendedFrameRateRangeForPressureState(systemPressureState)
    }
    keyValueObservations.append(systemPressureStateObservation)
    
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(didEnterBackground),
                                           name: UIApplication.didEnterBackgroundNotification,
                                           object: nil)
    
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(willEnterForground),
                                           name: UIApplication.willEnterForegroundNotification,
                                           object: nil)
    
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: session)
    
    // A session can run only when the app is full screen. It will be interrupted in a multi-app layout.
    // Add observers to handle these session interruptions and inform the user.
    // See AVCaptureSessionWasInterruptedNotification for other interruption reasons.
    
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionWasInterrupted),
                                           name: .AVCaptureSessionWasInterrupted,
                                           object: session)
    
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionInterruptionEnded),
                                           name: .AVCaptureSessionInterruptionEnded,
                                           object: session)
  }
  
  private func removeObservers() {
    // Invalidate KVO tokens; NotificationCenter selector-based observers are
    // automatically unregistered on deallocation since iOS 9.
    for keyValueObservation in keyValueObservations {
      keyValueObservation.invalidate()
    }
    
    keyValueObservations.removeAll()
  }
  
  // MARK: Video Preview PiP Management
  
  // Which camera currently occupies the small PiP window.
  private var pipDevicePosition: AVCaptureDevice.Position = .front
  
  // PiP frame expressed in the full-screen preview's normalized (0...1) coordinates.
  private var normalizedPipFrame = CGRect.zero
  
  @IBOutlet private var frontCameraPiPConstraints: [NSLayoutConstraint]!
  
  @IBOutlet private var backCameraPiPConstraints: [NSLayoutConstraint]!
  
  @objc // Expose to Objective-C for use with #selector()
  private func togglePiP() {
    // Disable animations so the views move immediately.
    CATransaction.begin()
    UIView.setAnimationsEnabled(false)
    CATransaction.setDisableActions(true)
    
    if pipDevicePosition == .front {
      NSLayoutConstraint.deactivate(frontCameraPiPConstraints)
      NSLayoutConstraint.activate(backCameraPiPConstraints)
      view.sendSubviewToBack(frontCameraVideoPreviewView)
      pipDevicePosition = .back
    } else {
      NSLayoutConstraint.deactivate(backCameraPiPConstraints)
      NSLayoutConstraint.activate(frontCameraPiPConstraints)
      view.sendSubviewToBack(backCameraVideoPreviewView)
      pipDevicePosition = .front
    }
    
    CATransaction.commit()
    UIView.setAnimationsEnabled(true)
    CATransaction.setDisableActions(false)
  }
  
  private func updateNormalizedPiPFrame() {
    let fullScreenVideoPreviewView: PreviewView
    let pipVideoPreviewView: PreviewView
    
    if pipDevicePosition == .back {
      fullScreenVideoPreviewView = frontCameraVideoPreviewView
      pipVideoPreviewView = backCameraVideoPreviewView
    } else if pipDevicePosition == .front {
      fullScreenVideoPreviewView = backCameraVideoPreviewView
      pipVideoPreviewView = frontCameraVideoPreviewView
    } else {
      fatalError("Unexpected pip device position: \(pipDevicePosition)")
    }
    
    // Convert the PiP's frame into the full-screen preview's coordinate space,
    // then scale it down to normalized (0...1) coordinates for the video mixer.
    let pipFrameInFullScreenVideoPreview = pipVideoPreviewView.convert(pipVideoPreviewView.bounds, to: fullScreenVideoPreviewView)
    let normalizedTransform = CGAffineTransform(scaleX: 1.0 / fullScreenVideoPreviewView.frame.width, y: 1.0 / fullScreenVideoPreviewView.frame.height)
    
    normalizedPipFrame = pipFrameInFullScreenVideoPreview.applying(normalizedTransform)
  }
  
  // MARK: Capture Session Management
  
  @IBOutlet private var resumeButton: UIButton!
  
  @IBOutlet private var cameraUnavailableLabel: UILabel!
  
  // Outcome of configureSession(), checked before starting the session.
  private enum SessionSetupResult {
    case success
    case notAuthorized
    case configurationFailed
    case multiCamNotSupported
  }
  
  private let session = AVCaptureMultiCamSession()
  
  private var isSessionRunning = false
  
  private let sessionQueue = DispatchQueue(label: "session queue") // Communicate with the session and other session objects on this queue.
  
  private let dataOutputQueue = DispatchQueue(label: "data output queue")
  
  private var setupResult: SessionSetupResult = .success
  
  // @objc dynamic so its device's systemPressureState can be key-value observed.
  @objc dynamic private(set) var backCameraDeviceInput: AVCaptureDeviceInput?
  
  private let backCameraVideoDataOutput = AVCaptureVideoDataOutput()
  
  @IBOutlet private var backCameraVideoPreviewView: PreviewView!
  
  private weak var backCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
  
  private var frontCameraDeviceInput: AVCaptureDeviceInput?
  
  private let frontCameraVideoDataOutput = AVCaptureVideoDataOutput()
  
  @IBOutlet private var frontCameraVideoPreviewView: PreviewView!
  
  private weak var frontCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
  
  private var microphoneDeviceInput: AVCaptureDeviceInput?
  
  private let backMicrophoneAudioDataOutput = AVCaptureAudioDataOutput()
  
  private let frontMicrophoneAudioDataOutput = AVCaptureAudioDataOutput()
  
  // Must be called on the session queue
  private func configureSession() {
    guard setupResult == .success else { return }
    
    guard AVCaptureMultiCamSession.isMultiCamSupported else {
      print("MultiCam not supported on this device")
      setupResult = .multiCamNotSupported
      return
    }
    
    // When using AVCaptureMultiCamSession, you should manually add connections from AVCaptureInputs to AVCaptureOutputs.
    session.beginConfiguration()
    defer {
      // Commit regardless of outcome; only check system cost on success.
      session.commitConfiguration()
      if setupResult == .success {
        checkSystemCost()
      }
    }

    guard configureBackCamera() else {
      setupResult = .configurationFailed
      return
    }
    
    guard configureFrontCamera() else {
      setupResult = .configurationFailed
      return
    }
    
    guard configureMicrophone() else {
      setupResult = .configurationFailed
      return
    }
  }
  
  // Adds the back camera's input, video data output, and preview-layer
  // connections to the session. Returns false on any failure.
  private func configureBackCamera() -> Bool {
    session.beginConfiguration()
    defer {
      session.commitConfiguration()
    }
    
    // Find the back camera
    guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
      print("Could not find the back camera")
      return false
    }
    
    // Add the back camera input to the session
    do {
      backCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
      
      guard let backCameraDeviceInput = backCameraDeviceInput,
        session.canAddInput(backCameraDeviceInput) else {
          print("Could not add back camera device input")
          return false
      }
      session.addInputWithNoConnections(backCameraDeviceInput)
    } catch {
      print("Could not create back camera device input: \(error)")
      return false
    }
    
    // Find the back camera device input's video port
    guard let backCameraDeviceInput = backCameraDeviceInput,
      let backCameraVideoPort = backCameraDeviceInput.ports(for: .video,
                                                            sourceDeviceType: backCamera.deviceType,
                                                            sourceDevicePosition: backCamera.position).first else {
                                                              print("Could not find the back camera device input's video port")
                                                              return false
    }
    
    // Add the back camera video data output
    guard session.canAddOutput(backCameraVideoDataOutput) else {
      print("Could not add the back camera video data output")
      return false
    }
    session.addOutputWithNoConnections(backCameraVideoDataOutput)
    
    // Check if CVPixelFormat Lossy or Lossless Compression is supported
    if backCameraVideoDataOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossy_32BGRA) {
      // Set the Lossy format
      print("Selecting lossy pixel format")
      backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_Lossy_32BGRA)]
    } else if backCameraVideoDataOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_32BGRA) {
      // Set the Lossless format
      print("Selecting a lossless pixel format")
      backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_Lossless_32BGRA)]
    } else {
      // Set to the fallback format
      print("Selecting a 32BGRA pixel format")
      backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
    }
    
    backCameraVideoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
    
    // Connect the back camera device input to the back camera video data output
    let backCameraVideoDataOutputConnection = AVCaptureConnection(inputPorts: [backCameraVideoPort], output: backCameraVideoDataOutput)
    guard session.canAddConnection(backCameraVideoDataOutputConnection) else {
      print("Could not add a connection to the back camera video data output")
      return false
    }
    session.addConnection(backCameraVideoDataOutputConnection)
    backCameraVideoDataOutputConnection.videoOrientation = .portrait

    // Connect the back camera device input to the back camera video preview layer
    guard let backCameraVideoPreviewLayer = backCameraVideoPreviewLayer else {
      return false
    }
    let backCameraVideoPreviewLayerConnection = AVCaptureConnection(inputPort: backCameraVideoPort, videoPreviewLayer: backCameraVideoPreviewLayer)
    guard session.canAddConnection(backCameraVideoPreviewLayerConnection) else {
      print("Could not add a connection to the back camera video preview layer")
      return false
    }
    session.addConnection(backCameraVideoPreviewLayerConnection)
    
    return true
  }

And then, for the React Native bridge, I made this class:

import Foundation

/// React Native view manager that vends MyCameraView as a native UI component.
/// Must be paired with an RCT_EXTERN_MODULE(MyCameraViewManager, RCTViewManager)
/// macro in an Objective-C bridging file so React Native can discover it.
@objc(MyCameraViewManager)
public class MyCameraViewManager: RCTViewManager {
  public override func view() -> UIView {
    return MyCameraView()
  }

  // View managers that touch UIKit must be initialized on the main queue.
  @objc public override static func requiresMainQueueSetup() -> Bool {
    return true
  }
}

However, when I try to run this code, I get the error shown in the attached screenshot. How can I fix it?

LEAVE A REPLY

Please enter your comment!
Please enter your name here