I am trying to build an app that streams audio from a Bluetooth HFP headset mic to the iPhone's built-in speaker in real time. However, it seems iOS automatically routes the mic and speaker together to either the headset or the iPhone, and I can't find a way to split them. Do you know if it is possible to separate them?
Here is what my code looks like:
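For reference, a minimal way to see how iOS has paired the routes is to dump the session's currentRoute; the logCurrentRoute helper below is just illustrative (not part of the app), but it is how I confirmed that the input and output always end up on the same device:

import AVFoundation

// Debugging helper: print which ports the audio session is actually using.
func logCurrentRoute() {
    let route = AVAudioSession.sharedInstance().currentRoute
    for input in route.inputs {
        print("Input: \(input.portName) [\(input.portType.rawValue)]")
    }
    for output in route.outputs {
        print("Output: \(output.portName) [\(output.portType.rawValue)]")
    }
}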
import UIKit
import AVFoundation
import CoreBluetooth
class ViewController: UIViewController, CBCentralManagerDelegate, AVAudioRecorderDelegate, AVAudioPlayerDelegate {

    // UI components
    private let statusLabel = UILabel()
    private let listenButton = UIButton(type: .system)
    private let recordButton = UIButton(type: .system)
    private let playButton = UIButton(type: .system)

    // Bluetooth
    private var centralManager: CBCentralManager!
    private var headsetPeripheral: CBPeripheral?

    // Audio
    private var audioSession: AVAudioSession!
    private var audioEngine: AVAudioEngine!
    private var inputNode: AVAudioInputNode!
    private var mixerNode: AVAudioMixerNode!
    private var isListening = false
    private var isRecording = false

    // Buffer for recording
    private var audioBuffer = [Float]()
    private let bufferSize = 44100 * 30 // 30 seconds at 44.1 kHz
    private var recordedAudioURL: URL?
    private var audioPlayer: AVAudioPlayer?
    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()

        #if targetEnvironment(simulator)
        // Running in the simulator
        statusLabel.text = "Running in simulator - Bluetooth not available"
        listenButton.isEnabled = false
        // Skip Bluetooth and audio engine setup in the simulator
        #else
        // Running on a real device
        centralManager = CBCentralManager(delegate: self, queue: nil)
        setupAudioSession()
        setupAudioEngine()
        #endif
    }
    private func setupUI() {
        view.backgroundColor = .white

        // Status label
        statusLabel.frame = CGRect(x: 20, y: 100, width: view.bounds.width - 40, height: 40)
        statusLabel.text = "Initializing..."
        statusLabel.textAlignment = .center
        view.addSubview(statusLabel)

        // Listen button
        listenButton.frame = CGRect(x: 20, y: 200, width: view.bounds.width - 40, height: 50)
        listenButton.setTitle("Start Listening", for: .normal)
        listenButton.addTarget(self, action: #selector(listenButtonTapped), for: .touchUpInside)
        listenButton.backgroundColor = .systemBlue
        listenButton.setTitleColor(.white, for: .normal)
        listenButton.layer.cornerRadius = 8
        listenButton.isEnabled = false
        view.addSubview(listenButton)

        // Record button
        recordButton.frame = CGRect(x: 20, y: 300, width: view.bounds.width - 40, height: 50)
        recordButton.setTitle("Record", for: .normal)
        recordButton.addTarget(self, action: #selector(recordButtonTapped), for: .touchUpInside)
        recordButton.backgroundColor = .systemRed
        recordButton.setTitleColor(.white, for: .normal)
        recordButton.layer.cornerRadius = 8
        recordButton.isEnabled = false
        view.addSubview(recordButton)

        // Play button
        playButton.frame = CGRect(x: 20, y: 400, width: view.bounds.width - 40, height: 50)
        playButton.setTitle("Play Recording", for: .normal)
        playButton.addTarget(self, action: #selector(playButtonTapped), for: .touchUpInside)
        playButton.backgroundColor = .systemGreen
        playButton.setTitleColor(.white, for: .normal)
        playButton.layer.cornerRadius = 8
        playButton.isEnabled = false
        view.addSubview(playButton)
    }
    private func setupAudioSession() {
        audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth, .defaultToSpeaker])
            try audioSession.setActive(true)

            // Request microphone permission
            AVAudioSession.sharedInstance().requestRecordPermission { [weak self] granted in
                DispatchQueue.main.async {
                    if granted {
                        self?.statusLabel.text = "Microphone permission granted"
                        // Only enable the listen button if Bluetooth is ready too
                        if self?.centralManager.state == .poweredOn {
                            self?.listenButton.isEnabled = true
                        }
                    } else {
                        self?.statusLabel.text = "Microphone permission denied"
                    }
                }
            }
        } catch {
            statusLabel.text = "Error setting up audio session: \(error.localizedDescription)"
        }
    }
    private func setupAudioEngine() {
        audioEngine = AVAudioEngine()
        inputNode = audioEngine.inputNode
        mixerNode = AVAudioMixerNode()

        // Configure the audio format
        let format = inputNode.outputFormat(forBus: 0)

        // Attach and connect the nodes
        audioEngine.attach(mixerNode)
        audioEngine.connect(inputNode, to: mixerNode, format: format)
        audioEngine.connect(mixerNode, to: audioEngine.mainMixerNode, format: format)

        // Install a tap on the mixer node to capture audio
        mixerNode.installTap(onBus: 0, bufferSize: 4096, format: format) { [weak self] buffer, time in
            guard let self = self else { return }
            // Read the samples out of the buffer
            let channelData = buffer.floatChannelData?[0]
            let frameLength = Int(buffer.frameLength)
            // Store them in our circular buffer, dropping the oldest samples
            for i in 0..<frameLength {
                if self.audioBuffer.count >= self.bufferSize {
                    self.audioBuffer.removeFirst()
                }
                self.audioBuffer.append(channelData![i])
            }
        }
    }
    // MARK: - Bluetooth Methods

    func centralManagerDidUpdateState(_ central: CBCentralManager) {
        switch central.state {
        case .poweredOn:
            statusLabel.text = "Bluetooth is powered on. Scanning for headsets..."
            // Start scanning for devices that support the Hands-Free Profile (HFP)
            centralManager.scanForPeripherals(withServices: [CBUUID(string: "111E")], options: nil) // 111E is the UUID for HFP
            // Enable the listen button if microphone permission was granted
            if #available(iOS 17.0, *) {
                if AVAudioApplication.shared.recordPermission == .granted {
                    listenButton.isEnabled = true
                }
            } else {
                // Fallback for iOS 16 and earlier
                if AVAudioSession.sharedInstance().recordPermission == .granted {
                    listenButton.isEnabled = true
                }
            }
        case .poweredOff:
            statusLabel.text = "Bluetooth is powered off"
            listenButton.isEnabled = false
        case .unauthorized, .unsupported:
            statusLabel.text = "Bluetooth is not available"
            listenButton.isEnabled = false
        default:
            statusLabel.text = "Bluetooth state: \(central.state.rawValue)"
            listenButton.isEnabled = false
        }
    }
    func centralManager(_ central: CBCentralManager, didDiscover peripheral: CBPeripheral, advertisementData: [String: Any], rssi RSSI: NSNumber) {
        // Found a peripheral that advertises HFP
        statusLabel.text = "Found headset: \(peripheral.name ?? "Unknown")"
        // Store the headset and connect to it
        headsetPeripheral = peripheral
        centralManager.connect(peripheral, options: nil)
        // Stop scanning
        centralManager.stopScan()
    }
    func centralManager(_ central: CBCentralManager, didConnect peripheral: CBPeripheral) {
        statusLabel.text = "Connected to \(peripheral.name ?? "headset")"
        // Now that we're connected to the headset, try to use its microphone
        do {
            try audioSession.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth, .defaultToSpeaker])
            try audioSession.setActive(true)
            listenButton.isEnabled = true
        } catch {
            statusLabel.text = "Error setting up audio with Bluetooth: \(error.localizedDescription)"
        }
    }
    // MARK: - Button Actions

    @objc private func listenButtonTapped() {
        if isListening {
            // Stop listening
            stopListening()
            listenButton.setTitle("Start Listening", for: .normal)
        } else {
            // Start listening
            startListening()
            listenButton.setTitle("Stop Listening", for: .normal)
        }
    }

    @objc private func recordButtonTapped() {
        if isRecording {
            // Stop recording
            stopRecording()
            recordButton.setTitle("Record", for: .normal)
        } else {
            // Start recording
            startRecording()
            recordButton.setTitle("Stop Recording", for: .normal)
        }
    }

    @objc private func playButtonTapped() {
        playRecording()
    }
    // MARK: - Audio Methods

    private func selectBluetoothInput() {
        guard let availableInputs = audioSession.availableInputs else {
            statusLabel.text = "No audio inputs available"
            return
        }
        // Find the Bluetooth HFP input
        for input in availableInputs {
            if input.portType == .bluetoothHFP {
                print("Setting preferred input to: \(input)")
                do {
                    try input.setPreferredDataSource(input.dataSources?.first)
                    try audioSession.setPreferredInput(input)
                    statusLabel.text = "Using \(input.portName) microphone"
                    print("Successfully set Bluetooth as preferred input")
                    return
                } catch {
                    print("Error setting preferred input: \(error)")
                    statusLabel.text = "Error setting Bluetooth input"
                }
            }
        }
        statusLabel.text = "No Bluetooth headset microphone found"
    }
    private func startListening() {
        do {
            // Set the category with the speaker override
            try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: [.allowBluetooth, .defaultToSpeaker])
            try audioSession.overrideOutputAudioPort(.speaker)
            try audioSession.setActive(true)

            // Debug output
            print("Current audio route: \(audioSession.currentRoute)")
            print("Available inputs: \(AVAudioSession.sharedInstance().availableInputs ?? [])")

            // Select the Bluetooth headset explicitly
            selectBluetoothInput()

            // Start the audio engine
            try audioEngine.start()
            isListening = true
            recordButton.isEnabled = true
            statusLabel.text = "Listening... If no sound, check the volume"
        } catch {
            statusLabel.text = "Error starting audio engine: \(error.localizedDescription)"
            print("Audio engine error: \(error)")
        }
    }
    private func stopListening() {
        // Stop the audio engine
        audioEngine.stop()
        isListening = false
        // We still allow recording from the buffer even after listening stops
        statusLabel.text = "Stopped listening. Buffer still contains the last 30s."
    }
    private func startRecording() {
        // Create a file URL for the recording
        let documentsPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        recordedAudioURL = documentsPath.appendingPathComponent("recording_\(Date().timeIntervalSince1970).wav")
        // The buffer is converted to an audio file when recording stops
        isRecording = true
        statusLabel.text = "Recording from buffer..."
        // Update the button title if we are not currently listening
        if !isListening {
            recordButton.setTitle("Stop Recording", for: .normal)
        }
    }
    private func stopRecording() {
        isRecording = false
        guard let url = recordedAudioURL else { return }
        // Write the buffer to a file
        let format = AVAudioFormat(standardFormatWithSampleRate: 44100, channels: 1)!
        do {
            let file = try AVAudioFile(forWriting: url, settings: format.settings)
            // Create a buffer from our array
            let bufferFormat = AVAudioFormat(standardFormatWithSampleRate: 44100, channels: 1)!
            let buffer = AVAudioPCMBuffer(pcmFormat: bufferFormat, frameCapacity: UInt32(audioBuffer.count))!
            buffer.frameLength = UInt32(audioBuffer.count)
            // Copy our samples to the buffer
            let channelData = buffer.floatChannelData![0]
            for i in 0..<audioBuffer.count {
                channelData[i] = audioBuffer[i]
            }
            try file.write(from: buffer)
            statusLabel.text = "Recording saved"
            playButton.isEnabled = true
        } catch {
            statusLabel.text = "Error saving recording: \(error.localizedDescription)"
        }
    }

    private func playRecording() {
        guard let url = recordedAudioURL else { return }
        do {
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            audioPlayer?.delegate = self
            audioPlayer?.play()
            statusLabel.text = "Playing recording..."
        } catch {
            statusLabel.text = "Error playing recording: \(error.localizedDescription)"
        }
    }
}
Thanks all in advance!