Hi friends,
I'm trying to set up a basic record-and-playback audio function in my app. I need the audio to be captured in a specific format, since it will eventually be sent to a server for further processing. Everything seems to work fine when I record and play back using either standard wired headphones or my laptop's built-in microphone. However, when I try AirPods, the sample rate changes to 8 kHz, and on playback I hear each phrase repeated about five times.
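For what it's worth, printing the input node's hardware format confirms the change: with the built-in mic everything lines up, but as soon as AirPods are connected it reports 8000 Hz (I'm guessing they drop to the Bluetooth hands-free profile when used as a microphone). The check is just:

    if let inputNode = self.engine?.inputNode {
        let hwFormat = inputNode.inputFormat(forBus: 0)
        print("hardware input: \(hwFormat.sampleRate) Hz, \(hwFormat.channelCount) ch")
    }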
Below is some sample code showing how I install a tap and convert the audio to 16-bit integer PCM at 44,100 Hz, mono. After that is sample code showing how I play the audio back, converting it from that format to the default input format.
Any suggestions would be greatly appreciated.
Note: this is a macOS/Cocoa app.
import AVFoundation

func startRecording() {
    self.recording = true

    let inputNode = self.engine?.inputNode
    let bus = 0
    // Hardware input format (this is where AirPods report 8 kHz).
    let format = inputNode?.inputFormat(forBus: bus)
    // Target format for the server: 16-bit integer PCM, 44.1 kHz, mono, interleaved.
    let recordingFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                        sampleRate: 44100,
                                        channels: 1,
                                        interleaved: true)
    let formatConverter = AVAudioConverter(from: format!, to: recordingFormat!)
    // Ratio of the hardware rate to the target rate, used to size the output buffer.
    let ratio = Float(format!.sampleRate) / Float(44100.0)

    inputNode?.installTap(onBus: bus, bufferSize: 1024, format: format) { buffer, time in
        let capacity = UInt32(Float(buffer.frameCapacity) / ratio)
        let pcmBuffer = AVAudioPCMBuffer(pcmFormat: recordingFormat!, frameCapacity: capacity)
        var error: NSError?
        let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            // Hand the captured buffer to the converter.
            outStatus.pointee = .haveData
            return buffer
        }
        let status = formatConverter?.convert(to: pcmBuffer!, error: &error, withInputFrom: inputBlock)
        if status == .error, let error = error {
            print("Conversion error: \(error)")
        }
        self.sendAudio(buffer: pcmBuffer!, time: time)
    }

    self.engine?.prepare()
    do {
        try self.engine?.start()
    } catch {
        print("Error starting engine: \(error)")
    }
    print("started audio")
}
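One thing I've started to suspect (happy to be corrected): with AirPods the converter is upsampling 8 kHz to 44.1 kHz, so filling the output buffer needs roughly 5.5x the input frames, and the converter pulls from the input block several times per tap callback. Since my block always reports .haveData and hands back the same buffer, that would explain hearing each phrase about five times. Below is a minimal sketch of the return-once pattern I've been experimenting with inside the tap (the bufferConsumed flag is my own naming):

    var bufferConsumed = false
    let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        if bufferConsumed {
            // This tap callback's buffer was already delivered; stop the converter pulling.
            outStatus.pointee = .noDataNow
            return nil
        }
        // Hand the captured buffer over exactly once.
        bufferConsumed = true
        outStatus.pointee = .haveData
        return buffer
    }

Is that the right way to drive AVAudioConverter from a tap, or should I keep a persistent converter and feed it from a ring buffer instead?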
func sendAudio(buffer: AVAudioPCMBuffer, time: AVAudioTime) {
    print(buffer.frameCapacity)
    // Wrap the converted Int16 samples in a Data object (mono, so channel 0 only).
    let elements = UnsafeBufferPointer(start: buffer.int16ChannelData?[0], count: Int(buffer.frameLength))
    let data = Data(buffer: elements)
    // rawAudioData.append(data)
    playAudio(data: data)
}
func playAudio(data: Data) {
    // Rebuild a buffer in the recording format, then convert back to the player's format.
    let recordingFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                        sampleRate: 44100,
                                        channels: 1,
                                        interleaved: true)
    let buffer = data.makePCMBuffer(format: recordingFormat!) // my own Data extension, sketched below
    let formatConverter = AVAudioConverter(from: recordingFormat!, to: playerFormat!)
    let pcmBuffer = AVAudioPCMBuffer(pcmFormat: playerFormat!, frameCapacity: buffer!.frameCapacity)
    var error: NSError?
    let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        outStatus.pointee = .haveData
        return buffer
    }
    _ = formatConverter?.convert(to: pcmBuffer!, error: &error, withInputFrom: inputBlock)
    audioNode!.scheduleBuffer(pcmBuffer!, completionHandler: turnFramePlayerOff)
}
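In case it matters: makePCMBuffer isn't an AVFoundation API, it's a small Data extension of mine. From memory it's roughly this (treat it as a sketch):

    extension Data {
        // Copies the raw interleaved Int16 samples back into an AVAudioPCMBuffer.
        func makePCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
            let frameCount = UInt32(count) / format.streamDescription.pointee.mBytesPerFrame
            guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
                return nil
            }
            buffer.frameLength = frameCount
            withUnsafeBytes { (raw: UnsafeRawBufferPointer) in
                if let src = raw.baseAddress, let dst = buffer.int16ChannelData?[0] {
                    memcpy(dst, src, count)
                }
            }
            return buffer
        }
    }

playerFormat, audioNode (an AVAudioPlayerNode), and turnFramePlayerOff are defined elsewhere in the class.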