Я создаю приложение voip, в котором нам нужно записать звук с микрофона и отправить его куда-нибудь с частотой дискретизации 8 кГц. Прямо сейчас я записываю его с частотой дискретизации по умолчанию, которая в моем случае всегда была 44,1k. Затем он вручную преобразуется в 8k, используя этот алгоритм .
Этот наивный подход приводит к «нормальному» качеству, но я думаю, что было бы намного лучше, если использовать встроенные функции AudioUnit для понижающей дискретизации.
Но когда я изменяю свойство частоты дискретизации в записывающем AudioUnit, оно выводит только бесшумные кадры (~ 0.0), и я не знаю почему.
Я извлек часть приложения, отвечающую за звук. Следует записать с микрофона -> записать в кольцевой буфер -> воспроизвести данные в буфере:
RecordingUnit:
import Foundation
import AudioToolbox
import AVFoundation
class RecordingUnit : NSObject
{
public static let AudioPacketDataSize = 160
public static var instance: RecordingUnit!
public var micBuffers : AudioBufferList?;
public var OutputBuffer = RingBuffer(count: 1 * 1000 * AudioPacketDataSize);
public var currentAudioUnit: AudioUnit?
override init()
{
super.init()
RecordingUnit.instance = self
micBuffers = AudioBufferList(
mNumberBuffers: 1,
mBuffers: AudioBuffer(
mNumberChannels: UInt32(1),
mDataByteSize: UInt32(1024),
mData: UnsafeMutableRawPointer.allocate(byteCount: 1024, alignment: 1)))
}
public func start()
{
var acd = AudioComponentDescription(
componentType: OSType(kAudioUnitType_Output),
componentSubType: OSType(kAudioUnitSubType_VoiceProcessingIO),
componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
componentFlags: 0,
componentFlagsMask: 0)
let comp = AudioComponentFindNext(nil, &acd)
var err : OSStatus
AudioComponentInstanceNew(comp!, ¤tAudioUnit)
var true_ui32: UInt32 = 1
guard AudioUnitSetProperty(currentAudioUnit!,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
1,
&true_ui32,
UInt32(MemoryLayout.size)) == 0 else {print ("could not enable IO for input "); return}
err = AudioUnitSetProperty(currentAudioUnit!,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output,
0,
&true_ui32,
UInt32(MemoryLayout.size))
guard err == 0 else {print ("could not enable IO for output "); return}
var sampleRate : Float64 = 8000
err = AudioUnitSetProperty(currentAudioUnit!,
kAudioUnitProperty_SampleRate,
kAudioUnitScope_Input,
0,
&sampleRate,
UInt32(MemoryLayout.size))
guard err == 0 else {print ("could not set sample rate (error=\(err))"); return}
var renderCallbackStruct = AURenderCallbackStruct(inputProc: recordingCallback, inputProcRefCon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()))
err = AudioUnitSetProperty(currentAudioUnit!,
AudioUnitPropertyID(kAudioOutputUnitProperty_SetInputCallback),
AudioUnitScope(kAudioUnitScope_Global),
1,
&renderCallbackStruct,
UInt32(MemoryLayout.size))
guard err == 0 else {print("could not set input callback"); return}
guard AudioUnitInitialize(currentAudioUnit!) == 0 else {print("could not initialize recording unit"); return}
guard AudioOutputUnitStart(currentAudioUnit!) == 0 else {print("could not start recording unit"); return}
print("Audio Recording started")
}
let recordingCallback: AURenderCallback = { (inRefCon, ioActionFlags, inTimeStamp, inBusNumber, frameCount, ioData ) -> OSStatus in
let audioObject = RecordingUnit.instance!
var err: OSStatus = noErr
guard let au = audioObject.currentAudioUnit else {print("AudioUnit nil (recording)"); return 0}
err = AudioUnitRender(au, ioActionFlags, inTimeStamp, inBusNumber, frameCount, &audioObject.micBuffers!)
let bufferPointer = UnsafeMutableRawPointer(audioObject.micBuffers!.mBuffers.mData)
let dataArray = bufferPointer!.assumingMemoryBound(to: Float.self)
var frames = 0
var sum = Float(0)
for i in 0.. \(frames)/\(frameCount) -> average=\(average)")
return 0
}
public func stop()
{
if currentAudioUnit != nil { AudioUnitUninitialize(currentAudioUnit!) }
}
}
PlaybackUnit.swift:
import Foundation
import AudioToolbox
import AVFoundation
class PlaybackUnit : NSObject {
public static var instance : PlaybackUnit!
public var InputBuffer : RingBuffer?
public var currentAudioUnit: AudioUnit?
override init()
{
super.init()
PlaybackUnit.instance = self
}
public func start()
{
var acd = AudioComponentDescription(
componentType: OSType(kAudioUnitType_Output),
componentSubType: OSType(kAudioUnitSubType_VoiceProcessingIO),
componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
componentFlags: 0,
componentFlagsMask: 0)
let comp = AudioComponentFindNext(nil, &acd)
var err = AudioComponentInstanceNew(comp!, ¤tAudioUnit)
guard err == 0 else {print ("could not create a new AudioComponent instance"); return};
//set sample rate
var sampleRate : Float64 = 8000
AudioUnitSetProperty(currentAudioUnit!,
kAudioUnitProperty_SampleRate,
kAudioUnitScope_Input,
0,
&sampleRate,
UInt32(MemoryLayout.size))
guard err == 0 else {print ("could not set sample rate "); return};
//register render callback
var outputCallbackStruct = AURenderCallbackStruct(inputProc: outputCallback, inputProcRefCon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()))
err = AudioUnitSetProperty(currentAudioUnit!,
AudioUnitPropertyID(kAudioUnitProperty_SetRenderCallback),
AudioUnitScope(kAudioUnitScope_Input),
0,
&outputCallbackStruct,
UInt32(MemoryLayout.size))
guard err == 0 else {print("could not set render callback"); return}
guard AudioUnitInitialize(currentAudioUnit!) == 0 else {print("could not initialize output unit"); return}
guard AudioOutputUnitStart(currentAudioUnit!) == 0 else {print("could not start output unit"); return}
print("Audio Output started")
}
let outputCallback: AURenderCallback = { (
inRefCon,
ioActionFlags,
inTimeStamp,
inBusNumber,
frameCount,
ioData ) -> OSStatus in
let ins = PlaybackUnit.instance
let audioObject = ins!
var err: OSStatus = noErr
var frames = 0
var average : Float = 0
if var ringBuffer = audioObject.InputBuffer {
var dataArray = ioData!.pointee.mBuffers.mData!.assumingMemoryBound(to: Float.self)
var i = 0
while i < frameCount {
if let v = ringBuffer.read() {
dataArray[i] = v
average += v
} else {
dataArray[i] = 0
}
i += 1
frames += 1
}
}
average = average / Float(frameCount)
print("played -> \(frames)/\(frameCount) => avarage: \(average)")
return 0
}
public func stop()
{
if currentAudioUnit != nil { AudioUnitUninitialize(currentAudioUnit!) }
}
}
ViewController:
import UIKit
import AVFoundation
class ViewController: UIViewController {
var micPermission = false
private var micPermissionDispatchToken = 0
override func viewDidLoad() {
super.viewDidLoad()
let audioSession = AVAudioSession.sharedInstance()
if (micPermission == false) {
if (micPermissionDispatchToken == 0) {
micPermissionDispatchToken = 1
audioSession.requestRecordPermission({(granted: Bool)-> Void in
if granted {
self.micPermission = true
return
} else {
print("failed to grant microphone access!!")
}
})
}
}
if micPermission == false { return }
try! audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try! audioSession.setPreferredSampleRate(8000)
try! audioSession.overrideOutputAudioPort(.speaker)
try! audioSession.setPreferredOutputNumberOfChannels(1)
try! audioSession.setMode(AVAudioSessionModeVoiceChat)
try! audioSession.setActive(true)
let microphone = RecordingUnit()
let speakers = PlaybackUnit()
speakers.InputBuffer = microphone.OutputBuffer
microphone.start()
speakers.start()
}
}