AURenderCallbackStruct in Swift

AURenderCallback in Swift

I just found your post while trying to figure out the same thing (it's not easy finding sample code and examples combining Core Audio/Audio Unit and Swift).

By looking at this repository and reading (several times :-)) Apple's documentation about Using Swift with Cocoa and Objective-C, I managed to piece something together. As it says in the section about Function Pointers:

When calling a function that takes a function pointer argument, you can pass a top-level Swift function, a closure literal, or nil.
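In other words, a parameter of type AURenderCallback can be handed either a non-capturing closure literal or a top-level function. A quick sketch of both forms (mixerUnit is assumed to be an existing audio unit, and renderCallback is the top-level function shown below):

// Closure literal (must not capture any state):
AudioUnitAddRenderNotify(mixerUnit, { _, _, _, _, _, _ in noErr }, nil)

// Top-level function:
AudioUnitAddRenderNotify(mixerUnit, renderCallback, nil)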

So, outside of my class I have a function that looks like this:

func renderCallback(inRefCon: UnsafeMutablePointer<Void>,
                    ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                    inTimeStamp: UnsafePointer<AudioTimeStamp>,
                    inBusNumber: UInt32,
                    inNumberFrames: UInt32,
                    ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    let delegate = unsafeBitCast(inRefCon, AURenderCallbackDelegate.self)
    let result = delegate.performRender(ioActionFlags,
                                        inTimeStamp: inTimeStamp,
                                        inBusNumber: inBusNumber,
                                        inNumberFrames: inNumberFrames,
                                        ioData: ioData)
    return result
}

As you can see, I just call a delegate here. That delegate is declared like so (also outside the class but you already knew that :-))

@objc protocol AURenderCallbackDelegate {
    func performRender(ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                       inTimeStamp: UnsafePointer<AudioTimeStamp>,
                       inBusNumber: UInt32,
                       inNumberFrames: UInt32,
                       ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus
}

Doing so enables me to "get back inside my class" by conforming to the AURenderCallbackDelegate protocol like so:

class AudioUnitGraphManager: NSObject, AURenderCallbackDelegate

And then implementing the performRender method in my AudioUnitGraphManager class:

func performRender(ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>, inTimeStamp: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    print("Hello there!")
    return noErr
}

The final piece of the puzzle is to actually enable the render notify callback, which I do like so:

AudioUnitAddRenderNotify(mixerUnit, renderCallback, UnsafeMutablePointer(unsafeAddressOf(self)))

Hopefully this gives you something to continue the struggle with.

Changes in Swift 3

In Swift 3 the declaration for AURenderCallback has changed to this:

typealias AURenderCallback = (UnsafeMutableRawPointer, UnsafeMutablePointer<AudioUnitRenderActionFlags>, UnsafePointer<AudioTimeStamp>, UInt32, UInt32, UnsafeMutablePointer<AudioBufferList>?) -> OSStatus

Notice that the last parameter is now UnsafeMutablePointer<AudioBufferList>? instead of UnsafeMutablePointer<AudioBufferList> (it is an optional now), and that the first parameter, inRefCon, is now an UnsafeMutableRawPointer instead of UnsafeMutablePointer<Void>.

This means that the code now looks like this.

The renderCallback function

func renderCallback(inRefCon: UnsafeMutableRawPointer,
                    ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                    inTimeStamp: UnsafePointer<AudioTimeStamp>,
                    inBusNumber: UInt32,
                    inNumberFrames: UInt32,
                    ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    let delegate = unsafeBitCast(inRefCon, to: AURenderCallbackDelegate.self)
    let result = delegate.performRender(ioActionFlags: ioActionFlags,
                                        inTimeStamp: inTimeStamp,
                                        inBusNumber: inBusNumber,
                                        inNumberFrames: inNumberFrames,
                                        ioData: ioData)
    return result
}

The AURenderCallbackDelegate protocol

@objc protocol AURenderCallbackDelegate {
    func performRender(ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                       inTimeStamp: UnsafePointer<AudioTimeStamp>,
                       inBusNumber: UInt32,
                       inNumberFrames: UInt32,
                       ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus
}

The actual implementation of performRender

func performRender(ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>, inTimeStamp: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    print("Hello there!")
    return noErr
}
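Since ioData is now an optional, an implementation that actually touches the audio buffers has to unwrap it first. A minimal sketch (what you do with the buffers is up to you; here I just zero them out as a placeholder):

func performRender(ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                   inTimeStamp: UnsafePointer<AudioTimeStamp>,
                   inBusNumber: UInt32,
                   inNumberFrames: UInt32,
                   ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    // Nothing to do if Core Audio did not hand us any buffers.
    guard let ioData = ioData else { return noErr }

    // Wrap the AudioBufferList so the individual buffers can be indexed safely.
    let buffers = UnsafeMutableAudioBufferListPointer(ioData)
    for buffer in buffers {
        guard let mData = buffer.mData else { continue }
        memset(mData, 0, Int(buffer.mDataByteSize))  // placeholder: output silence
    }
    return noErr
}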

Enabling the render notify callback

AudioUnitAddRenderNotify(mixerUnit!, renderCallback, Unmanaged.passUnretained(self).toOpaque())
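One caveat: Unmanaged.passUnretained(self).toOpaque() does not retain self, so the AudioUnitGraphManager instance must be kept alive (for example through a strong property in its owner) for as long as the notify callback can fire. A sketch of how that call might sit inside the class (assuming mixerUnit is a configured, non-optional AudioUnit):

func installRenderNotify() {
    // self is passed unretained, so something else must keep this object alive
    // for the whole lifetime of the audio graph.
    let refCon = Unmanaged.passUnretained(self).toOpaque()
    let status = AudioUnitAddRenderNotify(mixerUnit, renderCallback, refCon)
    assert(status == noErr, "AudioUnitAddRenderNotify failed: \(status)")
}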

UnsafeMutablePointer in AURenderCallback

In Swift 3, initializers cannot be used to convert pointer types. In your case, the type of inRefCon is UnsafeMutableRawPointer, so you need to use the assumingMemoryBound(to:) method.
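For example, inside the callback (a sketch; SineWavePlayer is the struct from the code below):

// Swift 2 style (no longer compiles in Swift 3):
// let player = UnsafeMutablePointer<SineWavePlayer>(inRefCon)

// Swift 3 style:
let player = inRefCon.assumingMemoryBound(to: SineWavePlayer.self)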

And one more thing: the address of player passed to the callback needs to stay stable for as long as the sound is playing; addresses taken from inout arguments (specified by the & prefix) do not fulfill this requirement.

With the two things above fixed, your code would look something like this:

import Foundation
import AudioToolbox

let sineFrequency = 880.0

// MARK: User data struct
struct SineWavePlayer {
    var outputUnit: AudioUnit? = nil
    var startingFrameCount: Double = 0
}

// MARK: Callback function
let SineWaveRenderProc: AURenderCallback = { (inRefCon, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData) -> OSStatus in

    let player = inRefCon.assumingMemoryBound(to: SineWavePlayer.self)

    var j = player.pointee.startingFrameCount
    let cycleLength = 44100 / sineFrequency
    let buffers = UnsafeMutableAudioBufferListPointer(ioData)

    for frame in 0..<inNumberFrames {
        // Write the same sine sample to both (non-interleaved) channels.
        let sample = Float32(sin(2 * M_PI * (j / cycleLength)))
        buffers?[0].mData?.assumingMemoryBound(to: Float32.self)[Int(frame)] = sample
        buffers?[1].mData?.assumingMemoryBound(to: Float32.self)[Int(frame)] = sample

        j += 1
        if j > cycleLength {
            j -= cycleLength
        }
    }

    player.pointee.startingFrameCount = j
    return noErr
}

// MARK: Utility function
func CheckError(_ error: OSStatus, operation: String) {
    guard error != noErr else {
        return
    }

    // Try to render the error as a four-character code; fall back to the raw number.
    var result: String = ""
    var char = Int(error.bigEndian)

    for _ in 0..<4 {
        guard isprint(Int32(char & 255)) != 0 else {
            result = "\(error)"
            break
        }
        result.append(Character(UnicodeScalar(UInt8(char & 255))))
        char = char / 256
    }

    print("Error: \(operation) (\(result))")

    exit(1)
}

func CreateAndConnectOutputUnit(_ playerPtr: UnsafeMutablePointer<SineWavePlayer>) {
    // Generate a description that matches the output device (speakers)
    var outputcd = AudioComponentDescription(componentType: kAudioUnitType_Output,
                                             componentSubType: kAudioUnitSubType_DefaultOutput,
                                             componentManufacturer: kAudioUnitManufacturer_Apple,
                                             componentFlags: 0,
                                             componentFlagsMask: 0)

    let comp = AudioComponentFindNext(nil, &outputcd)

    if comp == nil {
        print("Can't get output unit")
        exit(-1)
    }

    CheckError(AudioComponentInstanceNew(comp!, &playerPtr.pointee.outputUnit),
               operation: "Couldn't open component for outputUnit")

    // Register the render callback
    var input = AURenderCallbackStruct(inputProc: SineWaveRenderProc, inputProcRefCon: playerPtr)

    CheckError(AudioUnitSetProperty(playerPtr.pointee.outputUnit!,
                                    kAudioUnitProperty_SetRenderCallback,
                                    kAudioUnitScope_Input,
                                    0,
                                    &input,
                                    UInt32(MemoryLayout<AURenderCallbackStruct>.size)),
               operation: "AudioUnitSetProperty failed")

    // Initialize the unit
    CheckError(AudioUnitInitialize(playerPtr.pointee.outputUnit!),
               operation: "Couldn't initialize output unit")
}

func main() {
    let playerPtr = UnsafeMutablePointer<SineWavePlayer>.allocate(capacity: 1)
    defer { playerPtr.deallocate(capacity: 1) }
    playerPtr.initialize(to: SineWavePlayer())
    defer { playerPtr.deinitialize() }

    // Set up output unit and callback
    CreateAndConnectOutputUnit(playerPtr)

    // Start playing
    CheckError(AudioOutputUnitStart(playerPtr.pointee.outputUnit!),
               operation: "Couldn't start output unit")

    // Play for 5 seconds
    sleep(5)

    // Clean up
    AudioOutputUnitStop(playerPtr.pointee.outputUnit!)
    AudioUnitUninitialize(playerPtr.pointee.outputUnit!)
    AudioComponentInstanceDispose(playerPtr.pointee.outputUnit!)
}
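And, for a command-line tool, the last step is presumably just to call the entry point at file scope:

main()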

Core Audio sound metering on multiple file players

You shouldn't be doing the callback in Swift. Render thread processing should only be done in C/C++.

You can use a render notify:

AudioUnitAddRenderNotify(mixer, my_C_callback, nil) //my_C_callback should not reference Swift objects, or be a Swift callback.

It uses the same function signature as the render callback. It gets called both pre and post render; you want to process post render. You get this information from ioActionFlags.

int isPostRender = (*ioActionFlags) & kAudioUnitRenderAction_PostRender;
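If you do keep a Swift-side notify callback for non-realtime bookkeeping (the caveat above about render-thread work still applies), the equivalent check in Swift 3 would look roughly like this (a sketch):

let notifyProc: AURenderCallback = { _, ioActionFlags, _, _, _, _ in
    if ioActionFlags.pointee.contains(.unitRenderAction_PostRender) {
        // The mixer has just finished rendering this buffer.
    }
    return noErr
}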

However, since you're using the multi-channel mixer, input level monitoring is built in, so you won't need the callback.

You first enable it like this:

//AudioUnit mixer; kAudioUnitSubType_MultiChannelMixer 

//Call Before AudioUnitInitialize()
UInt32 meteringMode = 1;
AudioUnitSetProperty(mixer, kAudioUnitProperty_MeteringMode, kAudioUnitScope_Input, 0, &meteringMode, sizeof(meteringMode));

Then during processing you can get the levels by reading parameters.

int channel = 0;

AudioUnitParameterValue averageDecibels;
AudioUnitGetParameter(mixer, kMultiChannelMixerParam_PreAveragePower, kAudioUnitScope_Input, channel, &averageDecibels);

AudioUnitParameterValue peakHoldDecibels;
AudioUnitGetParameter(mixer, kMultiChannelMixerParam_PrePeakHoldLevel, kAudioUnitScope_Input, channel, &peakHoldDecibels);

Swift:

var meteringMode: UInt32 = 1
let propSize = UInt32(MemoryLayout<UInt32>.size)
AudioUnitSetProperty(mixer, kAudioUnitProperty_MeteringMode, kAudioUnitScope_Input, 0, &meteringMode, propSize)

let channel: AudioUnitElement = 0

var averageDecibels: AudioUnitParameterValue = 0
AudioUnitGetParameter(mixer, kMultiChannelMixerParam_PreAveragePower, kAudioUnitScope_Input, channel, &averageDecibels)

var peakHoldDecibels: AudioUnitParameterValue = 0
AudioUnitGetParameter(mixer, kMultiChannelMixerParam_PrePeakHoldLevel, kAudioUnitScope_Input, channel, &peakHoldDecibels)
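To drive a level meter you would normally poll these parameters from a timer on the main thread rather than from the render thread. A minimal sketch (assuming mixer is your configured multi-channel mixer unit, metering has been enabled as above, and the block-based Timer API from iOS 10/macOS 10.12 is available):

Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
    var level: AudioUnitParameterValue = 0
    AudioUnitGetParameter(mixer, kMultiChannelMixerParam_PreAveragePower,
                          kAudioUnitScope_Input, 0, &level)
    // Update your meter UI with `level` (in dB) here.
}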

How to record and play audio simultaneously in iOS using Swift?

You are setting the input callback and render callback incorrectly; the other settings seem OK. So your init method should look like this:

init() {

    var status: OSStatus

    do {
        try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(preferredIOBufferDuration)
    } catch let error as NSError {
        print(error)
    }

    var desc: AudioComponentDescription = AudioComponentDescription()
    desc.componentType = kAudioUnitType_Output
    desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO
    desc.componentFlags = 0
    desc.componentFlagsMask = 0
    desc.componentManufacturer = kAudioUnitManufacturer_Apple

    let inputComponent: AudioComponent = AudioComponentFindNext(nil, &desc)

    status = AudioComponentInstanceNew(inputComponent, &audioUnit)
    checkStatus(status)

    // Enable IO for recording (input scope of the input bus) and playback (output scope of the output bus)
    var flag = UInt32(1)
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, UInt32(sizeof(UInt32)))
    checkStatus(status)

    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, UInt32(sizeof(UInt32)))
    checkStatus(status)

    var audioFormat = AudioStreamBasicDescription()
    audioFormat.mSampleRate = 8000
    audioFormat.mFormatID = kAudioFormatLinearPCM
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
    audioFormat.mFramesPerPacket = 1
    audioFormat.mChannelsPerFrame = 1
    audioFormat.mBitsPerChannel = 16
    audioFormat.mBytesPerPacket = 2
    audioFormat.mBytesPerFrame = 2

    // Apply the format to both sides of the unit (note the property size is that of the whole ASBD, not of a UInt32)
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, UInt32(sizeof(AudioStreamBasicDescription)))
    checkStatus(status)

    try! AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, UInt32(sizeof(AudioStreamBasicDescription)))
    checkStatus(status)

    // Set input/recording callback
    var inputCallbackStruct = AURenderCallbackStruct(inputProc: recordingCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self)))
    AudioUnitSetProperty(audioUnit, AudioUnitPropertyID(kAudioOutputUnitProperty_SetInputCallback), AudioUnitScope(kAudioUnitScope_Global), 1, &inputCallbackStruct, UInt32(sizeof(AURenderCallbackStruct)))

    // Set output/renderer/playback callback
    var renderCallbackStruct = AURenderCallbackStruct(inputProc: playbackCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self)))
    AudioUnitSetProperty(audioUnit, AudioUnitPropertyID(kAudioUnitProperty_SetRenderCallback), AudioUnitScope(kAudioUnitScope_Global), 0, &renderCallbackStruct, UInt32(sizeof(AURenderCallbackStruct)))

    // Ask the unit not to allocate its own buffers for the input bus; the recording callback supplies them
    flag = 0
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_ShouldAllocateBuffer, kAudioUnitScope_Output, kInputBus, &flag, UInt32(sizeof(UInt32)))
    checkStatus(status)
}
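Note that recordingCallback and playbackCallback referenced above are not shown here; they have to be top-level functions (or non-capturing closures) to be usable as C function pointers. A minimal sketch of their shape, with the bodies left as placeholders (Swift 2 syntax to match the init above):

func recordingCallback(inRefCon: UnsafeMutablePointer<Void>,
                       ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                       inTimeStamp: UnsafePointer<AudioTimeStamp>,
                       inBusNumber: UInt32,
                       inNumberFrames: UInt32,
                       ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    // Pull the freshly captured samples off the input bus here,
    // typically by calling AudioUnitRender into a buffer list you own.
    return noErr
}

func playbackCallback(inRefCon: UnsafeMutablePointer<Void>,
                      ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                      inTimeStamp: UnsafePointer<AudioTimeStamp>,
                      inBusNumber: UInt32,
                      inNumberFrames: UInt32,
                      ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    // Fill ioData with the next inNumberFrames worth of samples to play here.
    return noErr
}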

Try this code and let us know if it helps.


