Record and Play Audio Simultaneously

How to record and play audio simultaneously in Android?

It is possible to play the audio directly with a stream method while the data is saved in the background. To learn the basics, I suggest taking a look at the examples in the official Android documentation. This is a tough project for a beginner, so go step by step: first record audio, then play the stream with MediaPlayer while saving it, and finally put it all together. Cheers.

How to record and play audio simultaneously in iOS using Swift?

You are setting the input callback and the render callback incorrectly: the recording callback belongs on the input bus (kInputBus = 1) via kAudioOutputUnitProperty_SetInputCallback, while the playback callback belongs on the output bus (kOutputBus = 0) via kAudioUnitProperty_SetRenderCallback. Your other settings look OK, so your init method should be like this:

init() {

    var status: OSStatus

    // Ask the session for a small IO buffer to keep round-trip latency low.
    do {
        try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(preferredIOBufferDuration)
    } catch let error as NSError {
        print(error)
    }

    // Describe the VoiceProcessingIO unit (RemoteIO with echo cancellation).
    var desc = AudioComponentDescription()
    desc.componentType = kAudioUnitType_Output
    desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO
    desc.componentFlags = 0
    desc.componentFlagsMask = 0
    desc.componentManufacturer = kAudioUnitManufacturer_Apple

    let inputComponent: AudioComponent = AudioComponentFindNext(nil, &desc)

    status = AudioComponentInstanceNew(inputComponent, &audioUnit)
    checkStatus(status)

    // Enable recording on the input bus and playback on the output bus.
    var flag = UInt32(1)
    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, UInt32(sizeof(UInt32)))
    checkStatus(status)

    status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, UInt32(sizeof(UInt32)))
    checkStatus(status)

    // Mono, 16-bit, packed signed-integer linear PCM at 8 kHz.
    var audioFormat = AudioStreamBasicDescription()
    audioFormat.mSampleRate = 8000
    audioFormat.mFormatID = kAudioFormatLinearPCM
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
    audioFormat.mFramesPerPacket = 1
    audioFormat.mChannelsPerFrame = 1
    audioFormat.mBitsPerChannel = 16
    audioFormat.mBytesPerPacket = 2
    audioFormat.mBytesPerFrame = 2

    // Note: the data size is the size of the whole AudioStreamBasicDescription,
    // not of a UInt32.
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, UInt32(sizeof(AudioStreamBasicDescription)))
    checkStatus(status)

    try! AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, UInt32(sizeof(AudioStreamBasicDescription)))
    checkStatus(status)

    // Set the input/recording callback on the input bus.
    var inputCallbackStruct = AURenderCallbackStruct(inputProc: recordingCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self)))
    status = AudioUnitSetProperty(audioUnit, AudioUnitPropertyID(kAudioOutputUnitProperty_SetInputCallback), AudioUnitScope(kAudioUnitScope_Global), kInputBus, &inputCallbackStruct, UInt32(sizeof(AURenderCallbackStruct)))
    checkStatus(status)

    // Set the output/render/playback callback on the output bus.
    var renderCallbackStruct = AURenderCallbackStruct(inputProc: playbackCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self)))
    status = AudioUnitSetProperty(audioUnit, AudioUnitPropertyID(kAudioUnitProperty_SetRenderCallback), AudioUnitScope(kAudioUnitScope_Global), kOutputBus, &renderCallbackStruct, UInt32(sizeof(AURenderCallbackStruct)))
    checkStatus(status)

    // We supply our own buffers in the recording callback, so the unit
    // does not need to allocate one for the input bus.
    flag = 0
    status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_ShouldAllocateBuffer, kAudioUnitScope_Output, kInputBus, &flag, UInt32(sizeof(UInt32)))
    checkStatus(status)
}
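For completeness: the init above refers to a checkStatus helper and to recordingCallback/playbackCallback functions with the C AURenderCallback signature, which are not shown in the answer. A minimal Swift 2-style sketch of those pieces might look like the following; the class name AudioController and the enqueueSamples/dequeueSamplesInto helpers are placeholders for however you buffer the samples, and the malloc-per-callback allocation is a simplification (real-time code would preallocate):

import AVFoundation
import AudioToolbox

func checkStatus(status: OSStatus) {
    if status != noErr {
        print("Audio unit error: \(status)")
    }
}

// Input callback: pull the freshly recorded frames from the input bus.
func recordingCallback(inRefCon: UnsafeMutablePointer<Void>,
                       ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                       inTimeStamp: UnsafePointer<AudioTimeStamp>,
                       inBusNumber: UInt32,
                       inNumberFrames: UInt32,
                       ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    let controller = unsafeBitCast(inRefCon, AudioController.self)
    // ShouldAllocateBuffer was disabled, so we hand AudioUnitRender our own buffer.
    let byteSize = inNumberFrames * 2 // mono, 16-bit samples
    var bufferList = AudioBufferList(
        mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1,
                              mDataByteSize: byteSize,
                              mData: malloc(Int(byteSize))))
    let status = AudioUnitRender(controller.audioUnit, ioActionFlags, inTimeStamp,
                                 inBusNumber, inNumberFrames, &bufferList)
    if status == noErr {
        controller.enqueueSamples(bufferList) // placeholder: copy/save the samples for playback
    }
    free(bufferList.mBuffers.mData)
    return status
}

// Render callback: fill the output bus so the recorded audio plays back immediately.
func playbackCallback(inRefCon: UnsafeMutablePointer<Void>,
                      ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                      inTimeStamp: UnsafePointer<AudioTimeStamp>,
                      inBusNumber: UInt32,
                      inNumberFrames: UInt32,
                      ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    let controller = unsafeBitCast(inRefCon, AudioController.self)
    controller.dequeueSamplesInto(ioData, frameCount: inNumberFrames) // placeholder: write queued samples (or silence) into ioData
    return noErr
}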

Try this code and let us know if it helps.
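P.S. Nothing will actually flow until the session is active and the unit has been initialized and started. After init, once the user has granted microphone access, you would do something along these lines (again a sketch, reusing the audioUnit property and checkStatus from above):

do {
    try AVAudioSession.sharedInstance().setActive(true)
} catch let error as NSError {
    print(error)
}
var status = AudioUnitInitialize(audioUnit)
checkStatus(status)
status = AudioOutputUnitStart(audioUnit)
checkStatus(status)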


