Swift - How to Save Audio from AVAudioEngine, or from AVAudioPlayerNode? If Yes, How

SWIFT - Is it possible to save audio from AVAudioEngine, or from AudioPlayerNode? If yes, how?

Yes, it's quite easy. You simply put a tap on a node and save the buffer into a file.

Unfortunately this means you have to play through the node. I was hoping that AVAudioEngine would let me process one sound file into another directly, but apparently that's impossible - you have to play and process in real time.

Saving Audio After Effect in iOS

Here is my solution to the question:

// MARK: - Play with effects while recording the processed output (Swift 2-era API)
//
// Builds an AVAudioEngine graph (player -> varispeed -> pitch -> reverb -> delay -> mixer),
// plays the global `audioFile` through it, and taps the main mixer to write the
// processed audio into Documents/effectedSound2.m4a.
// NOTE(review): relies on globals declared elsewhere in the class — audioEngine,
// audioPlayerNode, playerB, player, audioFile, newAudio, filteredOutputURL.
// Confirm their declarations before reuse.
func playAndRecord(pitch : Float, rate: Float, reverb: Float, echo: Float) {
// Initialize variables

// These are global variables; if you prefer locals, declare them here instead (let audioEngine = AVAudioEngine(), etc.)
audioEngine = AVAudioEngine()
audioPlayerNode = AVAudioPlayerNode()
audioEngine.attachNode(audioPlayerNode)
playerB = AVAudioPlayerNode()

audioEngine.attachNode(playerB)

// Setting the pitch
let pitchEffect = AVAudioUnitTimePitch()
pitchEffect.pitch = pitch
audioEngine.attachNode(pitchEffect)

// Setting the playback rate
let playbackRateEffect = AVAudioUnitVarispeed()
playbackRateEffect.rate = rate
audioEngine.attachNode(playbackRateEffect)

// Setting the reverb effect
let reverbEffect = AVAudioUnitReverb()
reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.Cathedral)
reverbEffect.wetDryMix = reverb
audioEngine.attachNode(reverbEffect)

// Setting the echo effect on a specific interval
let echoEffect = AVAudioUnitDelay()
echoEffect.delayTime = NSTimeInterval(echo)
audioEngine.attachNode(echoEffect)

// Chain all these up, ending with the output
audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: nil)
audioEngine.connect(playbackRateEffect, to: pitchEffect, format: nil)
audioEngine.connect(pitchEffect, to: reverbEffect, format: nil)
audioEngine.connect(reverbEffect, to: echoEffect, format: nil)
audioEngine.connect(echoEffect, to: audioEngine.mainMixerNode, format: nil)

// Good practice to stop before starting
audioPlayerNode.stop()

// Stop any previous playback first.
// (`player` is a global AVAudioPlayer declared elsewhere.)
if(player != nil){
player?.stop()
}

// `audioFile` here is our original, unprocessed audio
audioPlayerNode.scheduleFile(audioFile, atTime: nil, completionHandler: {
print("Complete")
})

// NOTE(review): `try!` crashes on failure; a do/catch would be safer.
try! audioEngine.start()

// Destination: Documents/effectedSound2.m4a
let dirPaths: AnyObject = NSSearchPathForDirectoriesInDomains( NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)[0]
let tmpFileUrl: NSURL = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound2.m4a"))

// Save tmpFileUrl into a global variable so it isn't lost (only needed if you use it later)
filteredOutputURL = tmpFileUrl

do{
print(dirPaths)

// Output file the tap writes into.
// NOTE(review): pairing kAudioFormatAppleLossless with AVEncoderBitRateKey is
// questionable — a fixed bit rate does not apply to lossless; verify these settings.
self.newAudio = try! AVAudioFile(forWriting: tmpFileUrl, settings: [
AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
AVEncoderAudioQualityKey : AVAudioQuality.Low.rawValue,
AVEncoderBitRateKey : 320000,
AVNumberOfChannelsKey: 2,
AVSampleRateKey : 44100.0
])

// Total frames in the source; used to decide when to stop recording.
let length = self.audioFile.length

// Tap the main mixer: every rendered buffer gets appended to `newAudio`.
audioEngine.mainMixerNode.installTapOnBus(0, bufferSize: 1024, format: self.audioEngine.mainMixerNode.inputFormatForBus(0)) {
(buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in

print(self.newAudio.length)
print("=====================")
print(length)
print("**************************")

if (self.newAudio.length) < length {// Stop saving once we've written as many frames as the source has, otherwise we would keep saving forever

do{
//print(buffer)
try self.newAudio.writeFromBuffer(buffer)
}catch _{
print("Problem Writing Buffer")
}
}else{
self.audioEngine.mainMixerNode.removeTapOnBus(0)// If we don't remove the tap, it will keep firing indefinitely

//DO WHAT YOU WANT TO DO HERE WITH EFFECTED AUDIO

}

}
}catch _{
print("Problem")
}

audioPlayerNode.play()

}

save the audio file in the background

Actually, we made a mistake in the settings of the output audio file. The output audio file's processing format should be the same as that of the input file (the one you applied the effect or pitch to).

Also, the output file should be in WAV or CAF format; only these formats save correctly to the output audio file.

 // Rebuilds the audio graph (player -> 12-band EQ -> main mixer), plays `audioFile`
 // through it, and taps the player node to write the processed audio to
 // Documents/test.caf as 16-bit little-endian linear PCM.
 // NOTE(review): relies on ivars declared elsewhere: engine, audio_player_node,
 // unitEq, audioFile, outputFile — confirm against the class interface.
 - (IBAction)save_it_after_changes:(id)sender
{

// Fresh engine and player for each save pass.
engine = [[AVAudioEngine alloc] init];
audio_player_node= [[AVAudioPlayerNode alloc] init];
[engine attachNode:audio_player_node];
[self setupEQ];

// Connect player -> EQ -> mixer, using the source file's processing format throughout.
AVAudioMixerNode *mixerNode = [engine mainMixerNode];
[engine connect:audio_player_node to:unitEq format:audioFile.processingFormat];
[engine connect:unitEq to:mixerNode format:audioFile.processingFormat];

NSError *error12;
[engine startAndReturnError:&error12];
if (!error12)
{
NSLog(@"Engine = %@",engine);
[audio_player_node scheduleFile:audioFile atTime:nil completionHandler:nil];
// Output settings: linear PCM, 44.1 kHz, stereo, 16-bit, little-endian, integer —
// a format AVAudioFile can write directly into a CAF container.
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];

[recordSetting setValue :[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey];
[recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
[recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];

[recordSetting setValue :[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
[recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
[recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];

NSError *error;
outputFile = [[AVAudioFile alloc] initForWriting:[self testFilePathURL] settings:recordSetting error:&error];
NSLog(@"outputfile = %@",outputFile);
if (error)
{
NSLog(@"outputFile error = %@",error);
}
else
{ // Tap the player node and append each rendered buffer to the output file.
[audio_player_node installTapOnBus:0 bufferSize:8192 format:audioFile.processingFormat block:^(AVAudioPCMBuffer *buffer, AVAudioTime *when) {
NSLog(@"Buffer Size = %@",buffer);
NSLog(@"when = %lld",when.sampleTime);
NSLog(@"outputfile length = %lli",outputFile.length);
NSLog(@"input file length = %lld",audioFile.length);
// Keep writing until we've saved as many frames as the source file contains.
if (outputFile.length<audioFile.length)
{
NSError *error;
[outputFile writeFromBuffer:buffer error:&error];
if (error)
{
NSLog(@"writebuffererror =%@",error);
}
}
else
{
// Removing the tap is essential: it stops the writes and lets the file close cleanly.
[audio_player_node removeTapOnBus:0];
NSError *error2;
// player2 = [[AVAudioPlayer alloc] initWithContentsOfURL:[self testFilePathURL] error:&error2];
//player2.delegate = self;
NSLog(@"Pathththt = %@",[self testFilePathURL]);
NSLog(@"error = %@",error2);
// [audio_player_node scheduleFile:outputFile atTime:nil completionHandler:nil];
//[audio_player_node play];
// [self toMp3];
}

}];
}
}
else
{
NSLog(@"error12 =%@",error12);
}

}

// Builds the 12-band EQ and attaches it to the engine:
// - bands 0–9: parametric cuts at octave-spaced centers (31 Hz … 16 kHz),
//   each one octave wide with a -20 dB gain (exactly the values the original
//   set out by hand, now expressed as a table + loop);
// - band 10: low-pass rolling off above ~16.9 kHz;
// - band 11: high-pass rolling off below ~205 Hz.
// NOTE(review): writes the `unitEq` ivar and attaches it to the `engine` ivar,
// both declared elsewhere in the class.
- (void)setupEQ
{
    NSLog(@"setupEQ");

    unitEq = [[AVAudioUnitEQ alloc] initWithNumberOfBands:12];
    unitEq.globalGain = 3.0;

    // Octave-spaced center frequencies for the ten parametric bands.
    const float centerFrequencies[10] = { 31, 63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000 };
    for (NSUInteger i = 0; i < 10; i++)
    {
        AVAudioUnitEQFilterParameters *band = unitEq.bands[i];
        band.filterType = AVAudioUnitEQFilterTypeParametric;
        band.frequency = centerFrequencies[i];
        band.bandwidth = 1.0;   // one octave wide
        band.gain = -20;        // deep cut, matching the original hand-written values
        band.bypass = FALSE;
    }

    // Band 10: low-pass above ~16.9 kHz.
    AVAudioUnitEQFilterParameters *lowPass = unitEq.bands[10];
    lowPass.filterType = AVAudioUnitEQFilterTypeLowPass;
    lowPass.frequency = 16857;
    lowPass.bypass = FALSE;

    // Band 11: high-pass below ~205 Hz.
    AVAudioUnitEQFilterParameters *highPass = unitEq.bands[11];
    highPass.filterType = AVAudioUnitEQFilterTypeHighPass;
    highPass.frequency = 205;
    highPass.bypass = FALSE;

    [engine attachNode:unitEq];
}

// Returns the app's Documents directory path, or nil if the lookup yields nothing.
- (NSString *)applicationDocumentsDirectory
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    if (searchPaths.count == 0)
    {
        return nil;
    }
    return searchPaths.firstObject;
}

//------------------------------------------------------------------------------

// File URL for "test.caf" inside the Documents directory — the recorder's output target.
- (NSURL *)testFilePathURL
{
    NSString *outputPath = [NSString stringWithFormat:@"%@/test.caf",
                            [self applicationDocumentsDirectory]];
    return [NSURL fileURLWithPath:outputPath];
}

Please play the file after it has been saved successfully. It works for me — check it out.

Please refer below link, I get more from here, Can I use AVAudioEngine to read from a file, process with an audio unit and write to a file, faster than real-time?

Refer to this sample project; it is what we are looking for:
https://github.com/VladimirKravchenko/AVAudioEngineOfflineRender

Record audio with added effects

You need to flush and close the file audio file, so that the caf file is properly written out.

Seeing AVAudioFile doesn't have explicit methods for doing that, your only hope appears to be setting newAudio to nil after you've finished and hoping that it is done during AVAudioFile's dealloc:

self.audioEngine.mainMixerNode.removeTapOnBus(0)
print("finish?")
self.newAudio = nil // hopefully flush & close, if there are no other strong references

Playing an audio file repeatedly with AVAudioEngine

I found the solution in another question, asked and also self-answered by @CarveDrone, so I've just copied the code he used:

// Minimal example (Swift 1.x-era API): load a bundled WAV into a PCM buffer and
// loop it forever via AVAudioPlayerNode's .Loops scheduling option.
// NOTE(review): the snippet as posted is truncated — the class's closing brace
// is missing in the original answer.
class aboutViewController: UIViewController {

var audioEngine: AVAudioEngine = AVAudioEngine()
var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.

// Locate the bundled sound and read the entire file into an in-memory buffer.
let filePath: String = NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!
println("\(filePath)")
let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
let audioFile = AVAudioFile(forReading: fileURL, error: nil)
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
audioFile.readIntoBuffer(audioFileBuffer, error: nil)

// Wire the player straight into the main mixer and start the engine.
var mainMixer = audioEngine.mainMixerNode
audioEngine.attachNode(audioFilePlayer)
audioEngine.connect(audioFilePlayer, to:mainMixer, format: audioFileBuffer.format)
audioEngine.startAndReturnError(nil)

// .Loops makes the scheduled buffer repeat until the player is stopped.
audioFilePlayer.play()
audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options:.Loops, completionHandler: nil)
}

The only thing you have to change is the filePath constant. Here is the link to the original answer: Having AVAudioEngine repeat a sound

Tap audio output using AVAudioEngine

I was facing the same problem, and after two days of brainstorming I found the following.

Apple says that For AVAudioOutputNode, tap format must be specified as nil. I'm not sure that it's important but in my case, that finally worked, format was nil.
You need to start recording and don't forget to stop it.

Removing tap is really important, otherwise you will have file that you can't open.

Try to save the file with the same audio settings that you used in source file.

Here's my code that finally worked. It was partly taken from this question Saving Audio After Effect in iOS.

    /// Plays `audioFile` through a rate/pitch node plus optional echo and reverb,
    /// while tapping the main mixer to save the processed audio to
    /// Library/sounds/effected.caf.
    ///
    /// Relies on instance state configured elsewhere: `effect`, `audioFile`,
    /// `filteredOutputURL`, `newAudio`, `stopTimer`, and the
    /// `connectAudioNodes` / `showAlert` / `stopAudio` helpers.
    func playSound() {
        let rate: Float? = effect.speed
        let pitch: Float? = effect.pitch
        let echo: Bool? = effect.echo
        let reverb: Bool? = effect.reverb

        // Initialize audio engine components.
        audioEngine = AVAudioEngine()

        // Node for playing audio.
        audioPlayerNode = AVAudioPlayerNode()
        audioEngine.attach(audioPlayerNode)

        // Node for adjusting rate/pitch.
        let changeRatePitchNode = AVAudioUnitTimePitch()
        if let pitch = pitch {
            changeRatePitchNode.pitch = pitch
        }
        if let rate = rate {
            changeRatePitchNode.rate = rate
        }
        audioEngine.attach(changeRatePitchNode)

        // Node for echo (multi-tap delay via a distortion-unit factory preset).
        let echoNode = AVAudioUnitDistortion()
        echoNode.loadFactoryPreset(.multiEcho1)
        audioEngine.attach(echoNode)

        // Node for reverb.
        let reverbNode = AVAudioUnitReverb()
        reverbNode.loadFactoryPreset(.cathedral)
        reverbNode.wetDryMix = 50
        audioEngine.attach(reverbNode)

        // Connect nodes: player -> rate/pitch -> (echo) -> (reverb) -> mixer -> output.
        if echo == true && reverb == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.mainMixerNode, audioEngine.outputNode)
        } else if echo == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.mainMixerNode, audioEngine.outputNode)
        } else if reverb == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.mainMixerNode, audioEngine.outputNode)
        } else {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.mainMixerNode, audioEngine.outputNode)
        }

        // Schedule the file; on completion, compute how long playback will still take
        // (accounting for a custom rate) and arm a timer that calls stopAudio().
        audioPlayerNode.stop()
        audioPlayerNode.scheduleFile(audioFile, at: nil) {
            var delayInSeconds: Double = 0
            if let lastRenderTime = self.audioPlayerNode.lastRenderTime,
               let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
                let remainingFrames = Double(self.audioFile.length - playerTime.sampleTime)
                // A nil rate means normal speed (1.0).
                delayInSeconds = remainingFrames / Double(self.audioFile.processingFormat.sampleRate) / Double(rate ?? 1.0)
            }

            // Schedule a stop timer for when audio finishes playing.
            self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(EditViewController.stopAudio), userInfo: nil, repeats: false)
            RunLoop.main.add(self.stopTimer!, forMode: RunLoop.Mode.default)
        }

        do {
            try audioEngine.start()
        } catch {
            showAlert(Alerts.AudioEngineError, message: String(describing: error))
            return
        }

        // Destination: Library/sounds/effected.caf.
        let dirPaths: String = (NSSearchPathForDirectoriesInDomains(.libraryDirectory, .userDomainMask, true)[0]) + "/sounds/"
        let tmpFileUrl = URL(fileURLWithPath: dirPaths + "effected.caf")

        // Keep the destination URL around for later use.
        // (Fixes the original, which referenced an undefined `filePath` here.)
        filteredOutputURL = tmpFileUrl
        do {
            print(dirPaths)
            // The "sounds" subdirectory may not exist yet, and AVAudioFile will not create it.
            try FileManager.default.createDirectory(atPath: dirPaths, withIntermediateDirectories: true)
            let settings = [AVSampleRateKey : NSNumber(value: Float(44100.0)),
                            AVFormatIDKey : NSNumber(value: Int32(kAudioFormatMPEG4AAC)),
                            AVNumberOfChannelsKey : NSNumber(value: 1),
                            AVEncoderAudioQualityKey : NSNumber(value: Int32(AVAudioQuality.medium.rawValue))]
            // `try` (not `try!`) so a failure is reported instead of crashing.
            self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)
            let length = self.audioFile.length
            // Non-optional closure parameters avoid the original's forced unwrap of `buffer`.
            audioEngine.mainMixerNode.installTap(onBus: 0, bufferSize: 4096, format: nil) { buffer, _ in
                // Stop saving once we've written as many frames as the source has,
                // otherwise the tap would keep appending forever.
                if self.newAudio.length <= length {
                    do {
                        try self.newAudio.write(from: buffer)
                    } catch {
                        print("Problem Writing Buffer")
                    }
                } else {
                    // If the tap isn't removed, it keeps firing indefinitely.
                    self.audioEngine.mainMixerNode.removeTap(onBus: 0)
                }
            }
        } catch {
            print("Problem creating output file: \(error)")
        }

        // Play the recording!
        audioPlayerNode.play()

    }

/// Halts playback: stops the player node, removes the recording tap,
/// cancels the completion timer, updates the UI, and resets the engine.
@objc func stopAudio() {
    if let player = audioPlayerNode {
        player.stop()
        audioEngine?.mainMixerNode.removeTap(onBus: 0)
    }

    stopTimer?.invalidate()

    configureUI(.notPlaying)

    if let engine = audioEngine {
        engine.stop()
        engine.reset()
    }

    isPlaying = false
}


Related Topics



Leave a reply



Submit