Convert AVAudioPCMBuffer to NSData and back
Buffer length is frameCapacity * bytesPerFrame. Here are functions that can do conversion between NSData and AVAudioPCMBuffer.
extension AVAudioPCMBuffer {
    /// Copies channel 0's samples into a `Data` blob.
    ///
    /// Sizes the copy by `frameLength` (the number of valid frames) rather
    /// than `frameCapacity`: capacity can exceed the frames actually written,
    /// and exporting it would include uninitialized memory.
    ///
    /// - Note: Assumes a non-interleaved Float32 buffer with a single
    ///   channel (as the surrounding discussion states); a multi-channel
    ///   buffer would need one copy per channel.
    func data() -> Data {
        let channelCount = 1 // given PCMBuffer channel count is 1
        let channels = UnsafeBufferPointer(start: self.floatChannelData, count: channelCount)
        // valid frames * bytes-per-frame = payload size in bytes
        let byteCount = Int(self.frameLength * self.format.streamDescription.pointee.mBytesPerFrame)
        return Data(bytes: channels[0], count: byteCount)
    }
}
/// Rebuilds an `AVAudioPCMBuffer` from raw non-interleaved Float32 mono bytes.
///
/// - Parameters:
///   - data: Raw Float32 PCM bytes for a single channel.
///   - sampleRate: Sample rate of the encoded audio. Defaults to 8000 Hz to
///     preserve the original behavior; pass the real rate for other streams.
/// - Returns: A buffer whose `frameLength` covers all complete frames in
///   `data`, or `nil` if the format or buffer could not be created
///   (e.g. empty input).
func toPCMBuffer(data: NSData, sampleRate: Double = 8000) -> AVAudioPCMBuffer? {
    guard let audioFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                          sampleRate: sampleRate,
                                          channels: 1,
                                          interleaved: false) else {
        return nil
    }
    let bytesPerFrame = audioFormat.streamDescription.pointee.mBytesPerFrame
    let frameCount = UInt32(data.length) / bytesPerFrame
    guard frameCount > 0,
          let PCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: frameCount),
          let channelData = PCMBuffer.floatChannelData else {
        return nil
    }
    PCMBuffer.frameLength = PCMBuffer.frameCapacity
    // Copy exactly the bytes the buffer can hold; this guards against a
    // trailing partial frame in `data` overrunning the buffer.
    let byteCount = Int(PCMBuffer.frameLength * bytesPerFrame)
    data.getBytes(UnsafeMutableRawPointer(channelData[0]), length: byteCount)
    return PCMBuffer
}
Converting AVAudioPCMBuffer to NSData
I don't see any method named inTotalBitsPerChannel
in either of the code samples you linked to; instead, they both seem to use mBytesPerFrame
. You will also need .pointee
to dereference the pointer. Finally, in modern Swift, you should generally prefer to use Data
over NSData
. So, basically, I think your extension should work if you rewrite the last line to:
let ch0Data = Data(bytes: channels[0], count: Int(frameCapacity * format.streamDescription.pointee.mBytesPerFrame))
Converting NSData to AVAudioPCMBuffer
You're using the wrong AVAudioBuffer
subclass. AVAudioPCMBuffer
is for uncompressed "Pulse-code modulation" audio. You want AVAudioCompressedBuffer
instead.
How to play audio from AVAudioPCMBuffer converted from NSData
I ended up using an Objective-C function; the data is getting converted fine:
// Builds a single-buffer AudioBufferList containing a copy of `data`'s bytes.
// Ownership: the caller must free() both allocations when done —
//   free(list->mBuffers[0].mData); free(list);
// Returns NULL for empty input or if any allocation fails.
-(AudioBufferList *) getBufferListFromData: (NSData *) data
{
    if (data.length > 0)
    {
        NSUInteger len = [data length];
        //NSData *d2 = [data subdataWithRange:NSMakeRange(4, 1028)];
        //I guess you can use Byte*, void* or Float32*. I am not sure if that makes any difference.
        Byte* byteData = (Byte*) malloc (len);
        // Check the malloc result BEFORE memcpy: copying into a NULL
        // destination is undefined behavior.
        if (byteData)
        {
            memcpy (byteData, [data bytes], len);
            AudioBufferList * theDataBuffer = (AudioBufferList*) malloc (sizeof(AudioBufferList));
            if (theDataBuffer == NULL)
            {
                // Don't leak the sample copy when the list allocation fails.
                free(byteData);
                return NULL;
            }
            theDataBuffer->mNumberBuffers = 1;
            theDataBuffer->mBuffers[0].mDataByteSize = (UInt32) len;
            theDataBuffer->mBuffers[0].mNumberChannels = 1;
            theDataBuffer->mBuffers[0].mData = byteData;
            return theDataBuffer;
        }
    }
    return NULL;
}
How to convert audio so that it can be streamed across devices
Here you go:
/// Serializes channel 0 of a mono Float32 PCM buffer into an NSData blob.
///
/// Uses `frameLength` (valid frames) instead of `frameCapacity` so that
/// uninitialized capacity beyond the written samples is never exported.
func audioBufferToNSData(PCMBuffer: AVAudioPCMBuffer) -> NSData {
    let channelCount = 1 // given PCMBuffer channel count is 1
    let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: channelCount)
    // valid frames * bytes-per-frame = payload size in bytes
    let byteCount = Int(PCMBuffer.frameLength * PCMBuffer.format.streamDescription.pointee.mBytesPerFrame)
    return NSData(bytes: channels[0], length: byteCount)
}
/// Deserializes raw non-interleaved Float32 mono PCM bytes (44.1 kHz) into
/// an `AVAudioPCMBuffer`.
///
/// In current Swift both `AVAudioFormat(commonFormat:...)` and
/// `AVAudioPCMBuffer(pcmFormat:frameCapacity:)` are failable initializers,
/// so the snippet as originally written no longer compiles. The non-optional
/// return type is kept for source compatibility with existing callers; a
/// failure here indicates a programming error and traps with a message.
func dataToAudioBuffer(data: NSData) -> AVAudioPCMBuffer {
    guard let audioFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                          sampleRate: 44100,
                                          channels: 1,
                                          interleaved: false) else {
        fatalError("dataToAudioBuffer: could not create Float32 mono format")
    }
    let frameCount = UInt32(data.length) / audioFormat.streamDescription.pointee.mBytesPerFrame
    guard let audioBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: frameCount) else {
        fatalError("dataToAudioBuffer: could not allocate buffer for \(frameCount) frames")
    }
    audioBuffer.frameLength = audioBuffer.frameCapacity
    let channels = UnsafeBufferPointer(start: audioBuffer.floatChannelData, count: Int(audioBuffer.format.channelCount))
    data.getBytes(UnsafeMutableRawPointer(channels[0]), length: data.length)
    return audioBuffer
}
How to send NSData over an OutputStream
Pretty sure you want to replace this:
let length = self.inputStream!.read(&buffer, maxLength: buffer.count)
let data = NSData.init(bytes: buffer, length: buffer.count)
With
let length = self.inputStream!.read(&buffer, maxLength: buffer.count)
let data = NSData.init(bytes: buffer, length: length)
Also, I am not 100% sure that the random blocks of data will always be ok to use to make the audio buffers. You might need to collect up the data first into a bigger block of NSData.
Right now, since you always pass in blocks of 8192 (even if you read less), the buffer creation probably always succeeds. It might not now.
Related Topics
In Swift 3, What Is a Way to Compare Two Closures
What's the Difference Between Struct Based and Class Based Singletons
Override Func Error in Swift 2
Convert AVAudioPCMBuffer to NSData and Back
Simplest Solution to Check If a File Exists on a Webserver (Swift)
How Does One Trap Arithmetic Overflow Errors in Swift
How to Clear the Terminal Screen in Swift
How to Go Back to the Initial View Controller in Swift
How to Reason When I Have to Choose Between a Class, Struct and Enum in Swift
Convert Uiimage to Grayscale Keeping Image Quality
Convert a Swift Array of String to a to a C String Array Pointer
Mathematical Functions in Swift
Can the Height of the Uisearchbar Textfield Be Modified
How to Left Align the Title of a Navigation Bar in Xcode
How to Detect a Swiftui Touchdown Event with No Movement or Duration
Why Isn't Guard Let Foo = Foo Valid
Compress Image in iOS 12. How Will This Code Be Updated
How to Export Dae Files for Use in Scene Kit Without Seeing "Untitled-Animations"