Xcode 8 Swift 3 Pitch-altering sounds
You're resetting the engine every time you play a sound! And you're creating extra player nodes - it's actually much simpler than that if you only want one instance of the pitch shifted sound playing at once:
// instance variables — create the engine, player node and pitch effect ONCE
// and reuse them; rebuilding them per sound is what caused the original problem.
let engine = AVAudioEngine()
let audioPlayerNode = AVAudioPlayerNode()
let changeAudioUnitTime = AVAudioUnitTimePitch()
call setupAudioEngine()
once:
/// Builds the audio graph (player -> pitch unit -> output) and starts the engine.
/// Call once, before the first hitSound(value:). The player node is left running;
/// individual sounds are scheduled onto it on demand.
func setupAudioEngine() {
    engine.attach(audioPlayerNode)
    engine.attach(changeAudioUnitTime)
    engine.connect(audioPlayerNode, to: changeAudioUnitTime, format: nil)
    engine.connect(changeAudioUnitTime, to: engine.outputNode, format: nil)
    do {
        try engine.start()
    } catch {
        // Was `try? engine.start()`: a startup failure was silently swallowed and
        // play() was called on a dead engine. Report it and bail out instead.
        print("AVAudioEngine failed to start: \(error)")
        return
    }
    audioPlayerNode.play()
}
and call hitSound()
as many times as you like:
// Plays the shared sound once at the given pitch.
// - Parameter value: pitch shift in cents; per the note below, valid range is [-2400, 2400].
// Requires setupAudioEngine() to have been called first (engine running, node playing).
func hitSound(value: Float) {
changeAudioUnitTime.pitch = value
audioPlayerNode.scheduleFile(file, at: nil, completionHandler: nil) // file is an AVAudioFile defined previously
}
p.s. pitch can be shifted two octaves up or down, for a range of 4 octaves, and lies in the numerical range of [-2400, 2400], having the unit "cents".
p.p.s AVAudioUnitTimePitch
is very cool technology. We definitely didn't have anything like it when I was a kid.
UPDATE
If you want multi channel, you can easily set up multiple player and pitch nodes, however you must choose the number of channels before you start the engine. Here's how you'd do two (it's easy to extend to n instances, and you'll probably want to choose your own method of choosing which channel to interrupt when all are playing):
// instance variables — one player + pitch unit per channel; the channel count is
// fixed by these array sizes and must be chosen before the engine starts.
let engine = AVAudioEngine()
var nextPlayerIndex = 0 // round-robin cursor into the two arrays below
let audioPlayers = [AVAudioPlayerNode(), AVAudioPlayerNode()]
let pitchUnits = [AVAudioUnitTimePitch(), AVAudioUnitTimePitch()]
/// Attaches and wires one (player -> pitch -> main mixer) chain per channel,
/// then starts the engine and all players. Call once; the channel count is
/// fixed by the sizes of `audioPlayers`/`pitchUnits`.
func setupAudioEngine() {
    // Pair each player with its pitch unit directly instead of keeping a
    // manually-incremented index alongside the loop.
    for (playerNode, pitchUnit) in zip(audioPlayers, pitchUnits) {
        engine.attach(playerNode)
        engine.attach(pitchUnit)
        engine.connect(playerNode, to: pitchUnit, format: nil)
        engine.connect(pitchUnit, to: engine.mainMixerNode, format: nil)
    }
    do {
        try engine.start()
    } catch {
        // Was `try?`: failure was swallowed and the players were started anyway.
        print("AVAudioEngine failed to start: \(error)")
        return
    }
    for playerNode in audioPlayers {
        playerNode.play()
    }
}
// Plays the sound on the next channel (round-robin) at the given pitch.
// - Parameter value: pitch shift in cents, range [-2400, 2400].
func hitSound(value: Float) {
let playerNode = audioPlayers[nextPlayerIndex]
let pitchUnit = pitchUnits[nextPlayerIndex]
pitchUnit.pitch = value
// interrupt playing sound if you have to:
// stop() flushes anything already scheduled on this node, and play() restarts
// it so the file scheduled below begins immediately. Order matters here.
if playerNode.isPlaying {
playerNode.stop()
playerNode.play()
}
playerNode.scheduleFile(file, at: nil, completionHandler: nil) // file is an AVAudioFile defined previously
nextPlayerIndex = (nextPlayerIndex + 1) % audioPlayers.count
}
Play a sound file and apply pitch - Xcode 8 - Mac
The following code is working. It is playing a file defined via the "path" variable and allows to change the pitch from a 440 Hz diapason to the 432 Hz one (see Plato's scale for details) in real time, just click on the checkbox. I added a picture of the interface to help create it.
import Cocoa
import AVFoundation
// Shared audio objects at file scope so they outlive any single method call.
let engine = AVAudioEngine()
let audioPlayerNode = AVAudioPlayerNode()
var changeAudioUnitTime = AVAudioUnitTimePitch()
var audioFile = AVAudioFile() // empty placeholder; replaced in setupAudioEngine()
@NSApplicationMain
class AppDelegate: NSObject, NSApplicationDelegate
{
    @IBOutlet weak var window: NSWindow!
    @IBOutlet weak var change432: NSButton!

    func applicationDidFinishLaunching(_ aNotification: Notification)
    {
    }

    /// Loads the audio file at `path`, builds the graph
    /// (player -> pitch unit -> output) and starts playback.
    func setupAudioEngine()
    {
        let path = "/Volumes/.../Musique/Musique indienne.m4a"
        let url = URL(fileURLWithPath: path)
        do
        {
            audioFile = try AVAudioFile(forReading: url, commonFormat: AVAudioCommonFormat.pcmFormatInt16, interleaved: false)
        }
        catch let error as NSError {print(error.localizedDescription)}
        engine.attach(audioPlayerNode)
        engine.attach(changeAudioUnitTime)
        engine.connect(audioPlayerNode, to: changeAudioUnitTime, format: nil)
        engine.connect(changeAudioUnitTime, to: engine.outputNode, format: nil)
        audioPlayerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
        do {
            try engine.start()
        } catch {
            // Was `try?`: a startup failure was silently ignored and play() ran anyway.
            print("AVAudioEngine failed to start: \(error)")
            return
        }
        audioPlayerNode.play()
    }

    /// Checkbox toggles between standard 440 Hz tuning and 432 Hz tuning in real time.
    @IBAction func changeDiapason(_ sender: NSButton)
    {
        if change432.state == NSOnState
        {
            changeAudioUnitTime.pitch = -32 // ≈ 432 Hz (allowed values -2400 to 2400; one octave is 1200 cents)
        }
        if change432.state == NSOffState
        {
            changeAudioUnitTime.pitch = 0 // was 1: "back to 440 Hz" is 0 cents; 1 left a slight detune
        }
    }

    /// "Start" button: (re)builds the graph and begins playback.
    @IBAction func lancer(_ sender: NSButton)
    {
        setupAudioEngine()
    }

    /// "Stop" button: tears the graph down so lancer() can rebuild it cleanly.
    @IBAction func stopper(_ sender: NSButton)
    {
        audioPlayerNode.stop()
        audioPlayerNode.reset()
        engine.stop()
        engine.disconnectNodeInput(changeAudioUnitTime)
        engine.disconnectNodeInput(audioPlayerNode)
        engine.detach(changeAudioUnitTime)
        engine.detach(audioPlayerNode)
        engine.reset()
        //exit(0)
    }

    func applicationWillTerminate(_ aNotification: Notification)
    {
        // Insert code here to tear down your application
    }
}
Swift 3: Quickly playing sounds on AVAudioPlayerNode
It seems scheduleBuffer is what I needed after all.
Real time pitch shifting in swift iOS
I couldn't find a proper answer to my question above, and I would appreciate it if someone could answer it; but for now I found AudioKit, which is a great library for sound manipulation and will do the job perfectly.
This is the link to my project's source code if someone needs this in the future.
import UIKit
import AudioKit
import AudioKitUI
/// Live microphone pitch shifting via AudioKit:
/// mic -> mixer -> booster (gain 5) -> pitch shifter -> output.
class ViewController: UIViewController {
    @IBOutlet weak var slider: UISlider!
    @IBOutlet weak var label: UILabel!
    @IBOutlet var audioInputPlot: AKNodeOutputPlot!

    var micBooster: AKBooster?
    var pitchShifter: AKPitchShifter?
    var tracker: AKFrequencyTracker!
    let mic = AKMicrophone()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Match AudioKit's sample rate to the hardware input to avoid format mismatches.
        AKSettings.sampleRate = AudioKit.engine.inputNode.inputFormat(forBus: 0).sampleRate
        let micMixer = AKMixer(mic)
        tracker = AKFrequencyTracker.init(mic)
        // Build the chain with locals so the optional properties never need a
        // force-unwrap (`micBooster!` previously).
        let booster = AKBooster(micMixer)
        booster.gain = 5
        let shifter = AKPitchShifter(booster, shift: 0)
        micBooster = booster
        pitchShifter = shifter
        AudioKit.output = shifter
        do {
            try AudioKit.start()
        } catch {
            // Include the underlying error rather than a bare message.
            print("AudioKit failed to start: \(error)")
        }
    }
}
How to play a sound using Swift?
Most preferably you might want to use AVFoundation.
It provides all the essentials for working with audiovisual media.
Update: Compatible with Swift 2, Swift 3 and Swift 4 as suggested by some of you in the comments.
Swift 2.3
import AVFoundation
var player: AVAudioPlayer? // strong reference: keeps the player alive while it plays

/// Plays "soundName.mp3" from the main bundle (Swift 2.3 API).
func playSound() {
    // Guard instead of force-unwrapping: a missing or renamed resource
    // should fail quietly, not crash the app.
    guard let url = NSBundle.mainBundle().URLForResource("soundName", withExtension: "mp3") else { return }
    do {
        player = try AVAudioPlayer(contentsOfURL: url)
        guard let player = player else { return }
        player.prepareToPlay()
        player.play()
    } catch let error as NSError {
        print(error.description)
    }
}
Swift 3
import AVFoundation
var player: AVAudioPlayer? // strong reference: keeps the player alive while it plays

/// Plays "soundName.mp3" from the main bundle (Swift 3 API).
func playSound() {
    guard let url = Bundle.main.url(forResource: "soundName", withExtension: "mp3") else { return }
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
        try AVAudioSession.sharedInstance().setActive(true)
        // BUG FIX: the original declared a local `let player`, shadowing the
        // property — the player was deallocated when this function returned,
        // so playback was cut off. Assign to the property instead.
        player = try AVAudioPlayer(contentsOf: url)
        player?.play()
    } catch let error {
        print(error.localizedDescription)
    }
}
Swift 4 (iOS 13 compatible)
import AVFoundation
var player: AVAudioPlayer? // strong reference: keeps the player alive while it plays

/// Plays "soundName.mp3" from the main bundle (Swift 4, iOS 13 compatible).
func playSound() {
    guard let url = Bundle.main.url(forResource: "soundName", withExtension: "mp3") else { return }
    do {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(.playback, mode: .default)
        try session.setActive(true)
        /* The explicit fileTypeHint is required for the player to work on iOS 11. Change the file type accordingly */
        player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.mp3.rawValue)
        /* iOS 10 and earlier require the following line:
        player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileTypeMPEGLayer3) */
        guard let player = player else { return }
        player.play()
    } catch let error {
        print(error.localizedDescription)
    }
}
Make sure to change the name of your tune as well as the extension.
The file needs to be properly imported (Project Build Phases
>Copy Bundle Resources
). You might want to place it in assets.xcassets
for
greater convenience.
For short sound files you might want to go for non-compressed audio formats such as .wav
since they have the best quality and a low CPU impact. The higher disk-space consumption should not be a big deal for short sound files. For longer files, you might want to go for a compressed format such as .mp3
etc. pp. Check the compatible audio formats of CoreAudio
.
Fun-fact: There are neat little libraries which make playing sounds even easier. :)
For example: SwiftySound
Produce sounds of different frequencies in Swift
Swift 4.2:
create this class in your project.
import Foundation
import AudioUnit
import AVFoundation
// Generates a sine tone of configurable frequency, volume and duration by
// feeding interleaved 16-bit stereo samples to a v3 output AUAudioUnit
// through its outputProvider render block.
final class ToneOutputUnit: NSObject {
var auAudioUnit: AUAudioUnit! = nil // placeholder for RemoteIO Audio Unit
var avActive = false // AVAudioSession active flag
var audioRunning = false // RemoteIO Audio Unit running flag
var sampleRate : Double = 44100.0 // typical audio sample rate
var f0 = 880.0 // default frequency of tone: 'A' above Concert A
var v0 = 16383.0 // default volume of tone: half full scale
var toneCount : Int32 = 0 // number of samples of tone left to play. 0 for silence
private var phY = 0.0 // save phase of sine wave to prevent clicking
private var interrupted = false // for restart from audio interruption notification
// Sets the tone frequency in Hz.
func setFrequency(freq : Double) { // audio frequencies below 500 Hz may be
f0 = freq // hard to hear from a tiny iPhone speaker.
}
// Sets the tone volume; 0.0…1.0 is scaled to a 16-bit sample amplitude.
func setToneVolume(vol : Double) { // 0.0 to 1.0
v0 = vol * 32766.0
}
// Sets how long the tone plays, in seconds (stored as a remaining sample count).
func setToneTime(t : Double) {
toneCount = Int32(t * sampleRate);
}
// Creates, configures and starts the output Audio Unit. Safe to call again:
// returns immediately if audio is already running, and reuses auAudioUnit if
// it was already created.
func enableSpeaker() {
if audioRunning {
print("returned")
return
} // return if RemoteIO is already running
do { // not running, so start hardware
let audioComponentDescription = AudioComponentDescription(
componentType: kAudioUnitType_Output,
// NOTE(review): kAudioUnitSubType_SystemOutput is the macOS subtype; the
// surrounding comments mention RemoteIO/iPhone — on iOS this would normally
// be kAudioUnitSubType_RemoteIO. Confirm the target platform.
componentSubType: kAudioUnitSubType_SystemOutput, // For output to the local sound system
componentManufacturer: kAudioUnitManufacturer_Apple,
componentFlags: 0,
componentFlagsMask: 0 )
if (auAudioUnit == nil) {
auAudioUnit = try AUAudioUnit(componentDescription: audioComponentDescription)
let bus0 = auAudioUnit.inputBusses[0]
let audioFormat = AVAudioFormat(
commonFormat: AVAudioCommonFormat.pcmFormatInt16, // short int samples
sampleRate: Double(sampleRate),
channels:AVAudioChannelCount(2),
interleaved: true ) // interleaved stereo
try bus0.setFormat(audioFormat ?? AVAudioFormat()) // for speaker bus
// Render callback: called on the audio thread to pull output samples.
auAudioUnit.outputProvider = { ( // AURenderPullInputBlock?
actionFlags,
timestamp,
frameCount,
inputBusNumber,
inputDataList ) -> AUAudioUnitStatus in
self.fillSpeakerBuffer(inputDataList: inputDataList, frameCount: frameCount)
return(0)
}
}
auAudioUnit.isOutputEnabled = true
toneCount = 0
try auAudioUnit.allocateRenderResources() // v2 AudioUnitInitialize()
try auAudioUnit.startHardware() // v2 AudioOutputUnitStart()
audioRunning = true
} catch /* let error as NSError */ {
print("error 2 \(error)")
}
}
// helper functions
// Fills the output buffer with sine samples while toneCount > 0 and volume > 0,
// otherwise writes silence. Runs on the audio render thread: no allocation,
// no locks, no Swift runtime surprises here.
private func fillSpeakerBuffer( // process RemoteIO Buffer for output
inputDataList : UnsafeMutablePointer<AudioBufferList>, frameCount : UInt32 ) {
let inputDataPtr = UnsafeMutableAudioBufferListPointer(inputDataList)
let nBuffers = inputDataPtr.count
if (nBuffers > 0) {
let mBuffers : AudioBuffer = inputDataPtr[0]
let count = Int(frameCount)
// Speaker Output == play tone at frequency f0
if ( self.v0 > 0)
&& (self.toneCount > 0 )
{
// audioStalled = false
var v = self.v0 ; if v > 32767 { v = 32767 } // clamp to Int16 range
let sz = Int(mBuffers.mDataByteSize)
var a = self.phY // capture phase from object for use inside block
let d = 2.0 * Double.pi * self.f0 / self.sampleRate // phase delta per sample
let bufferPointer = UnsafeMutableRawPointer(mBuffers.mData)
if var bptr = bufferPointer {
for i in 0..<(count) {
let u = sin(a) // create a sinewave
a += d ; if (a > 2.0 * Double.pi) { a -= 2.0 * Double.pi } // wrap phase
let x = Int16(v * u + 0.5) // scale & round
if (i < (sz / 2)) { // bounds check against the buffer's byte size
bptr.assumingMemoryBound(to: Int16.self).pointee = x
bptr += 2 // increment by 2 bytes for next Int16 item
bptr.assumingMemoryBound(to: Int16.self).pointee = x
bptr += 2 // stereo, so fill both Left & Right channels
}
}
}
self.phY = a // save sinewave phase for the next callback (prevents clicks)
self.toneCount -= Int32(frameCount) // decrement time remaining
} else {
// audioStalled = true
memset(mBuffers.mData, 0, Int(mBuffers.mDataByteSize)) // silence
}
}
}
// Stops the hardware; enableSpeaker() can restart it later.
func stop() {
if (audioRunning) {
auAudioUnit.stopHardware()
audioRunning = false
}
}
}
and in your view controller, create an object of this class outside the viewDidLoad() method.
let myUnit = ToneOutputUnit()
and to create a sound of a specific frequency (here 10,000 Hz), use the code below
// Configure the tone, start the hardware, then give it a duration to play.
myUnit.setFrequency(freq: 10000) // tone frequency in Hz
myUnit.setToneVolume(vol: currentVolume) // 0.0 … 1.0
myUnit.enableSpeaker() // starts the audio unit (idempotent)
myUnit.setToneTime(t: 20000) // duration in seconds
and to stop sound
myUnit.stop()
Related Topics
Get the Type of Anyobject Dynamically in Swift
How to Conform an Observableobject to the Codable Protocols
Is There a Github Markdown Language Identifier for Swift Code
Zoom to Fit Current Location and Annotation on Map
Compile Time Key Path Checking in Swift
Using Uilexicon to Implement Autocorrect in iOS 8 Keyboard Extension
Swift Change the Tableviewcell Border Color According to Data
How to Change Text Color of Actionsheet in Swiftui
Avoid Automatic Framework Linking in Swift
Core Data with Pre-Filled .Sqlite (Swift3)
Big O of Accessing a String with an Index in Swift 3.0
Xcode - Build Setting "Excluded_Source_File_Names" Not Working
How to Set Alignment for Wkinterface Label Using Setattributedtext
Swift: Reduce Function with a Closure