iOS Audio Trimming

Here's the code I've used to trim audio from a pre-existing file. You'll need to change the M4A-related constants if your input or output file uses another format.

- (BOOL)trimAudio
{
    float vocalStartMarker = <starting time>;
    float vocalEndMarker = <ending time>;

    NSURL *audioFileInput = <your pre-existing file>;
    NSURL *audioFileOutput = <the file you want to create>;

    if (!audioFileInput || !audioFileOutput)
    {
        return NO;
    }

    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
    AVAsset *asset = [AVAsset assetWithURL:audioFileInput];

    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
                                                                            presetName:AVAssetExportPresetAppleM4A];
    if (exportSession == nil)
    {
        return NO;
    }

    CMTime startTime = CMTimeMake((int)(floor(vocalStartMarker * 100)), 100);
    CMTime stopTime = CMTimeMake((int)(ceil(vocalEndMarker * 100)), 100);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);

    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;
    exportSession.timeRange = exportTimeRange;

    [exportSession exportAsynchronouslyWithCompletionHandler:^
    {
        if (AVAssetExportSessionStatusCompleted == exportSession.status)
        {
            // It worked!
        }
        else if (AVAssetExportSessionStatusFailed == exportSession.status)
        {
            // It failed...
        }
    }];

    return YES;
}

There's also Technical Q&A 1730, which gives a slightly more detailed approach.
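
Not from the original answer, but the same AVAssetExportSession approach translates fairly directly to Swift. This is only a rough sketch, assuming iOS 11+ (so AVFileType.m4a is available); the function name, URLs, and markers below are placeholders of my own:

import AVFoundation

// Rough Swift sketch of the same trim-with-export-session idea.
// inputURL/outputURL and the second markers are placeholders you supply yourself.
func trimAudio(inputURL: URL, outputURL: URL,
               startSeconds: Double, endSeconds: Double,
               completion: @escaping (Bool) -> Void) {
    // Remove any previous output file; the export session will not overwrite it.
    try? FileManager.default.removeItem(at: outputURL)

    let asset = AVAsset(url: inputURL)
    guard let exportSession = AVAssetExportSession(asset: asset,
                                                   presetName: AVAssetExportPresetAppleM4A) else {
        completion(false)
        return
    }

    exportSession.outputURL = outputURL
    exportSession.outputFileType = .m4a
    // Same idea as the Objective-C version: export only the marked range.
    exportSession.timeRange = CMTimeRange(
        start: CMTime(seconds: startSeconds, preferredTimescale: 100),
        end: CMTime(seconds: endSeconds, preferredTimescale: 100))

    exportSession.exportAsynchronously {
        completion(exportSession.status == .completed)
    }
}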

iOS: trimming audio files with Swift?

I finally found the answer to my question, and it's working fine. I've attached the code below with the trimming step added; it should be useful for anyone trying to merge and trim audio (Swift 2.3):

func mixAudio()
{
    let currentTime = CFAbsoluteTimeGetCurrent()
    let composition = AVMutableComposition()
    let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack.preferredVolume = 0.8

    let avAsset = AVURLAsset.init(URL: soundFileURL, options: nil)
    print("\(avAsset)")
    var tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack = tracks[0]
    do {
        try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
    }
    catch _ {
    }

    let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack1.preferredVolume = 0.8

    let avAsset1 = AVURLAsset.init(URL: soundFileURL1)
    print(avAsset1)

    var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack1 = tracks1[0]
    do {
        try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
    }
    catch _ {
    }

    var paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
    let CachesDirectory = paths[0]
    let strOutputFilePath = CachesDirectory.stringByAppendingString("/Fav")
    print(" strOutputFilePath is \n \(strOutputFilePath)")

    let requiredOutputPath = CachesDirectory.stringByAppendingString("/Fav.m4a")
    print(" requiredOutputPath is \n \(requiredOutputPath)")

    soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
    print(" Output path is \n \(soundFile1)")

    var audioDuration = avAsset.duration
    var totalSeconds = CMTimeGetSeconds(audioDuration)
    var hours = floor(totalSeconds / 3600)
    var minutes = floor(totalSeconds % 3600 / 60)
    var seconds = Int64(totalSeconds % 3600 % 60)
    print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")

    let recordSettings: [String: AnyObject] = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 12000,
        AVNumberOfChannelsKey: 1,
        AVEncoderAudioQualityKey: AVAudioQuality.Low.rawValue
    ]
    do {
        audioRecorder = try AVAudioRecorder(URL: soundFile1, settings: recordSettings)
        audioRecorder!.delegate = self
        audioRecorder!.meteringEnabled = true
        audioRecorder!.prepareToRecord()
    }
    catch let error as NSError {
        audioRecorder = nil
        print(error.localizedDescription)
    }

    do {
        try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
    }
    catch _ {
    }

    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputURL = soundFile1
    exporter!.outputFileType = AVFileTypeAppleM4A

    let duration = CMTimeGetSeconds(avAsset1.duration)
    print(duration)
    if (duration < 5.0) {
        print("sound is not long enough")
        return
    }

    // Trim: export from 0 up to `seconds` (e.g. the first 30 seconds)
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(seconds, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    print(exportTimeRange)
    exporter!.timeRange = exportTimeRange
    print(exporter!.timeRange)

    exporter!.exportAsynchronouslyWithCompletionHandler
    { () -> Void in
        print(" Output path is \n \(requiredOutputPath)")
        print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")

        var url: NSURL?
        if self.audioRecorder != nil {
            url = self.audioRecorder!.url
        }
        else {
            url = self.soundFile1!
            print(url)
        }

        print("playing \(url)")

        do {
            print(self.soundFile1)
            print(" Output path is \n \(requiredOutputPath)")
            self.setSessionPlayback()
            do {
                self.optData = try NSData(contentsOfURL: self.soundFile1!, options: NSDataReadingOptions.DataReadingMappedIfSafe)
                print(self.optData)
                self.recordencryption = self.optData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions())
                // print(self.recordencryption)
                self.myImageUploadRequest()
            }

            self.wasteplayer = try AVAudioPlayer(contentsOfURL: self.soundFile1)
            self.wasteplayer.numberOfLoops = 0
            self.wasteplayer.play()
        }
        catch _ {
        }
    }
}

Audio cutter native iOS element

UI element for an audio waveform - https://stackoverflow.com/a/64709493/13296047. This post shows exactly how to create an audio waveform from recorded audio.

UI element for cutting an audio waveform - https://stackoverflow.com/a/52062837/13296047

UI element at the bottom for the audio-cutting feature - https://stackoverflow.com/a/40670853/13296047

This is a very good Voice Memos clone you can use for reference: https://github.com/HassanElDesouky/VoiceMemosClone

How can I trim an audio file stored via FileManager in Swift on iOS?

This solved my problem.

func trimSelectedAudio() {
    let name = browseData.name!
    let asset: AVAsset = AVURLAsset(url: getDirectory().appendingPathComponent("\(tfTitle.text!).m4a"))
    exportAsset(asset, fileName: name)
}

func exportAsset(_ asset: AVAsset, fileName: String) {
    let trimmedSoundFileUrl = getDirectory().appendingPathComponent("\(tfTitle.text!)_trimmed.m4a")
    print("Saving to \(trimmedSoundFileUrl.absoluteString)")

    if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
        exporter.outputFileType = AVFileType.m4a
        exporter.outputURL = trimmedSoundFileUrl

        let duration = CMTimeGetSeconds(asset.duration)
        if duration < 5.0 {
            print("Audio is not long enough")
            return
        }

        let startTime = CMTimeMake(Int64(selectAudioRange.selectedMinValue), 1)
        let stopTime = CMTimeMake(Int64(selectAudioRange.selectedMaxValue), 1)
        exporter.timeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)

        exporter.exportAsynchronously(completionHandler: {
            print("export complete \(exporter.status)")

            switch exporter.status {
            case AVAssetExportSessionStatus.failed:
                if let e = exporter.error {
                    print("export failed \(e)")
                }
            case AVAssetExportSessionStatus.cancelled:
                print("export cancelled \(String(describing: exporter.error))")
            default:
                print("export complete")
                self.deleteFileAlreadyPresent()
                // update the Core Data record here
            }
        })
    } else {
        print("cannot create AVAssetExportSession for asset \(asset)")
    }
}

func deleteFileAlreadyPresent() {
    let presentAudioUrl = getDirectory().appendingPathComponent("\(previousAudioName).m4a")
    if FileManager.default.fileExists(atPath: presentAudioUrl.path) {
        print("Sound exists, removing \(presentAudioUrl.path)")
        do {
            if try presentAudioUrl.checkResourceIsReachable() {
                print("is reachable")
                self.deleteRecordingFile(audioName: "\(previousAudioName).m4a")
                self.saveTrimmedData()
            }
            // try FileManager.default.removeItem(atPath: trimmedSoundFileUrl.absoluteString)
        } catch {
            print("Could not remove \(presentAudioUrl.absoluteString)")
        }
    }
}

func saveTrimmedData() {
    DispatchQueue.main.async {
        self.browseData.image = (self.imgToSave.image?.jpeg!)!
        self.browseData.note = self.tfNotes.text
        self.browseData.name = "\(self.tfTitle.text!)_trimmed"

        do {
            try self.context.save()
            self.goToParentVC()
        } catch let error as NSError {
            print("Could not save \(error) \(error.userInfo)")
        }
    }
}

Trim audio with iOS

How about using AVFoundation? Import the audio file into an AVAsset (composition, etc.), then export it to a file, setting the preferred time and duration.

I wrote a general-purpose function a while ago that exports an asset to a file; you can also pass in an AVAudioMix. As written below it exports the whole file, but you could assign a CMTimeRange to exporter.timeRange and trim that way. I haven't tested that, but it should work(?). Another alternative is to adjust the time ranges when creating the AVAsset and its tracks. Note that this exporter only handles M4A (AAC). Sorry if this wasn't what you wanted.

- (void)exportAsset:(AVAsset*)asset toFile:(NSString*)filename overwrite:(BOOL)overwrite withMix:(AVAudioMix*)mix {
    //NSArray* availablePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];

    AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
    if (exporter == nil) {
        DLog(@"Failed creating exporter!");
        return;
    }
    DLog(@"Created exporter! %@", exporter);

    // Set output file type
    DLog(@"Supported file types: %@", exporter.supportedFileTypes);
    for (NSString* filetype in exporter.supportedFileTypes) {
        if ([filetype isEqualToString:AVFileTypeAppleM4A]) {
            exporter.outputFileType = AVFileTypeAppleM4A;
            break;
        }
    }
    if (exporter.outputFileType == nil) {
        DLog(@"Needed output file type not found? (%@)", AVFileTypeAppleM4A);
        return;
    }

    // Set outputURL
    NSArray* paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString* parentDir = [NSString stringWithFormat:@"%@/", [paths objectAtIndex:0]];
    NSString* outPath = [NSString stringWithFormat:@"%@%@", parentDir, filename];

    NSFileManager* manager = [NSFileManager defaultManager];
    if ([manager fileExistsAtPath:outPath]) {
        DLog(@"%@ already exists!", outPath);
        if (!overwrite) {
            DLog(@"Not overwriting, uh oh!");
            return;
        }
        else {
            // Overwrite
            DLog(@"Overwrite! (delete first)");
            NSError* error = nil;
            if (![manager removeItemAtPath:outPath error:&error]) {
                DLog(@"Failed removing %@, error: %@", outPath, error.description);
                return;
            }
            else {
                DLog(@"Removed %@", outPath);
            }
        }
    }

    NSURL* const outUrl = [NSURL fileURLWithPath:outPath];
    exporter.outputURL = outUrl;
    // Specify a time range in case only part of the file should be exported
    //exporter.timeRange =

    if (mix != nil)
        exporter.audioMix = mix; // important

    DLog(@"Starting export! (%@)", exporter.outputURL);
    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        // Export ended for some reason. Check the status
        NSString* message;
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed:
                message = [NSString stringWithFormat:@"Export failed. Error: %@", exporter.error.description];
                DLog(@"%@", message);
                [self showAlert:message];
                break;
            case AVAssetExportSessionStatusCompleted: {
                /*if (playfileWhenExportFinished) {
                    DLog(@"playfileWhenExportFinished!");
                    [self playfileAfterExport:exporter.outputURL];
                    playfileWhenExportFinished = NO;
                }*/
                message = [NSString stringWithFormat:@"Export completed: %@", filename];
                DLog(@"%@", message);
                [self showAlert:message];
                break;
            }
            case AVAssetExportSessionStatusCancelled:
                message = [NSString stringWithFormat:@"Export cancelled!"];
                DLog(@"%@", message);
                [self showAlert:message];
                break;
            default:
                DLog(@"Export unhandled status: %ld", (long)exporter.status);
                break;
        }
    }];
}
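
The trimming the answer mentions but leaves commented out is just that one timeRange assignment. A minimal sketch of the step, in Swift and under the assumption that you already hold a configured AVAssetExportSession plus two marker times in seconds:

import AVFoundation

// Sketch only: applies the trim range mentioned above to an existing export session.
// startSeconds/endSeconds are placeholder markers; 600 is a common preferred timescale.
func applyTrim(to exporter: AVAssetExportSession,
               startSeconds: Double, endSeconds: Double) {
    exporter.timeRange = CMTimeRange(
        start: CMTime(seconds: startSeconds, preferredTimescale: 600),
        end: CMTime(seconds: endSeconds, preferredTimescale: 600))
}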

AudioKit trim audio

exportAsynchronously allows setting the start and end samples:

https://github.com/AudioKit/AudioKit/blob/master/AudioKit/Common/Internals/Audio%20File/AKAudioFile%2BProcessingAsynchronously.swift#L267

/// Exports Asynchronously to a new AKAudiofile with trimming options.
///
...
///   - fromSample: start range in samples
///   - toSample: end range time in samples
...
public func exportAsynchronously(name: String,
                                 baseDir: BaseDirectory,
                                 exportFormat: ExportFormat,
                                 fromSample: Int64 = 0,
                                 toSample: Int64 = 0,
                                 callback: @escaping AsyncProcessCallback) {
    let fromFileExt = fileExt.lowercased()

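A minimal usage sketch, assuming AudioKit 4-era API (AKAudioFile(readFileName:baseDir:) and the .documents / .m4a enum cases); the file name and sample positions here are placeholders:

import AudioKit

// Sketch: trim an existing AKAudioFile to a sample range and export it as M4A.
// "song.m4a" and the sample positions are placeholders; adjust to your own file.
if let file = try? AKAudioFile(readFileName: "song.m4a", baseDir: .documents) {
    file.exportAsynchronously(name: "song_trimmed",
                              baseDir: .documents,
                              exportFormat: .m4a,
                              fromSample: 44_100,          // start at 1 s (44.1 kHz file)
                              toSample: 441_000) { exportedFile, error in   // end at 10 s
        if let error = error {
            print("Trim/export failed: \(error)")
        } else if let exportedFile = exportedFile {
            print("Trimmed file written to \(exportedFile.url)")
        }
    }
}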
