Build Video from UIImage Using Swift

Create movie from [UIImage] in Swift

Constructing a Dictionary literal is straightforward:

import AVFoundation

let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 640,
    AVVideoHeightKey: 480
]
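
If it helps, here is a minimal sketch of how a settings dictionary like this is typically handed to an AVAssetWriterInput; the output URL and file name are placeholders:

import AVFoundation

// Hypothetical output location; any writable file URL will do.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("output.mp4")

do {
    let writer = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)
    let writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    if writer.canAdd(writerInput) {
        writer.add(writerInput)
    }
} catch {
    print("Could not create AVAssetWriter: \(error)")
}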

As for everything else, I would encourage you to read through Apple's The Swift Programming Language to establish fundamentals first, rather than relying on SO or tutorials that happen to cover what you want to do. "Teach a man to fish", as they say.

Making video from UIImage array with different transition animations

I was stuck on the same problem for quite a while. It is a long answer, but I hope it helps. Here is my solution in Swift 3.

Theory:

First, build a video from your image array. Many solutions exist for this; if you cannot find one, see my code below.
Second, a CALayer can be overlaid on a video. If you have not done this before, look up how to add a CALayer over a video.
Third, you need to know how to animate a CALayer. Core Animation does the job, and plenty of sample code is available; a minimal sketch follows this list.
Finally, once you have an animated CALayer, you add it on top of your video.
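
To make the Core Animation step concrete, here is a minimal sketch of animating a CALayer with CABasicAnimation; the layer size, key path, and timing values are arbitrary examples:

import UIKit

// A plain layer to animate; in the pipeline below this would be the layer holding one image.
let slidingLayer = CALayer()
slidingLayer.frame = CGRect(x: 0, y: 0, width: 300, height: 300)
slidingLayer.backgroundColor = UIColor.black.cgColor

// Slide the layer horizontally over 3 seconds and keep the final position.
let slide = CABasicAnimation(keyPath: "position.x")
slide.fromValue = -300
slide.toValue = 600
slide.duration = 3
slide.fillMode = kCAFillModeForwards
slide.isRemovedOnCompletion = false
slidingLayer.add(slide, forKey: "slide")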

Code:

1. Create a video from your array of images

let outputSize = CGSize(width: 1920, height: 1280)
let imagesPerSecond: TimeInterval = 3 // each image will stay on screen for 3 seconds
var selectedPhotosArray = [UIImage]()
var imageArrayToVideoURL = NSURL()
let audioIsEnabled: Bool = false // set this to false if your video has no sound
var asset: AVAsset!

func buildVideoFromImageArray() {
    for image in 0..<5 {
        selectedPhotosArray.append(UIImage(named: "\(image + 1).JPG")!) // name of the images: 1.JPG, 2.JPG, 3.JPG, 4.JPG, 5.JPG
    }

    imageArrayToVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video1.MP4")
    removeFileAtURLIfExists(url: imageArrayToVideoURL)
    guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileTypeMPEG4) else {
        fatalError("AVAssetWriter error")
    }
    let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaTypeVideo) else {
        fatalError("Negative : Can't apply the Output settings...")
    }
    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }
    if videoWriter.startWriting() {
        let zeroTime = CMTimeMake(Int64(imagesPerSecond), Int32(1))
        videoWriter.startSession(atSourceTime: zeroTime)

        assert(pixelBufferAdaptor.pixelBufferPool != nil)
        let media_queue = DispatchQueue(label: "mediaInputQueue")
        videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
            let fps: Int32 = 1
            let framePerSecond: Int64 = Int64(self.imagesPerSecond)
            let frameDuration = CMTimeMake(Int64(self.imagesPerSecond), fps)
            var frameCount: Int64 = 0
            var appendSucceeded = true
            while (!self.selectedPhotosArray.isEmpty) {
                if (videoWriterInput.isReadyForMoreMediaData) {
                    let nextPhoto = self.selectedPhotosArray.remove(at: 0)
                    let lastFrameTime = CMTimeMake(frameCount * framePerSecond, fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                    if let pixelBuffer = pixelBuffer, status == 0 {
                        let managedPixelBuffer = pixelBuffer
                        CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                        context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
                        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                        //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                        let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
                        let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
                        context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                        appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
                frameCount += 1
            }
            videoWriterInput.markAsFinished()
            videoWriter.finishWriting { () -> Void in
                print("-----video1 url = \(self.imageArrayToVideoURL)")

                self.asset = AVAsset(url: self.imageArrayToVideoURL as URL)
                self.exportVideoWithAnimation()
            }
        })
    }
}

func removeFileAtURLIfExists(url: NSURL) {
    if let filePath = url.path {
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: filePath) {
            do {
                try fileManager.removeItem(atPath: filePath)
            } catch let error as NSError {
                print("Couldn't remove existing destination file: \(error)")
            }
        }
    }
}
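
One small aside: the output path above is built with NSHomeDirectory(). If you prefer, the same Documents directory can be resolved through FileManager; a short sketch, with the file name as a placeholder:

// Alternative way to build the output URL for the Documents directory.
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let outputURL = documentsURL.appendingPathComponent("video1.MP4")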

2. Add animation to the created video (please read the commented sections carefully; they should clear up several questions)

func exportVideoWithAnimation() {
    let composition = AVMutableComposition()

    let track = asset?.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack: AVAssetTrack = track![0] as AVAssetTrack
    let timerange = CMTimeRangeMake(kCMTimeZero, (asset?.duration)!)

    let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
    } catch {
        print(error)
    }

    // if your video already has sound, you don't need this check; just always add the audio tracks
    if audioIsEnabled {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

        for audioTrack in (asset?.tracks(withMediaType: AVMediaTypeAudio))! {
            do {
                try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
            } catch {
                print(error)
            }
        }
    }

    let size = videoTrack.naturalSize

    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    //this is the animation part
    var time = [0.00001, 3, 6, 9, 12] // start time of each frame's animation; every image stays for 3 seconds, so consecutive values differ by 3
    var imgarray = [UIImage]()

    for image in 0..<5 {
        imgarray.append(UIImage(named: "\(image + 1).JPG")!)

        let nextPhoto = imgarray[image]

        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
        let aspectRatio = min(horizontalRatio, verticalRatio)
        let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

        /// Ten animations are shown here. Uncomment any one of them (and comment out #1), then export a video to see the result.

        ///#1. left->right///
        let blackLayer = CALayer()
        blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
        blackLayer.backgroundColor = UIColor.black.cgColor

        let imageLayer = CALayer()
        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
        imageLayer.contents = imgarray[image].cgImage
        blackLayer.addSublayer(imageLayer)

        let animation = CABasicAnimation()
        animation.keyPath = "position.x"
        animation.fromValue = -videoTrack.naturalSize.width
        animation.toValue = 2 * (videoTrack.naturalSize.width)
        animation.duration = 3
        animation.beginTime = CFTimeInterval(time[image])
        animation.fillMode = kCAFillModeForwards
        animation.isRemovedOnCompletion = false
        blackLayer.add(animation, forKey: "basic")
///#2. right->left///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 2 * videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.x"
// animation.fromValue = 2 * (videoTrack.naturalSize.width)
// animation.toValue = -videoTrack.naturalSize.width
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")

///#3. top->bottom///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 0, y: 2 * videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.y"
// animation.fromValue = 2 * videoTrack.naturalSize.height
// animation.toValue = -videoTrack.naturalSize.height
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")

///#4. bottom->top///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 0, y: -videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.y"
// animation.fromValue = -videoTrack.naturalSize.height
// animation.toValue = 2 * videoTrack.naturalSize.height
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")

///#5. opacity(1->0)(left->right)///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.x"
// animation.fromValue = -videoTrack.naturalSize.width
// animation.toValue = 2 * (videoTrack.naturalSize.width)
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")
//
// let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
// fadeOutAnimation.fromValue = 1
// fadeOutAnimation.toValue = 0
// fadeOutAnimation.duration = 3
// fadeOutAnimation.beginTime = CFTimeInterval(time[image])
// fadeOutAnimation.isRemovedOnCompletion = false
// blackLayer.add(fadeOutAnimation, forKey: "opacity")

///#6. opacity(1->0)(right->left)///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 2 * videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.x"
// animation.fromValue = 2 * videoTrack.naturalSize.width
// animation.toValue = -videoTrack.naturalSize.width
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")
//
// let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
// fadeOutAnimation.fromValue = 1
// fadeOutAnimation.toValue = 0
// fadeOutAnimation.duration = 3
// fadeOutAnimation.beginTime = CFTimeInterval(time[image])
// fadeOutAnimation.isRemovedOnCompletion = false
// blackLayer.add(fadeOutAnimation, forKey: "opacity")

///#7. opacity(1->0)(top->bottom)///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 0, y: 2 * videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.y"
// animation.fromValue = 2 * videoTrack.naturalSize.height
// animation.toValue = -videoTrack.naturalSize.height
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")
//
// let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
// fadeOutAnimation.fromValue = 1
// fadeOutAnimation.toValue = 0
// fadeOutAnimation.duration = 3
// fadeOutAnimation.beginTime = CFTimeInterval(time[image])
// fadeOutAnimation.isRemovedOnCompletion = false
// blackLayer.add(fadeOutAnimation, forKey: "opacity")

///#8. opacity(1->0)(bottom->top)///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 0, y: -videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let animation = CABasicAnimation()
// animation.keyPath = "position.y"
// animation.fromValue = -videoTrack.naturalSize.height
// animation.toValue = 2 * videoTrack.naturalSize.height
// animation.duration = 3
// animation.beginTime = CFTimeInterval(time[image])
// animation.fillMode = kCAFillModeForwards
// animation.isRemovedOnCompletion = false
// blackLayer.add(animation, forKey: "basic")
//
// let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
// fadeOutAnimation.fromValue = 1
// fadeOutAnimation.toValue = 0
// fadeOutAnimation.duration = 3
// fadeOutAnimation.beginTime = CFTimeInterval(time[image])
// fadeOutAnimation.isRemovedOnCompletion = false
// blackLayer.add(fadeOutAnimation, forKey: "opacity")

///#9. scale(small->big->small)///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 0, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
// blackLayer.opacity = 0
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let scaleAnimation = CAKeyframeAnimation(keyPath: "transform.scale")
// scaleAnimation.values = [0, 1.0, 0]
// scaleAnimation.beginTime = CFTimeInterval(time[image])
// scaleAnimation.duration = 3
// scaleAnimation.isRemovedOnCompletion = false
// blackLayer.add(scaleAnimation, forKey: "transform.scale")
//
// let fadeInOutAnimation = CABasicAnimation(keyPath: "opacity")
// fadeInOutAnimation.fromValue = 1
// fadeInOutAnimation.toValue = 1
// fadeInOutAnimation.duration = 3
// fadeInOutAnimation.beginTime = CFTimeInterval(time[image])
// fadeInOutAnimation.isRemovedOnCompletion = false
// blackLayer.add(fadeInOutAnimation, forKey: "opacity")

///#10. scale(big->small->big)///
// let blackLayer = CALayer()
// blackLayer.frame = CGRect(x: 0, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
// blackLayer.backgroundColor = UIColor.black.cgColor
// blackLayer.opacity = 0
//
// let imageLayer = CALayer()
// imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
// imageLayer.contents = imgarray[image].cgImage
// blackLayer.addSublayer(imageLayer)
//
// let scaleAnimation = CAKeyframeAnimation(keyPath: "transform.scale")
// scaleAnimation.values = [1, 0, 1]
// scaleAnimation.beginTime = CFTimeInterval(time[image])
// scaleAnimation.duration = 3
// scaleAnimation.isRemovedOnCompletion = false
// blackLayer.add(scaleAnimation, forKey: "transform.scale")
//
// let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
// fadeOutAnimation.fromValue = 1
// fadeOutAnimation.toValue = 1
// fadeOutAnimation.duration = 3
// fadeOutAnimation.beginTime = CFTimeInterval(time[image])
// fadeOutAnimation.isRemovedOnCompletion = false
// blackLayer.add(fadeOutAnimation, forKey: "opacity")

        parentlayer.addSublayer(blackLayer)
    }
    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    let animatedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video2.mp4")
    removeFileAtURLIfExists(url: animatedVideoURL)

    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = animatedVideoURL as URL
    assetExport.exportAsynchronously(completionHandler: {
        switch assetExport.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("Exported")
        }
    })
}
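
The exported file ends up in the app's Documents directory. If you want to check the result right away, one option is to save it to the photo library; a hedged sketch, assuming photo library access has been granted and the NSPhotoLibraryUsageDescription key is present in Info.plist:

import Photos

func saveVideoToPhotoLibrary(at url: URL) {
    PHPhotoLibrary.shared().performChanges({
        // Creates a new video asset in the user's photo library.
        _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
    }) { saved, error in
        print(saved ? "Saved to photo library" : "Save failed: \(String(describing: error))")
    }
}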

Don't forget to import AVFoundation.
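
Also note that the code above uses the Swift 3 spellings of the AVFoundation constants. If you build with Swift 4 or later, they map roughly as follows:

// Swift 3 constant   ->  Swift 4 and later
// AVMediaTypeVideo   ->  AVMediaType.video
// AVMediaTypeAudio   ->  AVMediaType.audio
// AVFileTypeMPEG4    ->  AVFileType.mp4
// AVVideoCodecH264   ->  AVVideoCodecType.h264
// kCMTimeZero        ->  CMTime.zero (in newer SDKs)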

How do I export UIImage array as a movie?

Take a look at AVAssetWriter and the rest of the AVFoundation framework. The writer has an input of type AVAssetWriterInput, which in turn has a method called appendSampleBuffer: that lets you add individual frames to a video stream. Essentially you’ll have to:

1) Wire the writer:

NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:somePath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:480], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain]; //retain should be removed if ARC

NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];

2) Start a session:

[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:…] //use kCMTimeZero if unsure

3) Write some samples:

// Or you can use AVAssetWriterInputPixelBufferAdaptor.
// That lets you feed the writer input data from a CVPixelBuffer
// that’s quite easy to create from a CGImage.
[writerInput appendSampleBuffer:sampleBuffer];

4) Finish the session:

[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:…]; // optional; you can call finishWriting without specifying an end time
[videoWriter finishWriting]; // deprecated in iOS 6
/*
[videoWriter finishWritingWithCompletionHandler:...]; // iOS 6.0+
*/

You'll still have to fill in a lot of blanks, but I think the only really hard remaining part is getting a pixel buffer from a CGImage:

- (CVPixelBufferRef)newPixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4 * frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, frameTransform);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

frameSize is a CGSize describing your target frame size and frameTransform is a CGAffineTransform that lets you transform the images when you draw them into frames.
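
Since the rest of this page is in Swift, here is a rough Swift sketch of the same idea; the function name is made up, frameSize is a CGSize you supply, and the transform step from the Objective-C version is omitted:

import AVFoundation
import CoreGraphics

// Rough Swift equivalent of newPixelBufferFromCGImage:, assuming you pass in the target frame size.
func makePixelBuffer(from image: CGImage, frameSize: CGSize) -> CVPixelBuffer? {
    let options = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                   kCVPixelBufferCGBitmapContextCompatibilityKey as String: true] as CFDictionary
    var pxbuffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     Int(frameSize.width),
                                     Int(frameSize.height),
                                     kCVPixelFormatType_32ARGB,
                                     options,
                                     &pxbuffer)
    guard status == kCVReturnSuccess, let buffer = pxbuffer else { return nil }

    CVPixelBufferLockBaseAddress(buffer, [])
    defer { CVPixelBufferUnlockBaseAddress(buffer, []) }

    guard let context = CGContext(data: CVPixelBufferGetBaseAddress(buffer),
                                  width: Int(frameSize.width),
                                  height: Int(frameSize.height),
                                  bitsPerComponent: 8,
                                  bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                                  space: CGColorSpaceCreateDeviceRGB(),
                                  bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else { return nil }

    // Draw the image into the pixel buffer's backing memory.
    context.draw(image, in: CGRect(x: 0, y: 0, width: CGFloat(image.width), height: CGFloat(image.height)))
    return buffer
}

You would then append the result through the AVAssetWriterInputPixelBufferAdaptor, much like the Swift answer above does.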

Create frames from video in Swift (iOS)

I tried the solution from Amin Benarieb, and it seems to work:

static func toImages(fromVideoUrl url: URL) -> [UIImage]? {
    let asset = AVAsset(url: url)
    guard let reader = try? AVAssetReader(asset: asset) else { return nil }
    let videoTrack = asset.tracks(withMediaType: .video).first!
    let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA)]
    let trackReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
    reader.add(trackReaderOutput)
    reader.startReading()
    var images = [UIImage]()
    while reader.status == .reading {
        autoreleasepool {
            if let sampleBuffer = trackReaderOutput.copyNextSampleBuffer() {
                if let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                    let ciImage = CIImage(cvImageBuffer: imageBuffer)
                    images.append(UIImage(ciImage: ciImage))
                }
            }
        }
    }
    return images
}
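
A minimal usage sketch, assuming the function lives in a type named VideoHelper (a placeholder) and the video file is bundled with the app:

// Hypothetical call site; the resource name is just an example.
if let url = Bundle.main.url(forResource: "video1", withExtension: "MP4"),
   let frames = VideoHelper.toImages(fromVideoUrl: url) {
    print("Extracted \(frames.count) frames")
}

Keep in mind that decoding every frame into a UIImage can use a lot of memory for longer videos.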

