How to Capture a Picture with AVCaptureSession in Swift

How do you capture a picture with AVCaptureSession in Swift?

AVCaptureSession Sample

import UIKit
import AVFoundation

class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    let stillImageOutput = AVCaptureStillImageOutput()
    var error: NSError?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Select the back camera.
        let devices = AVCaptureDevice.devices().filter { $0.hasMediaType(AVMediaTypeVideo) && $0.position == AVCaptureDevicePosition.Back }
        if let captureDevice = devices.first as? AVCaptureDevice {
            captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &error))
            captureSession.sessionPreset = AVCaptureSessionPresetPhoto
            captureSession.startRunning()
            stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
            }
            // Show the live preview and capture a still when the preview is tapped.
            if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
                previewLayer.bounds = view.bounds
                previewLayer.position = CGPointMake(view.bounds.midX, view.bounds.midY)
                previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                let cameraPreview = UIView(frame: CGRectMake(0.0, 0.0, view.bounds.size.width, view.bounds.size.height))
                cameraPreview.layer.addSublayer(previewLayer)
                cameraPreview.addGestureRecognizer(UITapGestureRecognizer(target: self, action: "saveToCamera:"))
                view.addSubview(cameraPreview)
            }
        }
    }

    // Capture a still image and save the JPEG to the photo library.
    func saveToCamera(sender: UITapGestureRecognizer) {
        if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) {
                (imageDataSampleBuffer, error) -> Void in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData), nil, nil, nil)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}

UIImagePickerController Sample

import UIKit

class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
    let imagePicker = UIImagePickerController()

    @IBOutlet weak var imageViewer: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func imagePickerController(picker: UIImagePickerController, didFinishPickingImage image: UIImage!, editingInfo: [NSObject : AnyObject]!) {
        dismissViewControllerAnimated(true, completion: nil)
        imageViewer.image = image
    }

    @IBAction func presentImagePicker(sender: AnyObject) {
        if UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.Front) {
            imagePicker.delegate = self
            imagePicker.sourceType = UIImagePickerControllerSourceType.Camera
            presentViewController(imagePicker, animated: true, completion: nil)
        }
    }
}

How can I capture photo and video from the same AVCaptureSession?

I implemented almost the same functionality that you need. I created and configured a single capture session; for video output I used the AVCaptureVideoDataOutput class, for audio the AVCaptureAudioDataOutput class, and for photos AVCaptureStillImageOutput.
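
A minimal sketch of how such a session might be configured, written as if inside a class that conforms to both sample-buffer delegate protocols. The device selection, queue labels, preset, and canAddInput/canAddOutput checks here are illustrative assumptions, not taken from the original answer:

// Illustrative setup: one AVCaptureSession feeding video, audio, and still-image outputs.
let captureSession = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
let stillImageOutput = AVCaptureStillImageOutput()

func configureSession() throws {
    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetHigh

    // Camera and microphone inputs (assumes capture permission has already been granted).
    if let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) {
        let cameraInput = try AVCaptureDeviceInput(device: camera)
        if captureSession.canAddInput(cameraInput) {
            captureSession.addInput(cameraInput)
        }
    }
    if let microphone = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) {
        let microphoneInput = try AVCaptureDeviceInput(device: microphone)
        if captureSession.canAddInput(microphoneInput) {
            captureSession.addInput(microphoneInput)
        }
    }

    // The video and audio outputs deliver sample buffers to the delegate method shown below.
    videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "video.output.queue"))
    audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "audio.output.queue"))

    for output in [videoOutput, audioOutput, stillImageOutput] as [AVCaptureOutput] {
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
        }
    }

    captureSession.commitConfiguration()
    captureSession.startRunning()
}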

I used AVAssetWriter to record the video and audio because I needed to perform custom video manipulations (a sketch of the writer setup follows the delegate code below). Recording is performed in the AVCaptureVideoDataOutputSampleBufferDelegate methods. That delegate method looks like this:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    if !isRecordingVideo {
        return
    }

    if captureOutput == self.videoOutput {
        assetVideoWriterQueue.async {
            if self.shouldStartWritingSession {
                self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                self.shouldStartWritingSession = false
            }

            if self.assetWriterInputCamera.isReadyForMoreMediaData {
                self.assetWriterInputCamera.append(sampleBuffer)
            }
        }
    }

    if captureOutput == self.audioOutput {
        assetAudioWriterQueue.async {
            let shouldStartWritingSession = self.shouldStartWritingSession
            if self.assetWriterInputMicrofone.isReadyForMoreMediaData && shouldStartWritingSession == false {
                self.assetWriterInputMicrofone.append(sampleBuffer)
            }

            if shouldStartWritingSession {
                print("In audioOutput and CANNOT Record")
            }
        }
    }
}
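
The asset writer and its inputs referenced above (assetWriter, assetWriterInputCamera, assetWriterInputMicrofone) are not declared in the original answer. Here is a rough sketch of how they might be set up; the output URL and encoding settings are assumptions:

// Properties referenced by the delegate method above.
var assetWriter: AVAssetWriter!
var assetWriterInputCamera: AVAssetWriterInput!
var assetWriterInputMicrofone: AVAssetWriterInput!
var shouldStartWritingSession = true   // reset to true before each recording

func configureAssetWriter(outputURL: URL) throws {
    assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)

    // Video input; codec and dimensions here are assumptions.
    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: 1280,
        AVVideoHeightKey: 720
    ]
    assetWriterInputCamera = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    assetWriterInputCamera.expectsMediaDataInRealTime = true

    // Audio input; the AAC settings are assumptions.
    let audioSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100.0
    ]
    assetWriterInputMicrofone = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
    assetWriterInputMicrofone.expectsMediaDataInRealTime = true

    assetWriter.add(assetWriterInputCamera)
    assetWriter.add(assetWriterInputMicrofone)

    // startWriting() here; startSession(atSourceTime:) is called with the first video buffer, as in the delegate above.
    assetWriter.startWriting()
}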

My still image capturing looks like this:

func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) {
    guard self.state == .running else {
        completion(false, nil)
        return
    }

    backgroundQueue.async {
        let connection = self.stillImageOutpup.connection(withMediaType: AVMediaTypeVideo)

        self.stillImageOutpup.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in
            defer {
                self.state = .running
            }

            guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                DispatchQueue.main.async {
                    completion(false, nil)
                }

                return
            }

            let image = UIImage(data: imageData)

            DispatchQueue.main.async {
                completion(true, image)
            }
        })
    }
}

You can find examples of how to use asset writers on Stack Overflow; for example, you may get familiar with this one.

AVCapture Session To Capture Image SWIFT

You should try using a separate thread when adding the inputs and outputs to the session, before starting it. In Apple's documentation they state:

Important: The startRunning method is a blocking call which can take some time, therefore you should perform session setup on a serial queue so that the main queue isn't blocked (which keeps the UI responsive). See AVCam for iOS for the canonical implementation example.

Try using a dispatch call in the create-session method, such as below:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), { // 1
    self.captureSession.addOutput(self.stillImageOutput)
    self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice, error: &err))
    self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
    previewLayer?.frame = self.cameraView.layer.bounds
    previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    dispatch_async(dispatch_get_main_queue(), { // 2
        // 3
        self.cameraView.layer.addSublayer(previewLayer)
        self.captureSession.startRunning()
    })
})
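
As a rough modern-Swift equivalent of the snippet above (Swift 3+ syntax; the captureSession, captureDevice, stillImageOutput, and cameraView names are carried over from that snippet and assumed to exist), the same setup can be done on a dedicated serial queue, as Apple's note recommends:

// A dedicated serial queue keeps session setup and startRunning() off the main thread.
let sessionQueue = DispatchQueue(label: "camera.session.queue")

sessionQueue.async {
    do {
        let input = try AVCaptureDeviceInput(device: self.captureDevice)
        if self.captureSession.canAddInput(input) {
            self.captureSession.addInput(input)
        }
        if self.captureSession.canAddOutput(self.stillImageOutput) {
            self.captureSession.addOutput(self.stillImageOutput)
        }
        self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        self.captureSession.startRunning()   // blocking call, safe here because we are off the main queue
    } catch {
        print("error: \(error.localizedDescription)")
    }

    DispatchQueue.main.async {
        // Layer and view work belongs on the main queue.
        let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        previewLayer.frame = self.cameraView.layer.bounds
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.cameraView.layer.addSublayer(previewLayer)
    }
}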

Capture only camerapreview in AVCapture Swift

The didFinishProcessingPhoto callback returns the complete image, i.e. what the camera sees, not just the portion shown in your preview layer. So, in order to get a UIImage matching the visible preview layer, you can resize the captured image.

Resizing can be done in two ways: by keeping the aspect ratio, or by passing an exact size. I would recommend going with the aspect ratio, because it ensures your image won't be squeezed or stretched, whereas passing a wrong exact size won't fulfil your requirement.

Resize UIImage passing new CGSize:

extension UIImage {
    func scaleImage(toSize newSize: CGSize) -> UIImage? {
        var newImage: UIImage?
        let newRect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height).integral
        UIGraphicsBeginImageContextWithOptions(newSize, false, 0)
        if let context = UIGraphicsGetCurrentContext(), let cgImage = self.cgImage {
            context.interpolationQuality = .high
            let flipVertical = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: newSize.height)
            context.concatenate(flipVertical)
            context.draw(cgImage, in: newRect)
            if let img = context.makeImage() {
                newImage = UIImage(cgImage: img)
            }
            UIGraphicsEndImageContext()
        }
        return newImage
    }
}

Usage: capturedImage.scaleImage(toSize: CGSize(width: 300, height: 300))

Resize UIImage keeping aspect ratio:

extension UIImage {
    func scaleImage(toWidth newWidth: CGFloat) -> UIImage {
        let scale = newWidth / self.size.width
        let newHeight = self.size.height * scale
        let newSize = CGSize(width: newWidth, height: newHeight)

        let renderer = UIGraphicsImageRenderer(size: newSize)

        let image = renderer.image { (context) in
            self.draw(in: CGRect(origin: CGPoint(x: 0, y: 0), size: newSize))
        }
        return image
    }
}

Usage: capturedImage.scaleImage(toWidth: 300)

Reference: Resize UIImage to 200x200pt/px

Update:

Keep the below method as it is in your code:

@IBAction func cameraButton_Tab(_ sender: Any) {
    let settings = AVCapturePhotoSettings()
    photoOutput?.capturePhoto(with: settings, delegate: self)
}

extension ViewController: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let imageData = photo.fileDataRepresentation() {
            let capturedImage = UIImage(data: imageData)
            let cropImage = capturedImage?.scaleImage(toWidth: cameraPreviewLayer!.frame.size.width) // returns an image sized to the camera preview
        }
    }
}

Take 4:3 photos with AVFoundation in Swift

I believe you should be able to control this by setting the AVCaptureSession's sessionPreset property; setting it to AVCaptureSessionPresetPhoto should do what you want. But if that doesn't work, by all means post the code and I'll take a look.
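
For illustration, a minimal sketch of applying that preset (the session name is assumed):

let captureSession = AVCaptureSession()

// The photo preset captures full-resolution stills, which on iPhone cameras are 4:3.
if captureSession.canSetSessionPreset(AVCaptureSessionPresetPhoto) {
    captureSession.sessionPreset = AVCaptureSessionPresetPhoto
}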


