Switch front/back camera with AVCaptureSession
What you need to do is reconfigure your AVCaptureSession
Here is what I'm using:
// note that `AVCaptureSession * session`
//
// Switch the session between the front and back camera.
// All changes happen between beginConfiguration/commitConfiguration so the
// preview never shows a partially-configured session.
if (session)
{
    [session beginConfiguration];

    AVCaptureInput *currentCameraInput = [session.inputs objectAtIndex:0];
    [session removeInput:currentCameraInput];

    // Pick the camera opposite to the one currently attached.
    AVCaptureDevice *newCamera = nil;
    if (((AVCaptureDeviceInput *)currentCameraInput).device.position == AVCaptureDevicePositionBack)
    {
        newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
    }
    else
    {
        newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
    }

    NSError *err = nil;
    AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
    // FIX: also verify canAddInput: before adding (addInput: throws on an
    // incompatible input), and restore the old input on failure — the
    // original code removed the current input first and, when creating the
    // new input failed, committed a session with NO video input at all.
    if (!newVideoInput || err || ![session canAddInput:newVideoInput])
    {
        NSLog(@"Error creating capture device input: %@", err.localizedDescription);
        if ([session canAddInput:currentCameraInput])
        {
            [session addInput:currentCameraInput];
        }
    }
    else
    {
        [session addInput:newVideoInput];
    }

    [session commitConfiguration];
}
// make sure you have this method in your class
//
// Returns the first video capture device found at the requested position
// (front or back), or nil when the hardware has no such camera.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSUInteger matchIndex = [videoDevices indexOfObjectPassingTest:^BOOL(AVCaptureDevice *candidate, NSUInteger idx, BOOL *stop) {
        return candidate.position == position;
    }];
    return (matchIndex == NSNotFound) ? nil : videoDevices[matchIndex];
}
How do I implement camera switching from the front to the back camera?
Your problem is that you are not checking whether you can add the input and/or output before adding them to your capture session. By the way, you don't need to set up the preview layer again when switching cameras, and you only need one captureDevice:
// Only attach the input when the session will accept it; adding an
// incompatible/duplicate input without this check raises a runtime error.
if captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
}
and
// Same guard for the photo output: verify before adding.
if captureSession.canAddOutput(photoOutPut) {
captureSession.addOutput(photoOutPut)
}
Your CollectionViewCell code should look something like this:
/// Collection-view cell that hosts a live camera preview and can flip
/// between the front and back wide-angle camera.
class MainCameraCollectionViewCell: UICollectionViewCell, AVCapturePhotoCaptureDelegate {

    private var captureSession = AVCaptureSession()
    // Serial queue that owns ALL session (re)configuration and start/stop
    // work, so configuration is never raced from the main thread.
    private var sessionQueue: DispatchQueue!
    private var captureDevice: AVCaptureDevice!
    private var photoOutPut: AVCapturePhotoOutput!
    private var cameraPreviewLayer: AVCaptureVideoPreviewLayer!
    var image: UIImage?
    var usingFrontCamera = false

    override func awakeFromNib() {
        super.awakeFromNib()
        setupCaptureSession()
        setupDevice()
        setupInput()
        setupPreviewLayer()
        startRunningCaptureSession()
    }

    /// Configures the session preset and creates the serial session queue.
    /// FIX: the queue is now created only once — the original recreated it on
    /// every camera flip, orphaning work still enqueued on the old queue.
    func setupCaptureSession() {
        captureSession.sessionPreset = .photo
        if sessionQueue == nil {
            sessionQueue = DispatchQueue(label: "session queue")
        }
    }

    /// Resolves the wide-angle camera for the requested side.
    /// FIX: the original looped over all discovered devices and let the back
    /// camera overwrite an already-selected front camera (the `else if
    /// device.position == .back` branch also ran when `usingFrontCamera` was
    /// true); asking the discovery session for the exact position avoids
    /// that enumeration-order bug.
    func setupDevice(usingFrontCamera: Bool = false) {
        sessionQueue.async {
            let position: AVCaptureDevice.Position = usingFrontCamera ? .front : .back
            let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                             mediaType: .video,
                                                             position: position)
            self.captureDevice = discovery.devices.first
        }
    }

    /// Attaches the current device as the session input and (once) the photo
    /// output. Runs on the session queue, after setupDevice's block.
    func setupInput() {
        sessionQueue.async {
            guard let device = self.captureDevice else { return }
            do {
                let input = try AVCaptureDeviceInput(device: device)
                if self.captureSession.canAddInput(input) {
                    self.captureSession.addInput(input)
                }
                // The photo output survives camera flips; create it once.
                if self.photoOutPut == nil {
                    self.photoOutPut = AVCapturePhotoOutput()
                    self.photoOutPut.setPreparedPhotoSettingsArray(
                        [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])],
                        completionHandler: nil)
                    if self.captureSession.canAddOutput(self.photoOutPut) {
                        self.captureSession.addOutput(self.photoOutPut)
                    }
                }
            } catch {
                print(error)
            }
        }
    }

    /// Installs the preview layer behind the cell's content. The same layer
    /// keeps working across camera flips; it never needs to be rebuilt.
    func setupPreviewLayer() {
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        cameraPreviewLayer.videoGravity = .resizeAspectFill
        cameraPreviewLayer.connection?.videoOrientation = .portrait
        cameraPreviewLayer.frame = UIScreen.main.bounds
        layer.insertSublayer(cameraPreviewLayer, at: 0)
    }

    /// FIX: startRunning() blocks until the session starts, so run it on the
    /// session queue instead of stalling the main thread.
    func startRunningCaptureSession() {
        sessionQueue.async {
            self.captureSession.startRunning()
        }
    }

    @IBAction func cameraButton_TouchUpInside(_ sender: Any) {
        let settings = AVCapturePhotoSettings()
        photoOutPut.capturePhoto(with: settings, delegate: self)
    }

    // Flip to front and back camera.
    /// FIX: the original removed the inputs and called commitConfiguration()
    /// on the main thread while setupDevice()/setupInput() ran asynchronously
    /// on the session queue — so the new input was usually added AFTER the
    /// commit, and setupCaptureSession() recreated the queue on every flip.
    /// The whole reconfiguration now happens atomically on the session queue.
    @IBAction func FlipThe_camera(_ sender: UIButton) {
        usingFrontCamera.toggle()
        let wantFront = usingFrontCamera
        sessionQueue.async {
            self.captureSession.beginConfiguration()
            for input in self.captureSession.inputs {
                self.captureSession.removeInput(input)
            }
            let position: AVCaptureDevice.Position = wantFront ? .front : .back
            let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                             mediaType: .video,
                                                             position: position)
            if let device = discovery.devices.first {
                self.captureDevice = device
                if let input = try? AVCaptureDeviceInput(device: device),
                   self.captureSession.canAddInput(input) {
                    self.captureSession.addInput(input)
                }
            }
            self.captureSession.commitConfiguration()
        }
    }
}
Sample project
How can I switch between the iPhone 11 Pro cameras using AVFoundation in Xcode?
Welcome!
Check out this initializer for AVCaptureDevice
. You can specify the DeviceType
you want to use, like .builtInUltraWideCamera
or .builtInTelephotoCamera
.
You can use an AVCaptureDevice.DiscoverySession
to get a list of all capture devices available to your app.
Related Topics
Xcode 6 - Launch Simulator from Command Line
iOS Static VS Dynamic Frameworks Clarifications
Date to Milliseconds and Back to Date in Swift
Trying to Set Only Time in Uidatepicker in Swift 2.0
iOS 6 Facebook Posting Procedure Ends Up with "Remote_App_Id Does Not Match Stored Id"
How to Fill a Uibezierpath with a Gradient
How to Remove Provisioning Profiles from Xcode
How to Use Uiscrollview in Interface Builder
Avcapturesession with Multiple Previews
Difference Between Dispatchqueue.Main.Async and Dispatchqueue.Main.Sync
Prevent Deploying (Disable) Watchkit App with iOS iPhone App in Xcode
Add Entry to iOS .Plist File via Cordova Config.Xml
Command /Usr/Bin/Codesign Failed with Exit Code 1
Performance Testing in Swift Using Tdd
Setting Tableheaderview Height Dynamically
How to Display Activity Indicator in Center of Uialertcontroller