captureOutput Function Isn't Called Using setSampleBufferDelegate

AVCaptureVideoDataOutput captureOutput not being called

You need to define the didOutputSampleBuffer delegate callback to actually receive the captured frames:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    print("captured \(sampleBuffer)")
}
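If you are on Swift 4 or later, note that the expected delegate method signature changed; a method with the old signature above will compile but will never be called. A sketch of the current signature:

func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    // Called on the queue you passed to setSampleBufferDelegate(_:queue:)
    print("captured \(sampleBuffer)")
}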

P.S. I'm not sure about macOS, but viewWillAppear may not be a good place to do initialisation, because on iOS at least it can be called multiple times.

didOutputSampleBuffer delegate not called

I found the cause of my error! The delegate that receives the callback has to be implemented in the same view controller. Here is the modified code:

import UIKit
import AVFoundation
import Accelerate

var customPreviewLayer: AVCaptureVideoPreviewLayer?

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession?
    var dataOutput: AVCaptureVideoDataOutput?
    //var customPreviewLayer: AVCaptureVideoPreviewLayer?

    @IBOutlet weak var camView: UIView!

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        //setupCameraSession()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        //captureSession?.startRunning()
        setupCameraSession()
        self.captureSession?.startRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func setupCameraSession() {
        // Session
        self.captureSession = AVCaptureSession()
        self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
        // Capture device
        let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        var deviceInput = AVCaptureDeviceInput()
        // Device input
        //var deviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(inputDevice, error: error)
        do {
            deviceInput = try AVCaptureDeviceInput(device: inputDevice)
        } catch let error as NSError {
            // Handle errors
            print(error)
        }
        if self.captureSession!.canAddInput(deviceInput) {
            self.captureSession!.addInput(deviceInput)
        }
        // Preview
        customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        customPreviewLayer!.frame = camView.bounds
        customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
        customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
        self.camView.layer.addSublayer(customPreviewLayer!)
        print("Cam layer added")

        // Video data output: deliver 4:2:0 bi-planar frames and drop late ones
        self.dataOutput = AVCaptureVideoDataOutput()
        self.dataOutput!.videoSettings = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
        ]
        self.dataOutput!.alwaysDiscardsLateVideoFrames = true
        if self.captureSession!.canAddOutput(dataOutput) {
            self.captureSession!.addOutput(dataOutput)
        }
        self.captureSession!.commitConfiguration()

        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        //let delegate = VideoDelegate()
        // The delegate is self (this view controller), not a separately created object
        self.dataOutput!.setSampleBufferDelegate(self, queue: queue)
    }

    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        print("buffered")
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let lumaBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
        // 8-bit grayscale contexts must use alpha "None"; premultiplied alpha makes context creation fail
        let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.None.rawValue)!
        let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
        // Unlock the pixel buffer once we are done reading from it
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
        dispatch_sync(dispatch_get_main_queue(), {() -> Void in
            customPreviewLayer!.contents = dstImageFilter as AnyObject
        })
    }

}
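The code above is Swift 2-era syntax. For reference, roughly the same luma-plane-to-CGImage conversion in current Swift might look like the sketch below (the helper name grayImage(from:) is mine; it assumes the 420YpCbCr8BiPlanarFullRange format configured above, where plane 0 is the 8-bit luma plane):

import AVFoundation
import CoreGraphics

func grayImage(from sampleBuffer: CMSampleBuffer) -> CGImage? {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }

    // Plane 0 of a 420YpCbCr8BiPlanar buffer holds the 8-bit luma channel
    let width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    guard let lumaBase = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0) else { return nil }

    // Wrap the luma plane in a grayscale bitmap context and snapshot it
    guard let context = CGContext(data: lumaBase,
                                  width: width, height: height,
                                  bitsPerComponent: 8, bytesPerRow: bytesPerRow,
                                  space: CGColorSpaceCreateDeviceGray(),
                                  bitmapInfo: CGImageAlphaInfo.none.rawValue) else { return nil }
    return context.makeImage()
}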

AVCaptureDeviceOutput not calling delegate method captureOutput

Your session is a local variable. Its scope is limited to viewDidLoad. Since this is a new project, I assume it's safe to say that you're using ARC. In that case the object won't leak and therefore continue to live as it would have done in the linked question; instead, the compiler will ensure the object is deallocated before viewDidLoad exits.

Hence your session isn't running because it no longer exists.

(Aside: the self.theImage.image = ... is unsafe since it performs a UIKit action off the main queue; you probably want to dispatch_async that over to dispatch_get_main_queue().)

So, sample corrections:

@implementation YourViewController
{
    AVCaptureSession *session;
}

- (void)viewDidLoad {

    [super viewDidLoad];

    // Initialize AV session
    session = [AVCaptureSession new];

    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone)
        [session setSessionPreset:AVCaptureSessionPreset640x480];
    else
        /* ... etc ... */
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

    NSLog(@"delegate method called");

    CGImageRef cgImage = [self imageFromSampleBuffer:sampleBuffer];

    dispatch_sync(dispatch_get_main_queue(),
    ^{
        self.theImage.image = [UIImage imageWithCGImage: cgImage ];
        CGImageRelease( cgImage );
    });
}

Most people advocate using an underscore at the beginning of instance variable names nowadays but I omitted it for simplicity. You can use Xcode's built in refactor tool to fix that up after you've verified that the diagnosis is correct.

I moved the CGImageRelease inside the block sent to the main queue to ensure the image's lifetime extends until it has been wrapped into a UIImage. I'm not immediately able to find any documentation confirming that Core Foundation objects have their lifetime automatically extended when captured in a block.
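For completeness, the same fix in Swift is just a stored property; a minimal sketch with names of my choosing:

import UIKit
import AVFoundation

class CameraViewController: UIViewController {
    // Stored property: ARC keeps the session alive for the lifetime of the controller,
    // unlike a local variable that is released when viewDidLoad returns.
    private let session = AVCaptureSession()

    override func viewDidLoad() {
        super.viewDidLoad()
        session.sessionPreset = .vga640x480
        // ... add the device input and video data output, set the sample buffer delegate ...
        session.startRunning()
    }
}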

CaptureOutput delegate function to run as fast as AVCaptureDevice's frame rate

To solve this problem I changed the AVCaptureDevice's frame rate, just like the last answer in this post: AVCapture capturing and getting framebuffer at 60 fps in iOS 7.

In order to get the captureOutput function to run at the same speed as the camera you have to add the AVCaptureDeviceInput before you change the AVCaptureDevice's frame rate.

Here's what the code ended up looking like for Swift 3:

do {
    input = try AVCaptureDeviceInput(device: backCamera)
    if (captureSession?.canAddInput(input))! {
        captureSession?.addInput(input)
    }

    // Pick the device format with the highest supported frame rate (capped at 120 fps)
    var finalFormat = AVCaptureDeviceFormat()
    var maxFPS: Double = 0
    for vformat in (backCamera?.formats)! {
        let ranges = (vformat as AnyObject).videoSupportedFrameRateRanges as! [AVFrameRateRange]
        let frameRates = ranges[0]

        if frameRates.maxFrameRate >= maxFPS && frameRates.maxFrameRate <= 120.0 {
            maxFPS = frameRates.maxFrameRate
            finalFormat = vformat as! AVCaptureDeviceFormat
        }
    }

    if maxFPS != 0 {
        // A duration of (1200 / maxFPS) / 1200 seconds is exactly 1 / maxFPS seconds per frame
        let timeValue = Int64(1200.0 / maxFPS)
        let timeScale: Int64 = 1200
        try backCamera!.lockForConfiguration()

        backCamera!.activeFormat = finalFormat
        //print("CAMERA FPS: \(backCamera?.activeFormat.videoSupportedFrameRateRanges.description)\n")
        backCamera!.activeVideoMinFrameDuration = CMTimeMake(timeValue, Int32(timeScale))
        backCamera!.activeVideoMaxFrameDuration = CMTimeMake(timeValue, Int32(timeScale))
        backCamera!.focusMode = AVCaptureFocusMode.autoFocus
        backCamera!.unlockForConfiguration()
    }
} catch {
    print("Problem setting FPS\n")
    exit(0)
}
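In current Swift the same lock/configure/unlock sequence can be wrapped in a small helper; a sketch (the helper name is mine, and the requested rate must be supported by the device's activeFormat.videoSupportedFrameRateRanges, otherwise AVFoundation will complain at runtime):

import AVFoundation

func setFrameRate(_ fps: Int32, on device: AVCaptureDevice) throws {
    try device.lockForConfiguration()
    defer { device.unlockForConfiguration() }
    // A duration of 1/fps seconds pins both the minimum and maximum frame duration,
    // so the output delivers frames at exactly fps frames per second.
    let duration = CMTime(value: 1, timescale: fps)
    device.activeVideoMinFrameDuration = duration
    device.activeVideoMaxFrameDuration = duration
}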

iOS capturing image using AVFramework

Add the following line

output.minFrameDuration = CMTimeMake(5, 1);

below the comment

 // If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.

but above the

[session startRunning];

Edit

Use the following code to preview the camera output.

AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
UIView *aView = self.view;
CGRect videoRect = CGRectMake(0.0, 0.0, 320.0, 150.0);
previewLayer.frame = videoRect; // Assume you want the preview layer to fill the view.
[aView.layer addSublayer:previewLayer];

Edit 2:
OK, fine.

Apple has provided a way to set the minFrameDuration here.

So now, use the following code to set the frame duration:

AVCaptureConnection *conn = [output connectionWithMediaType:AVMediaTypeVideo];

if (conn.supportsVideoMinFrameDuration)
    conn.videoMinFrameDuration = CMTimeMake(5,1);
if (conn.supportsVideoMaxFrameDuration)
    conn.videoMaxFrameDuration = CMTimeMake(5,1);
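As an aside on the arithmetic: CMTimeMake(value, timescale) describes a frame duration of value/timescale seconds, so CMTimeMake(5, 1) above means one frame every 5 seconds (0.2 fps), while the 15 fps cap mentioned in the quoted comment would be the reciprocal. A short Swift illustration:

import CoreMedia

// Frame duration is seconds per frame, i.e. the reciprocal of the frame rate.
let fifteenFPS = CMTime(value: 1, timescale: 15)          // 1/15 s per frame -> 15 fps cap
let fiveSecondsPerFrame = CMTime(value: 5, timescale: 1)  // 5 s per frame    -> 0.2 fps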

