mirror of https://github.com/opencv/opencv.git
added ios camera support in highgui. turned on optimization in opencv2.framework (ticket #2232)
parent
c26d543e1e
commit
8efb84b5a4
7 changed files with 1398 additions and 1 deletions
@ -0,0 +1,160 @@ |
||||
/*
|
||||
* cap_ios.h |
||||
* For iOS video I/O |
||||
* by Eduard Feicho on 29/07/12 |
||||
* Copyright 2012. All rights reserved. |
||||
* |
||||
* Redistribution and use in source and binary forms, with or without |
||||
* modification, are permitted provided that the following conditions are met: |
||||
* |
||||
* 1. Redistributions of source code must retain the above copyright notice, |
||||
* this list of conditions and the following disclaimer. |
||||
* 2. Redistributions in binary form must reproduce the above copyright notice, |
||||
* this list of conditions and the following disclaimer in the documentation |
||||
* and/or other materials provided with the distribution. |
||||
* 3. The name of the author may not be used to endorse or promote products |
||||
* derived from this software without specific prior written permission. |
||||
* |
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED |
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
* |
||||
*/ |
||||
|
||||
#import <UIKit/UIKit.h> |
||||
#import <Accelerate/Accelerate.h> |
||||
#import <AVFoundation/AVFoundation.h> |
||||
#import <ImageIO/ImageIO.h> |
||||
#include "opencv2/core/core.hpp" |
||||
|
||||
/////////////////////////////////////// CvAbstractCamera /////////////////////////////////////


@class CvAbstractCamera;

// Base class for the iOS camera wrappers. Owns the AVCaptureSession and an
// optional AVCaptureVideoPreviewLayer, tracks device orientation, and exposes
// the capture configuration (device position, video orientation, session
// preset, FPS). Subclasses supply the concrete capture output by overriding
// -createCaptureOutput (see the .mm: it raises if not overridden).
@interface CvAbstractCamera : NSObject
{
    AVCaptureSession* captureSession;             // AV capture pipeline
    AVCaptureConnection* videoCaptureConnection;  // video connection of the active output
    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;  // preview layer (when enabled)

    UIDeviceOrientation currentDeviceOrientation; // last orientation reported by UIDevice

    BOOL cameraAvailable;       // YES when a camera source type is available on this device
    BOOL captureSessionLoaded;  // YES once session/device/output have been created
    BOOL running;               // YES between -start and -stop / -pause
    BOOL useAVCaptureVideoPreviewLayer; // YES: preview via AVCaptureVideoPreviewLayer

    AVCaptureDevicePosition defaultAVCaptureDevicePosition;     // front or back camera
    AVCaptureVideoOrientation defaultAVCaptureVideoOrientation; // preview/video orientation
    NSString *const defaultAVCaptureSessionPreset; // e.g. AVCaptureSessionPreset640x480

    int defaultFPS;    // desired capture frame rate

    UIView* parentView; // view the preview layer is attached to (may be nil)

    // Frame size derived from the session preset (see -updateSize in the .mm).
    int imageWidth;
    int imageHeight;
}

@property (nonatomic, retain) AVCaptureSession* captureSession;
@property (nonatomic, retain) AVCaptureConnection* videoCaptureConnection;

@property (nonatomic, readonly) BOOL running;
@property (nonatomic, readonly) BOOL captureSessionLoaded;

@property (nonatomic, assign) int defaultFPS;
@property (nonatomic, assign) AVCaptureDevicePosition defaultAVCaptureDevicePosition;
@property (nonatomic, assign) AVCaptureVideoOrientation defaultAVCaptureVideoOrientation;
@property (nonatomic, assign) BOOL useAVCaptureVideoPreviewLayer;
@property (nonatomic, strong) NSString *const defaultAVCaptureSessionPreset;

@property (nonatomic, assign) int imageWidth;
@property (nonatomic, assign) int imageHeight;

@property (nonatomic, retain) UIView* parentView;

// Session control. -start re-dispatches itself to the main thread when called
// from any other thread (see the .mm implementation).
- (void)pause;
- (void)start;
- (void)stop;
// Toggles front/back camera; restarts the session if it was running.
- (void)switchCameras;

- (id)initWithParentView:(UIView*)parent;

// Template methods for subclasses.
- (void)createCaptureOutput;
- (void)createVideoPreviewLayer;
- (void)updateOrientation;


@end
||||
|
||||
///////////////////////////////// CvVideoCamera ///////////////////////////////////////////


@class CvVideoCamera;

// Receives each captured frame for processing.
@protocol CvVideoCameraDelegate <NSObject>

#ifdef __cplusplus
// delegate method for processing image frames
- (void)processImage:(cv::Mat&)image;
#endif

@end

// Continuous video capture. Streams sample buffers (as
// AVCaptureVideoDataOutputSampleBufferDelegate) to the delegate's
// -processImage: and can optionally record video via AVAssetWriter.
@interface CvVideoCamera : CvAbstractCamera<AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureVideoDataOutput *videoDataOutput;

    dispatch_queue_t videoDataOutputQueue; // queue for sample-buffer callbacks (released in -stop)
    CALayer *customPreviewLayer;           // custom preview layer used instead of AVCaptureVideoPreviewLayer

    BOOL grayscaleMode; // presumably switches delivery to single-channel frames — implemented elsewhere in the .mm, confirm there

    BOOL recordVideo;   // YES: write frames to a movie file while running
    AVAssetWriterInput* recordAssetWriterInput;
    AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
    AVAssetWriter* recordAssetWriter;

}

@property (nonatomic, assign) id<CvVideoCameraDelegate> delegate;
@property (nonatomic, assign) BOOL grayscaleMode;

@property (nonatomic, assign) BOOL recordVideo;
@property (nonatomic, retain) AVAssetWriterInput* recordAssetWriterInput;
@property (nonatomic, retain) AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
@property (nonatomic, retain) AVAssetWriter* recordAssetWriter;

// Rotates/flips the custom preview layer for the given interface orientation.
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
- (void)layoutPreviewLayer;
- (void)saveVideo;

@end
||||
|
||||
///////////////////////////////// CvPhotoCamera ///////////////////////////////////////////


@class CvPhotoCamera;

// Callbacks for still-image capture: either a captured image or a cancel.
@protocol CvPhotoCameraDelegate <NSObject>

- (void)photoCamera:(CvPhotoCamera*)photoCamera capturedImage:(UIImage *)image;
- (void)photoCameraCancel:(CvPhotoCamera*)photoCamera;

@end

// Still-image capture built on AVCaptureStillImageOutput. Call -takePicture;
// the resulting UIImage is delivered to the delegate on the main thread.
@interface CvPhotoCamera : CvAbstractCamera
{
    AVCaptureStillImageOutput *stillImageOutput;
}

@property (nonatomic, assign) id<CvPhotoCameraDelegate> delegate;

- (void)takePicture;

@end
@ -0,0 +1,408 @@ |
||||
/* |
||||
* cap_ios_abstract_camera.mm |
||||
* For iOS video I/O |
||||
* by Eduard Feicho on 29/07/12 |
||||
* Copyright 2012. All rights reserved. |
||||
* |
||||
* Redistribution and use in source and binary forms, with or without |
||||
* modification, are permitted provided that the following conditions are met: |
||||
* |
||||
* 1. Redistributions of source code must retain the above copyright notice, |
||||
* this list of conditions and the following disclaimer. |
||||
* 2. Redistributions in binary form must reproduce the above copyright notice, |
||||
* this list of conditions and the following disclaimer in the documentation |
||||
* and/or other materials provided with the distribution. |
||||
* 3. The name of the author may not be used to endorse or promote products |
||||
* derived from this software without specific prior written permission. |
||||
* |
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED |
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
* |
||||
*/ |
||||
|
||||
|
||||
#import "opencv2/highgui/cap_ios.h" |
||||
#include "precomp.hpp" |
||||
|
||||
#pragma mark - Private Interface

// Class extension: redeclares the preview layer readwrite and declares the
// internal setup helpers driven by -start / -startCaptureSession.
@interface CvAbstractCamera ()

@property (nonatomic, retain) AVCaptureVideoPreviewLayer* captureVideoPreviewLayer;

// Observer for UIDeviceOrientationDidChangeNotification (registered in init).
- (void)deviceOrientationDidChange:(NSNotification*)notification;
// Lazily builds session/device/output/preview, then starts the session.
- (void)startCaptureSession;

// Selects the capture device at the given position and wires it as session input.
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;

// Updates imageWidth/imageHeight from the configured session preset.
- (void)updateSize;

@end
||||
|
||||
|
||||
#pragma mark - Implementation


@implementation CvAbstractCamera




#pragma mark Public

// Frame size (derived from the session preset in -updateSize).
@synthesize imageWidth;
@synthesize imageHeight;

// Capture configuration; defaults are assigned in the initializers.
@synthesize defaultFPS;
@synthesize defaultAVCaptureDevicePosition;
@synthesize defaultAVCaptureVideoOrientation;
@synthesize defaultAVCaptureSessionPreset;

// Capture pipeline state.
@synthesize captureSession;
@synthesize captureVideoPreviewLayer;
@synthesize videoCaptureConnection;
@synthesize running;
@synthesize captureSessionLoaded;
@synthesize useAVCaptureVideoPreviewLayer;

@synthesize parentView;
||||
|
||||
#pragma mark - Constructors

// Initializer without a preview parent view: low-res preset, no preview layer.
// NOTE(review): this duplicates -initWithParentView: except for the session
// preset (352x288 here vs 640x480) and the preview flags — keep both in sync
// when changing defaults.
- (id)init;
{
    self = [super init];
    if (self) {
        // react to device orientation notifications
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(deviceOrientationDidChange:)
                                                     name:UIDeviceOrientationDidChangeNotification
                                                   object:nil];
        [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        currentDeviceOrientation = [[UIDevice currentDevice] orientation];


        // check if camera available
        cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
        NSLog(@"camera available: %@", (cameraAvailable == YES ? @"YES" : @"NO") );

        running = NO;

        // set camera default configuration
        self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
        self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        self.defaultFPS = 15;
        self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;

        self.parentView = nil;
        self.useAVCaptureVideoPreviewLayer = NO;
    }
    return self;
}
||||
|
||||
|
||||
|
||||
// Initializer with a parent view for the preview: 640x480 preset and an
// AVCaptureVideoPreviewLayer attached to `parent`.
// NOTE(review): duplicates -init except for the preset and preview flags.
- (id)initWithParentView:(UIView*)parent;
{
    self = [super init];
    if (self) {
        // react to device orientation notifications
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(deviceOrientationDidChange:)
                                                     name:UIDeviceOrientationDidChangeNotification
                                                   object:nil];
        [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        currentDeviceOrientation = [[UIDevice currentDevice] orientation];


        // check if camera available
        cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
        NSLog(@"camera available: %@", (cameraAvailable == YES ? @"YES" : @"NO") );

        running = NO;

        // set camera default configuration
        self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
        self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        self.defaultFPS = 15;
        self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;

        self.parentView = parent;
        self.useAVCaptureVideoPreviewLayer = YES;
    }
    return self;
}
||||
|
||||
|
||||
|
||||
// Unregisters the orientation observer; balances the begin… call in the
// initializers. (No [super dealloc] — presumably this file is compiled with
// ARC; verify against the build settings.)
- (void)dealloc;
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
}
||||
|
||||
|
||||
#pragma mark - Public interface


// Starts capture. Must run on the main thread: if called elsewhere it logs a
// warning and re-dispatches itself asynchronously to the main thread.
// No-op when already running.
- (void)start;
{
    if (![NSThread isMainThread]) {
        NSLog(@"[Camera] Warning: Call start only from main thread");
        [self performSelectorOnMainThread:@selector(start) withObject:nil waitUntilDone:NO];
        return;
    }

    if (running == YES) {
        return;
    }
    running = YES;

    // TODO: update image size data before actually starting (needed for recording)
    [self updateSize];

    if (cameraAvailable) {
        [self startCaptureSession];
    }
}
||||
|
||||
|
||||
// Stops the session but keeps the pipeline loaded, so a subsequent -start
// resumes without rebuilding session/device/output.
- (void)pause;
{
    running = NO;
    [self.captureSession stopRunning];
}
||||
|
||||
|
||||
|
||||
// Fully tears down the capture pipeline; the next -start rebuilds it from
// scratch (captureSessionLoaded is reset).
- (void)stop;
{
    running = NO;

    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;

    [self.captureSession stopRunning];
    self.captureSession = nil;
    self.captureVideoPreviewLayer = nil;
    self.videoCaptureConnection = nil;
    captureSessionLoaded = NO;
}
||||
|
||||
|
||||
|
||||
// Toggles between the front and back camera. If capture is currently running,
// the session is stopped first and restarted afterwards so the new device
// position takes effect.
- (void)switchCameras;
{
    BOOL wasRunning = self.running;

    if (wasRunning) {
        [self stop];
    }

    // Flip the configured device position.
    self.defaultAVCaptureDevicePosition =
        (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront)
            ? AVCaptureDevicePositionBack
            : AVCaptureDevicePositionFront;

    if (wasRunning) {
        [self start];
    }
}
||||
|
||||
|
||||
|
||||
#pragma mark - Device Orientation Changes


// Tracks UIDevice orientation changes. Only the four screen orientations
// update currentDeviceOrientation; FaceUp/FaceDown/Unknown are ignored so the
// last meaningful orientation is kept. Subclasses react via -updateOrientation.
- (void)deviceOrientationDidChange:(NSNotification*)notification
{
    UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;

    switch (orientation)
    {
        case UIDeviceOrientationPortrait:
        case UIDeviceOrientationPortraitUpsideDown:
        case UIDeviceOrientationLandscapeLeft:
        case UIDeviceOrientationLandscapeRight:
            currentDeviceOrientation = orientation;
            break;

        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        default:
            break;
    }
    // BUGFIX: UIDeviceOrientation is NSInteger-backed; %d is the wrong
    // specifier on 64-bit — use %ld with an explicit (long) cast.
    NSLog(@"deviceOrientationDidChange: %ld", (long)orientation);

    [self updateOrientation];
}
||||
|
||||
|
||||
|
||||
#pragma mark - Private Interface

// Creates the AVCaptureSession and applies the configured preset, falling
// back to AVCaptureSessionPresetLow when the preferred preset is unsupported.
// NOTE(review): under manual reference counting the alloc/init assigned to a
// `retain` property would leak one reference — confirm this file's memory model.
- (void)createCaptureSession;
{
    // set a av capture session preset
    self.captureSession = [[AVCaptureSession alloc] init];
    if ([self.captureSession canSetSessionPreset:self.defaultAVCaptureSessionPreset]) {
        [self.captureSession setSessionPreset:self.defaultAVCaptureSessionPreset];
    } else if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
    } else {
        NSLog(@"[Camera] Error: could not set session preset");
    }
}
||||
|
||||
// Wires the session input to the configured camera position and logs the
// default video device's connectivity/position for debugging.
- (void)createCaptureDevice;
{
    // setup the device
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    [self setDesiredCameraPosition:self.defaultAVCaptureDevicePosition];
    NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
    NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");
}
||||
|
||||
|
||||
// Builds the AVCaptureVideoPreviewLayer for the session and, when a parent
// view is set, sizes it to fill that view and attaches it as a sublayer.
- (void)createVideoPreviewLayer;
{
    self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];

    // Apply the configured video orientation when the layer supports it.
    if ([self.captureVideoPreviewLayer isOrientationSupported]) {
        [self.captureVideoPreviewLayer setOrientation:self.defaultAVCaptureVideoOrientation];
    }

    if (parentView != nil) {
        self.captureVideoPreviewLayer.frame = self.parentView.bounds;
        self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [self.parentView.layer addSublayer:self.captureVideoPreviewLayer];
    }
    NSLog(@"[Camera] created AVCaptureVideoPreviewLayer");
}
||||
|
||||
|
||||
|
||||
|
||||
// Selects the first capture device at `desiredPosition` (front/back), makes
// it the session's only input, and enables continuous autofocus when the
// device supports it. No-op if no device matches.
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([device position] == desiredPosition) {
            [self.captureSession beginConfiguration];

            NSError* error = nil;  // nil-initialized; only valid when input creation fails
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
            if (!input) {
                NSLog(@"error creating input %@", [error localizedDescription]);
            }

            // support for autofocus
            if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
                NSError *error = nil;
                if ([device lockForConfiguration:&error]) {
                    device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
                    [device unlockForConfiguration];
                } else {
                    NSLog(@"unable to lock device for autofocus configuration %@", [error localizedDescription]);
                }
            }

            // Replace any previous inputs with the new one.
            // BUGFIX: the original added `input` BEFORE this removal loop as
            // well, so the loop immediately removed the input it had just
            // added — that first addInput: was redundant and has been dropped.
            for (AVCaptureInput *oldInput in self.captureSession.inputs) {
                [self.captureSession removeInput:oldInput];
            }
            [self.captureSession addInput:input];
            [self.captureSession commitConfiguration];

            break;
        }
    }
}
||||
|
||||
|
||||
|
||||
// Lazily builds the capture pipeline (session, device, output, preview) on
// first use, then starts the session running. No-op when no camera is
// available on the device.
- (void)startCaptureSession
{
    if (!cameraAvailable) {
        return;
    }

    if (self.captureSessionLoaded == NO) {
        [self createCaptureSession];
        [self createCaptureDevice];
        [self createCaptureOutput];  // subclass hook — raises if not overridden

        // setup preview layer
        if (self.useAVCaptureVideoPreviewLayer) {
            [self createVideoPreviewLayer];
        } else {
            [self createCustomVideoPreview];  // subclass hook
        }

        captureSessionLoaded = YES;
    }

    [self.captureSession startRunning];
}
||||
|
||||
|
||||
// Abstract method: subclasses must create and attach their capture output
// (video data output, still image output, ...). Raises when not overridden.
- (void)createCaptureOutput;
{
    [NSException raise:NSInternalInconsistencyException
                format:@"You must override %@ in a subclass", NSStringFromSelector(_cmd)];
}
||||
|
||||
// Abstract method: subclasses must provide a custom preview when
// useAVCaptureVideoPreviewLayer is NO. Raises when not overridden.
- (void)createCustomVideoPreview;
{
    [NSException raise:NSInternalInconsistencyException
                format:@"You must override %@ in a subclass", NSStringFromSelector(_cmd)];
}
||||
|
||||
// Hook invoked after a device-orientation change; the base class has nothing
// to do, subclasses may relayout their preview here.
- (void)updateOrientation;
{
    // nothing to do here
}
||||
|
||||
|
||||
// Derives imageWidth/imageHeight from the configured session preset so
// consumers (e.g. the recorder) know the frame size before capture starts.
- (void)updateSize;
{
    if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPreset352x288]) {
        self.imageWidth = 352;
        self.imageHeight = 288;
    } else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPreset1280x720]) {
        self.imageWidth = 1280;
        self.imageHeight = 720;
    } else {
        // AVCaptureSessionPreset640x480 plus every preset whose real
        // resolution is still unknown (Photo/High/Medium/Low — TODO: find the
        // correct resolutions) falls back to 640x480. This collapses the
        // original's four identical branch bodies without changing behavior.
        self.imageWidth = 640;
        self.imageHeight = 480;
    }
}
||||
|
||||
@end |
@ -0,0 +1,165 @@ |
||||
/* |
||||
* cap_ios_photo_camera.mm |
||||
* For iOS video I/O |
||||
* by Eduard Feicho on 29/07/12 |
||||
* Copyright 2012. All rights reserved. |
||||
* |
||||
* Redistribution and use in source and binary forms, with or without |
||||
* modification, are permitted provided that the following conditions are met: |
||||
* |
||||
* 1. Redistributions of source code must retain the above copyright notice, |
||||
* this list of conditions and the following disclaimer. |
||||
* 2. Redistributions in binary form must reproduce the above copyright notice, |
||||
* this list of conditions and the following disclaimer in the documentation |
||||
* and/or other materials provided with the distribution. |
||||
* 3. The name of the author may not be used to endorse or promote products |
||||
* derived from this software without specific prior written permission. |
||||
* |
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED |
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
* |
||||
*/ |
||||
|
||||
|
||||
#import "opencv2/highgui/cap_ios.h" |
||||
#include "precomp.hpp" |
||||
|
||||
#pragma mark - Private Interface


// Class extension: owns the still image output used by -takePicture.
@interface CvPhotoCamera ()

@property (nonatomic, retain) AVCaptureStillImageOutput* stillImageOutput;

@end
||||
|
||||
|
||||
|
||||
#pragma mark - Implementation


@implementation CvPhotoCamera



#pragma mark Public

@synthesize stillImageOutput;
@synthesize delegate;
||||
|
||||
|
||||
#pragma mark - Public interface


// Captures one still image asynchronously. `cameraAvailable` doubles as the
// "capture in flight" latch: it is cleared here and re-armed once the capture
// completes. The JPEG is decoded into a UIImage on the main queue and handed
// to the delegate, then the session resumes.
- (void)takePicture
{
    if (!cameraAvailable) {
        return;  // a capture is already in progress (or no camera)
    }
    cameraAvailable = NO;


    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoCaptureConnection
                                                       completionHandler:
     ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
     {
         if (error == nil && imageSampleBuffer != NULL)
         {
             // TODO check
             // NSNumber* imageOrientation = [UIImage cgImageOrientationForUIDeviceOrientation:currentDeviceOrientation];
             // CMSetAttachment(imageSampleBuffer, kCGImagePropertyOrientation, imageOrientation, 1);

             NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];

             dispatch_async(dispatch_get_main_queue(), ^{
                 [self.captureSession stopRunning];

                 // Make sure we create objects on the main thread in the main context
                 UIImage* newImage = [UIImage imageWithData:jpegData];

                 // TODO: only apply rotation, don't scale, since we can set this directly in the camera

                 // We have captured the image, we can allow the user to take another picture
                 cameraAvailable = YES;

                 NSLog(@"CvPhotoCamera captured image");
                 if (self.delegate) {
                     [self.delegate photoCamera:self capturedImage:newImage];
                 }

                 [self.captureSession startRunning];
             });
         }
         else
         {
             // BUGFIX: the original left cameraAvailable == NO forever after a
             // failed capture, permanently disabling -takePicture. Re-arm it
             // (on the main queue, consistent with the success path).
             NSLog(@"CvPhotoCamera error capturing image: %@", error);
             dispatch_async(dispatch_get_main_queue(), ^{
                 cameraAvailable = YES;
             });
         }
     }];


}
||||
|
||||
// Tears down the base pipeline, then releases the still image output.
- (void)stop;
{
    [super stop];
    self.stillImageOutput = nil;
}
||||
|
||||
|
||||
#pragma mark - Private Interface


// Creates the JPEG still-image output, attaches it to the session, and caches
// the output's first video connection for use by -takePicture.
- (void)createStillImageOutput;
{
    // setup still image output with jpeg codec
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [self.stillImageOutput setOutputSettings:outputSettings];
    [self.captureSession addOutput:self.stillImageOutput];

    // Find the (first) connection carrying video and remember it.
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([port.mediaType isEqual:AVMediaTypeVideo]) {
                self.videoCaptureConnection = connection;
                break;
            }
        }
        if (self.videoCaptureConnection) {
            break;
        }
    }
    NSLog(@"[Camera] still image output created");
}
||||
|
||||
|
||||
// CvAbstractCamera hook: the photo camera's output is the still image output.
- (void)createCaptureOutput;
{
    [self createStillImageOutput];
}
||||
|
||||
// CvAbstractCamera hook: intentionally empty — the photo camera always uses
// the AVCaptureVideoPreviewLayer path, never a custom preview.
- (void)createCustomVideoPreview;
{
    //do nothing, always use AVCaptureVideoPreviewLayer
}
||||
|
||||
|
||||
@end |
@ -0,0 +1,656 @@ |
||||
/* |
||||
* cap_ios_video_camera.mm |
||||
* For iOS video I/O |
||||
* by Eduard Feicho on 29/07/12 |
||||
* Copyright 2012. All rights reserved. |
||||
* |
||||
* Redistribution and use in source and binary forms, with or without |
||||
* modification, are permitted provided that the following conditions are met: |
||||
* |
||||
* 1. Redistributions of source code must retain the above copyright notice, |
||||
* this list of conditions and the following disclaimer. |
||||
* 2. Redistributions in binary form must reproduce the above copyright notice, |
||||
* this list of conditions and the following disclaimer in the documentation |
||||
* and/or other materials provided with the distribution. |
||||
* 3. The name of the author may not be used to endorse or promote products |
||||
* derived from this software without specific prior written permission. |
||||
* |
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED |
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
* |
||||
*/ |
||||
|
||||
#import "opencv2/highgui/cap_ios.h" |
||||
#include "precomp.hpp" |
||||
|
||||
#import <AssetsLibrary/AssetsLibrary.h> |
||||
|
||||
|
||||
// Converts an angle in degrees to radians (used for the preview layer's
// CGAffineTransformMakeRotation).
static CGFloat DegreesToRadians(CGFloat degrees)
{
    return degrees * M_PI / 180;
}
||||
|
||||
#pragma mark - Private Interface




// Class extension: internal output-creation helpers plus readwrite access to
// the custom preview layer and the video data output.
@interface CvVideoCamera ()

- (void)createVideoDataOutput;
- (void)createVideoFileOutput;


@property (nonatomic, retain) CALayer *customPreviewLayer;
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;

@end
||||
|
||||
|
||||
|
||||
#pragma mark - Implementation



@implementation CvVideoCamera




@synthesize delegate;
@synthesize grayscaleMode;

@synthesize customPreviewLayer;
@synthesize videoDataOutput;

// Recording state (AVAssetWriter pipeline).
@synthesize recordVideo;
//@synthesize videoFileOutput;
@synthesize recordAssetWriterInput;
@synthesize recordPixelBufferAdaptor;
@synthesize recordAssetWriter;
||||
|
||||
|
||||
|
||||
#pragma mark - Constructors

// The video camera renders its preview through a custom CALayer
// (customPreviewLayer) instead of AVCaptureVideoPreviewLayer, and recording
// is off by default.
- (id)initWithParentView:(UIView*)parent;
{
    self = [super initWithParentView:parent];
    if (self) {
        self.useAVCaptureVideoPreviewLayer = NO;
        self.recordVideo = NO;
    }
    return self;
}
||||
|
||||
|
||||
|
||||
#pragma mark - Public interface


// Starts capture; when recordVideo is enabled, also deletes any stale
// recording file and starts the AVAssetWriter session, logging the writer's
// status for debugging.
- (void)start;
{
    [super start];

    if (self.recordVideo == YES) {
        // [self.videoFileOutput startRecordingToOutputFileURL:[self tempFileURL] recordingDelegate:self];

        // BUGFIX: `error` was previously left uninitialized and then read
        // (`error == nil`) even when no removal was attempted, and the
        // "Delete file" message was logged whether or not a file was deleted.
        // The error is now nil-initialized and inspected only after an actual
        // removal attempt.
        NSString* tempFile = [self tempFileString];
        if ([[NSFileManager defaultManager] fileExistsAtPath:tempFile]) {
            NSError* error = nil;
            if ([[NSFileManager defaultManager] removeItemAtPath:tempFile error:&error]) {
                NSLog(@"[Camera] Delete file %@", tempFile);
            } else {
                NSLog(@"[Camera] Could not delete file %@: %@", tempFile, error);
            }
        }


        BOOL started = [self.recordAssetWriter startWriting];
        [self.recordAssetWriter startSessionAtSourceTime:kCMTimeZero];

        NSLog(@"[Camera] Session started? %d", started);

        if (self.recordAssetWriter.status == AVAssetWriterStatusUnknown) {
            NSLog(@"AVAssetWriter status: unknown");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
            NSLog(@"AVAssetWriter status: writing");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusCompleted) {
            NSLog(@"AVAssetWriter status: completed");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusFailed) {
            NSLog(@"AVAssetWriter status: failed");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusCancelled) {
            NSLog(@"AVAssetWriter status: cancelled");
        }

        if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
            NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
        } else {
            NSLog(@"[Camera] Recording started");
        }
    }
}
||||
|
||||
|
||||
|
||||
// Pauses capture; when recording, logs the asset writer's current status and
// finishes the movie file if the writer is mid-write.
- (void)pause;
{
    [super pause];

    if (self.recordVideo == YES) {
        // [self.videoFileOutput stopRecording];

        AVAssetWriterStatus writerStatus = self.recordAssetWriter.status;

        switch (writerStatus) {
            case AVAssetWriterStatusUnknown:
                NSLog(@"AVAssetWriter status: unknown");
                break;
            case AVAssetWriterStatusWriting:
                NSLog(@"AVAssetWriter status: writing");
                break;
            case AVAssetWriterStatusCompleted:
                NSLog(@"AVAssetWriter status: completed");
                break;
            case AVAssetWriterStatusFailed:
                NSLog(@"AVAssetWriter status: failed");
                break;
            case AVAssetWriterStatusCancelled:
                NSLog(@"AVAssetWriter status: cancelled");
                break;
            default:
                break;
        }

        if (writerStatus == AVAssetWriterStatusWriting) {
            [self.recordAssetWriter finishWriting];
            NSLog(@"[Camera] recording stopped");
        } else {
            NSLog(@"[Camera] Recording Error: asset writer status is not writing");
        }
    }
}
||||
|
||||
|
||||
// Stops capture and tears down recording and preview resources. When
// recording, finishes the movie file (if mid-write) and releases the writer
// objects; always releases the video data output and the custom preview layer.
- (void)stop;
{
    [super stop];

    if (self.recordVideo == YES) {
        NSLog(@"recording stop");
        if (self.recordAssetWriter.status == AVAssetWriterStatusUnknown) {
            NSLog(@"AVAssetWriter status: unknown");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
            NSLog(@"AVAssetWriter status: writing");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusCompleted) {
            NSLog(@"AVAssetWriter status: completed");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusFailed) {
            NSLog(@"AVAssetWriter status: failed");
        } else if (self.recordAssetWriter.status == AVAssetWriterStatusCancelled) {
            NSLog(@"AVAssetWriter status: cancelled");
        }


        if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
            [self.recordAssetWriter finishWriting];
            NSLog(@"[Camera] recording stopped");
        } else {
            NSLog(@"[Camera] Recording Error: asset writer status is not writing");
        }

        self.recordAssetWriter = nil;
        self.recordAssetWriterInput = nil;
        self.recordPixelBufferAdaptor = nil;
    }

    self.videoDataOutput = nil;
    if (videoDataOutputQueue) {
        dispatch_release(videoDataOutputQueue);
        // BUGFIX: clear the queue after releasing so a second -stop (or a
        // stop/start cycle) cannot dispatch_release it again.
        videoDataOutputQueue = NULL;
    }

    [self.customPreviewLayer removeFromSuperlayer];
    self.customPreviewLayer = nil;
}
||||
|
||||
// TODO fix
// Rotates and resizes the custom preview layer so frames appear upright for
// the given *interface* orientation (compare layoutPreviewLayer below, which
// keys off the *device* orientation instead).
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
{

    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        // Total rotation applied to the layer, in degrees; composed from the
        // UI orientation and the capture orientation below, then wrapped mod 360.
        int rotation_angle = 0;
        bool flip_bounds = false;

        switch (interfaceOrientation) {
            case UIInterfaceOrientationPortrait:
                NSLog(@"to Portrait");
                rotation_angle = 270;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                NSLog(@"to UpsideDown");
                break;
            case UIInterfaceOrientationLandscapeLeft:
                rotation_angle = 0;
                NSLog(@"to LandscapeLeft");
                break;
            case UIInterfaceOrientationLandscapeRight:
                rotation_angle = 180;
                NSLog(@"to LandscapeRight");
                break;
            default:
                break; // leave the layer in its last known orientation
        }

        // Compensate for the orientation in which the capture connection
        // delivers its frames.
        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // NOTE(review): no break here — control falls through to the
                // LandscapeLeft case, which is empty, so behavior is unaffected.
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        // A quarter-turn swaps the layer's visual width and height.
        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        // Center the layer in the parent view.
        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        // NOTE(review): `layer` and `self.customPreviewLayer` are the same
        // object, so this assignment is immediately overwritten by
        // `layer.bounds = bounds;` two lines below — confirm whether it can go.
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);

        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}
||||
|
||||
// TODO fix
// Rotates and resizes the custom preview layer so frames appear upright for
// the current *device* orientation (ivar `currentDeviceOrientation`); the
// capture connection's own orientation is compensated for as well.
- (void)layoutPreviewLayer;
{
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        // Total rotation applied to the layer, in degrees; composed from the
        // device orientation and the capture orientation below, mod 360.
        int rotation_angle = 0;
        bool flip_bounds = false;

        switch (currentDeviceOrientation) {
            case UIDeviceOrientationPortrait:
                rotation_angle = 270;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                break;
            case UIDeviceOrientationLandscapeLeft:
                NSLog(@"left");
                rotation_angle = 180;
                break;
            case UIDeviceOrientationLandscapeRight:
                NSLog(@"right");
                rotation_angle = 0;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            default:
                break; // leave the layer in its last known orientation
        }

        // Compensate for the orientation in which the capture connection
        // delivers its frames.
        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // NOTE(review): no break here — control falls through to the
                // LandscapeLeft case, which is empty, so behavior is unaffected.
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        // A quarter-turn swaps the layer's visual width and height.
        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        // Center the layer in the parent view, then rotate and size it.
        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}
||||
|
||||
|
||||
|
||||
|
||||
#pragma mark - Private Interface |
||||
|
||||
|
||||
|
||||
// Creates and configures the AVCaptureVideoDataOutput that feeds frames to
// the sample buffer delegate, plus the custom preview layer and the serial
// dispatch queue the delegate runs on.
- (void)createVideoDataOutput;
{
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];

    // In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the
    // graylevel intensity values (Y component); in color mode BGRA format is used.
    OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
                                                                     forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
        [self.captureSession addOutput:self.videoDataOutput];
    }

    // Fetch the video connection once instead of re-querying it for every
    // setting below (messaging a nil connection is a harmless no-op).
    AVCaptureConnection* connection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    [connection setEnabled:YES];

    // set default FPS
    if (connection.supportsVideoMinFrameDuration) {
        connection.videoMinFrameDuration = CMTimeMake(1, self.defaultFPS);
    }
    if (connection.supportsVideoMaxFrameDuration) {
        connection.videoMaxFrameDuration = CMTimeMake(1, self.defaultFPS);
    }

    // set video mirroring for front camera (more intuitive)
    if (connection.supportsVideoMirroring) {
        connection.videoMirrored = (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront);
    }

    // set default video orientation
    if (connection.supportsVideoOrientation) {
        connection.videoOrientation = self.defaultAVCaptureVideoOrientation;
    }

    // create a custom preview layer, sized to the host view
    self.customPreviewLayer = [CALayer layer];
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    [self layoutPreviewLayer];

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

    NSLog(@"[Camera] created AVCaptureVideoDataOutput at %d FPS", self.defaultFPS);
}
||||
|
||||
|
||||
|
||||
// Creates the H.264 AVAssetWriter pipeline (writer, input, pixel buffer
// adaptor) used to record processed frames to a temp movie file.
- (void)createVideoFileOutput;
{
    /*
    if (self.recordVideo == YES) {
        self.videoFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ( [self.captureSession canAddOutput:self.videoFileOutput] ) {
            [self.captureSession addOutput:self.videoFileOutput];
        }
    }
    */

    /* Video File Output in H.264, via AVAssetWriter */
    NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);

    NSDictionary *outputSettings
        = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
                                                     [NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
                                                     AVVideoCodecH264, AVVideoCodecKey,
                                                     nil
           ];

    self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];

    /* Pixel buffers are pushed to the writer, so an
       AVAssetWriterPixelBufferAdaptor is needed, expecting the same pixel
       format the AVCaptureVideoDataOutput supplies. */
    int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.recordPixelBufferAdaptor =
           [[AVAssetWriterInputPixelBufferAdaptor alloc]
            initWithAssetWriterInput:self.recordAssetWriterInput
            sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey,nil]];

    NSError* error = nil;

    // AVAssetWriter refuses to open a URL that already has a file behind it,
    // so remove any leftover movie from a previous recording session first.
    NSString* tempFilePath = [self tempFileString];
    NSFileManager* fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:tempFilePath]) {
        if (![fileManager removeItemAtPath:tempFilePath error:&error]) {
            NSLog(@"[Camera] Unable to remove existing temp file: %@", error);
        }
        error = nil;
    }

    NSLog(@"Create AVAssetWriter with url: %@", [self tempFileURL]);
    self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self tempFileURL]
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    // Check the returned writer itself, not just the error out-parameter.
    if (self.recordAssetWriter == nil) {
        NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
        return;
    }

    [self.recordAssetWriter addInput:self.recordAssetWriterInput];
    // Frames arrive from a live capture session, so the input must expect
    // real-time data; otherwise readyForMoreMediaData throttling stalls the
    // recording pipeline and frames are dropped.
    self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;

    NSLog(@"[Camera] created AVAssetWriter");

}
||||
|
||||
|
||||
// Builds the capture outputs: the frame-processing data output always, the
// file recorder only when recording was requested.
- (void)createCaptureOutput;
{
    [self createVideoDataOutput];
    if (!self.recordVideo) {
        return;
    }
    [self createVideoFileOutput];
}
||||
|
||||
// Attaches the custom preview layer (created in createVideoDataOutput) to the
// host view's layer tree.
- (void)createCustomVideoPreview;
{
    CALayer* hostLayer = self.parentView.layer;
    [hostLayer addSublayer:self.customPreviewLayer];
}
||||
|
||||
|
||||
#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate |
||||
|
||||
|
||||
// Per-frame callback: wraps the pixel buffer in a cv::Mat (no copy), hands it
// to the delegate for processing, renders the (possibly modified) image into
// the preview layer, and optionally appends it to the movie being recorded.
// Runs on videoDataOutputQueue, never on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (self.delegate) {

        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;

        CGColorSpaceRef colorSpace;
        CGContextRef context;

        int format_opencv;

        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {

            // Grayscale: wrap only the Y (luma) plane of the bi-planar buffer.
            format_opencv = CV_8UC1;

            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

        } else { // expect kCVPixelFormatType_32BGRA

            format_opencv = CV_8UC4;

            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        }

        // delegate image processing to the delegate; the Mat aliases the
        // locked pixel buffer, so in-place edits show up directly.
        cv::Mat image(height, width, format_opencv, bufferAddress, bytesPerRow);

        CGImage* dstImage;

        if ([self.delegate respondsToSelector:@selector(processImage:)]) {
            [self.delegate processImage:image];
        }

        // check if matrix data pointer or dimensions were changed by the delegate;
        // if unchanged, we can render straight from the iOS pixel buffer.
        bool iOSimage = true;
        if (height == image.rows && width == image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = false;
        }

        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;

        // basically we decide if it's a grayscale, rgb or rgba image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            // Byte order differs between the BGRA capture buffer and a Mat
            // the delegate replaced.
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }

        if (iOSimage) {
            // Fast path: render directly from the locked pixel buffer.
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {

            // Slow path: the delegate replaced the Mat, so copy its bytes out.
            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols,                 // width
                                     image.rows,                 // height
                                     8,                          // bits per component
                                     8 * image.elemSize(),       // bits per pixel
                                     image.step,                 // bytesPerRow
                                     colorSpace,                 // colorspace
                                     bitmapInfo,                 // bitmap info
                                     provider,                   // CGDataProviderRef
                                     NULL,                       // decode
                                     false,                      // should interpolate
                                     kCGRenderingIntentDefault   // intent
                                     );

            CGDataProviderRelease(provider);
        }

        // render buffer into the preview layer; layer contents must be set on
        // the main thread.
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });

        if (self.recordVideo == YES) {
            // a very dense way to keep track of the time at which this frame
            // occurs relative to the output stream, but it's just an example!

            // TODO reset frame number
            // NOTE(review): frameNumber is static, so it keeps counting across
            // recording sessions — timestamps of a second recording start late.
            static int64_t frameNumber = 0;
            if (self.recordAssetWriterInput.readyForMoreMediaData) {
                [self.recordPixelBufferAdaptor appendPixelBuffer:imageBuffer
                                            withPresentationTime:CMTimeMake(frameNumber, self.defaultFPS)];
            }
            frameNumber++;
        }

        // cleanup
        CGImageRelease(dstImage);

        CGColorSpaceRelease(colorSpace);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}
||||
|
||||
|
||||
// Resizes the preview layer to fill the parent view, then re-applies the
// rotation transform for the current orientation.
- (void)updateOrientation;
{
    NSLog(@"rotate..");
    CGRect parentBounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    self.customPreviewLayer.bounds = parentBounds;
    [self layoutPreviewLayer];
}
||||
|
||||
|
||||
// Copies the recorded temp movie into the device's saved-photos album.
// No-op when recording was not enabled.
- (void)saveVideo;
{
    if (!self.recordVideo) {
        return;
    }

    ALAssetsLibrary* assetLibrary = [[ALAssetsLibrary alloc] init];
    if ([assetLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:[self tempFileURL]]) {
        [assetLibrary writeVideoAtPathToSavedPhotosAlbum:[self tempFileURL]
                                         completionBlock:^(NSURL *assetURL, NSError *error) {}];
    }
}
||||
|
||||
|
||||
// Returns the fixed scratch location for recordings: <tmp>/output.mov
// (same path tempFileString produces).
// NOTE(review): an existing file here is only logged, not removed — a stale
// movie must be deleted before an AVAssetWriter can open this URL.
- (NSURL *)tempFileURL;
{
    NSString *moviePath = [NSTemporaryDirectory() stringByAppendingString:@"output.mov"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:moviePath]) {
        NSLog(@"file exists");
    }
    return [NSURL fileURLWithPath:moviePath];
}
||||
|
||||
|
||||
|
||||
// String form of the scratch movie path returned by tempFileURL.
- (NSString *)tempFileString;
{
    return [NSTemporaryDirectory() stringByAppendingString:@"output.mov"];
}
||||
|
||||
@end |
Loading…
Reference in new issue