diff --git a/modules/highgui/include/opencv2/highgui/cap_ios.h b/modules/highgui/include/opencv2/highgui/cap_ios.h
index e031764db8..aeb6c42be7 100644
--- a/modules/highgui/include/opencv2/highgui/cap_ios.h
+++ b/modules/highgui/include/opencv2/highgui/cap_ios.h
@@ -109,6 +109,7 @@
 #ifdef __cplusplus
 // delegate method for processing image frames
 - (void)processImage:(cv::Mat&)image;
+
 #endif
 
 @end
@@ -128,8 +129,6 @@
     AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
     AVAssetWriter* recordAssetWriter;
 
-    CMTime lastSampleTime;
-
 }
 
 @property (nonatomic, assign) id delegate;
@@ -140,6 +139,7 @@
 @property (nonatomic, retain) AVAssetWriterInput* recordAssetWriterInput;
 @property (nonatomic, retain) AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
 @property (nonatomic, retain) AVAssetWriter* recordAssetWriter;
+@property (nonatomic, readonly) int64_t timestampMs;
 
 - (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
 - (void)layoutPreviewLayer;
diff --git a/modules/highgui/src/cap_ios_abstract_camera.mm b/modules/highgui/src/cap_ios_abstract_camera.mm
index f4f706afe7..61ea00ada2 100644
--- a/modules/highgui/src/cap_ios_abstract_camera.mm
+++ b/modules/highgui/src/cap_ios_abstract_camera.mm
@@ -401,6 +401,7 @@
     NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
     NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");
 
+#if 0
     AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
     NSError *error = nil;
     //AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
@@ -427,6 +428,7 @@
     self.audioCaptureConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
 
     NSLog(@"Audio has been setup with callback ");
+#endif
 }
 
diff --git a/modules/highgui/src/cap_ios_video_camera.mm b/modules/highgui/src/cap_ios_video_camera.mm
index 343ffde517..fc3cfd4f45 100644
--- a/modules/highgui/src/cap_ios_video_camera.mm
+++ b/modules/highgui/src/cap_ios_video_camera.mm
@@ -43,6 +43,11 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 @interface CvVideoCamera () {
     NSString* mediaPath;
+    int recordCountDown;
+    CMTime _lastSampleTime;
+    int64_t _timestampMs;
+    dispatch_queue_t movieWriterQueue;
+
 }
 
@@ -54,6 +59,7 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 @property (nonatomic, retain) CALayer *customPreviewLayer;
 @property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;
 @property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
+@property (nonatomic, retain) dispatch_queue_t movieWriterQueue;
 
 @end
 
@@ -82,6 +88,9 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 @synthesize recordPixelBufferAdaptor;
 @synthesize recordAssetWriter;
 
+@synthesize timestampMs = _timestampMs;
+
+
 
@@ -89,12 +98,14 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 
 - (id)initWithParentView:(UIView*)parent;
 {
+    recordCountDown = 1000000000;
     self = [super initWithParentView:parent];
     if (self) {
         self.useAVCaptureVideoPreviewLayer = NO;
         self.recordVideo = NO;
         self.rotateVideo = NO;
     }
+    movieWriterQueue = nil;
     return self;
 }
 
@@ -105,6 +116,8 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 
 - (void)start;
 {
+    recordCountDown = 5;
+    movieWriterQueue = nil;
     [super start];
 
     if (self.recordVideo == YES) {
@@ -116,11 +129,8 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
         if (error == nil) {
             NSLog(@"[Camera] Delete file %@", [self videoFileString]);
         }
-
-        if ([[NSFileManager defaultManager] fileExistsAtPath:[self mediaFileString]]) {
-            [[NSFileManager defaultManager] removeItemAtPath:[self mediaFileString] error:&error];
-        }
     }
+
 }
 
@@ -146,6 +156,9 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
         self.recordAssetWriter = nil;
         self.recordAssetWriterInput = nil;
         self.recordPixelBufferAdaptor = nil;
+        if (movieWriterQueue)
+            dispatch_release(movieWriterQueue);
+        self.movieWriterQueue = nil;
     }
 
     [self.customPreviewLayer removeFromSuperlayer];
@@ -344,6 +357,9 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 
     [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
 
+    if (self.recordVideo == YES && movieWriterQueue == nil) {
+        movieWriterQueue = dispatch_queue_create("opencv_movieWriter", DISPATCH_QUEUE_SERIAL);
+    }
 
     NSLog(@"[Camera] created AVCaptureVideoDataOutput at %d FPS", self.defaultFPS);
 }
@@ -477,6 +493,15 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 }
 
 #pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate
+- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
+{
+    if (connection == self.audioCaptureConnection) {
+        NSLog(@"Audio sample did drop ");
+        return;
+    }
+    NSLog(@"Video Frame did drop ");
+}
+
 
 - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
 {
@@ -484,7 +509,7 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
     (void)connection;
 
     if (connection == self.audioCaptureConnection) {
-        NSLog(@"Audio Sample came in ");
+        //NSLog(@"Audio Sample came in ");
         return;
     }
 
@@ -526,15 +551,22 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
         }
 
+
+        CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+        int64_t msec = lastSampleTime.value / (lastSampleTime.timescale / 1000);
+        _timestampMs = msec;
+        //NSLog(@"Timestamp %u / %u, msec = %lu ", lastSampleTime.value, lastSampleTime.timescale, msec);
+
+
         // delegate image processing to the delegate
         cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);
 
-        CGImage* dstImage;
-
         if ([self.delegate respondsToSelector:@selector(processImage:)]) {
             [self.delegate processImage:image];
        }
 
+        CGImage* dstImage;
+
         // check if matrix data pointer or dimensions were changed by the delegate
         bool iOSimage = false;
         if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
@@ -595,17 +627,20 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
 
         // render buffer
+        //dispatch_sync(dispatch_get_main_queue(), ^{
         dispatch_sync(dispatch_get_main_queue(), ^{
             self.customPreviewLayer.contents = (__bridge id)dstImage;
         });
 
+        if (recordCountDown > 0)
+            recordCountDown--;
+
+        if (self.recordVideo == YES && recordCountDown <= 0) {
+            //CMTimeShow(lastSampleTime);
 
-        if (self.recordVideo == YES) {
-            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-//            CMTimeShow(lastSampleTime);
             if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                 [self.recordAssetWriter startWriting];
-                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
+                [self.recordAssetWriter startSessionAtSourceTime:_lastSampleTime];
                 if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                     NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                     return;
@@ -623,10 +658,8 @@ static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;};
                 if (pixelBuffer != nullptr)
                     CVPixelBufferRelease(pixelBuffer);
             }
-
         }
 
-
         // cleanup
         CGImageRelease(dstImage);
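
Usage sketch (not part of the patch): one way a client could read the new readonly `timestampMs` property from inside its `processImage:` callback to tag frames with the capture timestamp. The `MyViewController` class and its `videoCamera` property are illustrative assumptions, not code from this diff; only `CvVideoCamera`, `CvVideoCameraDelegate`, and `timestampMs` come from the patched header. The file would be compiled as Objective-C++ (.mm).

```objc
#import <UIKit/UIKit.h>
#import <opencv2/highgui/cap_ios.h>
#import <opencv2/imgproc/imgproc.hpp>

// Hypothetical client view controller acting as the camera delegate.
@interface MyViewController : UIViewController<CvVideoCameraDelegate>
@property (nonatomic, retain) CvVideoCamera* videoCamera;
@end

@implementation MyViewController

#ifdef __cplusplus
- (void)processImage:(cv::Mat&)image
{
    // timestampMs is the presentation time of the current sample buffer in
    // milliseconds, stored by the capture callback before this delegate runs.
    int64_t ts = self.videoCamera.timestampMs;

    // Overlay the timestamp on the frame that will be previewed (and recorded).
    cv::putText(image, cv::format("%lld ms", (long long)ts), cv::Point(10, 30),
                cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 255, 0, 255), 2);
}
#endif

@end
```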