diff --git a/PBJVision.podspec b/PBJVision.podspec
index e4a7ca8..eead096 100644
--- a/PBJVision.podspec
+++ b/PBJVision.podspec
@@ -1,13 +1,14 @@
 Pod::Spec.new do |s|
   s.name = "PBJVision"
-  s.version = "0.1.1"
+  s.version = "0.1.2"
   s.summary = "iOS camera engine, supports touch-to-record video and photo capture."
   s.homepage = "https://github.com/piemonte/PBJVision"
   s.license = "MIT"
   s.authors = { "Patrick Piemonte" => "piemonte@alumni.cmu.edu" }
-  s.source = { :git => "https://github.com/piemonte/PBJVision.git", :tag => "v0.1.1" }
-  s.frameworks = 'Foundation', 'AVFoundation', 'CoreGraphics', 'CoreMedia', 'CoreVideo', 'MobileCoreServices', 'ImageIO', 'QuartzCore'
+  s.source = { :git => "https://github.com/piemonte/PBJVision.git", :tag => "v0.1.2" }
+  s.frameworks = 'Foundation', 'AVFoundation', 'CoreGraphics', 'CoreMedia', 'CoreVideo', 'MobileCoreServices', 'ImageIO', 'QuartzCore', 'OpenGLES', 'UIKit'
   s.platform = :ios, '6.0'
   s.source_files = 'Source'
+  s.resources = 'Source/Shaders/*'
   s.requires_arc = true
 end
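Note on the podspec change: the new `s.resources = 'Source/Shaders/*'` line is what makes the GLSL files available at runtime, since `-_loadShaders` (in PBJVision.m below) resolves them from the main bundle by name. A minimal sketch of a startup check a consumer might add; the variable names and the check itself are illustrative, not part of the library:

    // hypothetical sanity check: confirm the pod's shader resources were
    // copied into the app bundle, since PBJVision loads them by name
    NSString *vshPath = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
    NSString *fshPath = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
    if (!vshPath || !fshPath) {
        NSLog(@"PBJVision shader resources are missing from the bundle");
    }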
diff --git a/Project/Vision.xcodeproj/project.pbxproj b/Project/Vision.xcodeproj/project.pbxproj
index 5248a5f..7a22f9d 100644
--- a/Project/Vision.xcodeproj/project.pbxproj
+++ b/Project/Vision.xcodeproj/project.pbxproj
@@ -14,6 +14,12 @@
 		060F7B19179F50B800E27091 /* capture_rec_off@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 060F7B15179F50B800E27091 /* capture_rec_off@2x.png */; };
 		060F7B1A179F50B800E27091 /* capture_yep@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 060F7B16179F50B800E27091 /* capture_yep@2x.png */; };
 		060F7B1D179F550800E27091 /* capture_flip@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 060F7B1C179F550800E27091 /* capture_flip@2x.png */; };
+		0624D09717D43BB500665930 /* capture_onion_selected@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0624D09517D43BB500665930 /* capture_onion_selected@2x.png */; };
+		0624D09817D43BB500665930 /* capture_onion@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0624D09617D43BB500665930 /* capture_onion@2x.png */; };
+		0624D09A17D43D5D00665930 /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0624D09917D43D5D00665930 /* OpenGLES.framework */; };
+		067D52F817D8574200541B5E /* GLKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 067D52F717D8574200541B5E /* GLKit.framework */; };
+		067D52FC17D857DC00541B5E /* Shader.fsh in Resources */ = {isa = PBXBuildFile; fileRef = 067D52FA17D857AB00541B5E /* Shader.fsh */; };
+		067D52FD17D857DC00541B5E /* Shader.vsh in Resources */ = {isa = PBXBuildFile; fileRef = 067D52FB17D857AB00541B5E /* Shader.vsh */; };
 		0683D1C2179F2E1700EE66D6 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0683D1C1179F2E1700EE66D6 /* Foundation.framework */; };
 		0683D1C4179F2E1700EE66D6 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0683D1C3179F2E1700EE66D6 /* CoreGraphics.framework */; };
 		0683D1C6179F2E1700EE66D6 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0683D1C5179F2E1700EE66D6 /* UIKit.framework */; };
@@ -45,6 +51,12 @@
 		060F7B15179F50B800E27091 /* capture_rec_off@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = "capture_rec_off@2x.png"; path = "UI/capture_rec_off@2x.png"; sourceTree = "<group>"; };
 		060F7B16179F50B800E27091 /* capture_yep@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = "capture_yep@2x.png"; path = "UI/capture_yep@2x.png"; sourceTree = "<group>"; };
 		060F7B1C179F550800E27091 /* capture_flip@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = "capture_flip@2x.png"; path = "UI/capture_flip@2x.png"; sourceTree = "<group>"; };
+		0624D09517D43BB500665930 /* capture_onion_selected@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = "capture_onion_selected@2x.png"; path = "UI/capture_onion_selected@2x.png"; sourceTree = "<group>"; };
+		0624D09617D43BB500665930 /* capture_onion@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = "capture_onion@2x.png"; path = "UI/capture_onion@2x.png"; sourceTree = "<group>"; };
+		0624D09917D43D5D00665930 /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; };
+		067D52F717D8574200541B5E /* GLKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = GLKit.framework; path = System/Library/Frameworks/GLKit.framework; sourceTree = SDKROOT; };
+		067D52FA17D857AB00541B5E /* Shader.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = Shader.fsh; path = ../Source/Shaders/Shader.fsh; sourceTree = "<group>"; };
+		067D52FB17D857AB00541B5E /* Shader.vsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = Shader.vsh; path = ../Source/Shaders/Shader.vsh; sourceTree = "<group>"; };
 		0683D1BE179F2E1700EE66D6 /* Vision.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Vision.app; sourceTree = BUILT_PRODUCTS_DIR; };
 		0683D1C1179F2E1700EE66D6 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
 		0683D1C3179F2E1700EE66D6 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
@@ -79,6 +91,8 @@
 			isa = PBXFrameworksBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				067D52F817D8574200541B5E /* GLKit.framework in Frameworks */,
+				0624D09A17D43D5D00665930 /* OpenGLES.framework in Frameworks */,
 				060F7B0F179F459900E27091 /* AssetsLibrary.framework in Frameworks */,
 				06B7D031179F33B500F3F527 /* ImageIO.framework in Frameworks */,
 				06B7D02F179F333400F3F527 /* MobileCoreServices.framework in Frameworks */,
@@ -102,10 +116,21 @@
 				060F7B15179F50B800E27091 /* capture_rec_off@2x.png */,
 				060F7B16179F50B800E27091 /* capture_yep@2x.png */,
 				060F7B1C179F550800E27091 /* capture_flip@2x.png */,
+				0624D09517D43BB500665930 /* capture_onion_selected@2x.png */,
+				0624D09617D43BB500665930 /* capture_onion@2x.png */,
 			);
 			name = UI;
 			sourceTree = "<group>";
 		};
+		067D52F917D8577D00541B5E /* Shaders */ = {
+			isa = PBXGroup;
+			children = (
+				067D52FA17D857AB00541B5E /* Shader.fsh */,
+				067D52FB17D857AB00541B5E /* Shader.vsh */,
+			);
+			name = Shaders;
+			sourceTree = "<group>";
+		};
 		0683D1B5179F2E1700EE66D6 = {
 			isa = PBXGroup;
 			children = (
@@ -128,13 +153,15 @@
 			isa = PBXGroup;
 			children = (
 				060F7B0E179F459900E27091 /* AssetsLibrary.framework */,
-				06B7D030179F33B500F3F527 /* ImageIO.framework */,
-				06B7D02D179F330E00F3F527 /* MobileCoreServices.framework */,
-				06B7D02B179F313800F3F527 /* CoreVideo.framework */,
-				06B7D029179F312F00F3F527 /* CoreMedia.framework */,
 				06B7D027179F307D00F3F527 /* AVFoundation.framework */,
-				0683D1C1179F2E1700EE66D6 /* Foundation.framework */,
 				0683D1C3179F2E1700EE66D6 /* CoreGraphics.framework */,
+				06B7D029179F312F00F3F527 /* CoreMedia.framework */,
+				06B7D02B179F313800F3F527 /* CoreVideo.framework */,
+				0683D1C1179F2E1700EE66D6 /* Foundation.framework */,
+				067D52F717D8574200541B5E /* GLKit.framework */,
+				06B7D030179F33B500F3F527 /* ImageIO.framework */,
+				06B7D02D179F330E00F3F527 /* MobileCoreServices.framework */,
+				0624D09917D43D5D00665930 /* OpenGLES.framework */,
 				0683D1C5179F2E1700EE66D6 /* UIKit.framework */,
 			);
 			name = Frameworks;
@@ -176,6 +203,7 @@
 		06B7D026179F2F0800F3F527 /* Vision */ = {
 			isa = PBXGroup;
 			children = (
+				067D52F917D8577D00541B5E /* Shaders */,
 				06B7D01A179F2E7700F3F527 /* PBJVision.h */,
 				06B7D01B179F2E7700F3F527 /* PBJVision.m */,
 				06B7D01C179F2E7700F3F527 /* PBJVisionUtilities.h */,
@@ -237,8 +265,11 @@
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				067D52FC17D857DC00541B5E /* Shader.fsh in Resources */,
+				067D52FD17D857DC00541B5E /* Shader.vsh in Resources */,
 				06B7D025179F2EDC00F3F527 /* Release.xcconfig in Resources */,
 				060F7B18179F50B800E27091 /* capture_rec_blink@2x.png in Resources */,
+				0624D09717D43BB500665930 /* capture_onion_selected@2x.png in Resources */,
 				06B7D036179F357600F3F527 /* Default.png in Resources */,
 				0683D1CC179F2E1700EE66D6 /* InfoPlist.strings in Resources */,
 				060F7B17179F50B800E27091 /* capture_rec_base@2x.png in Resources */,
@@ -249,6 +280,7 @@
 				06B7D037179F357600F3F527 /* Default@2x.png in Resources */,
 				060F7B19179F50B800E27091 /* capture_rec_off@2x.png in Resources */,
 				06B7D024179F2EDC00F3F527 /* Debug.xcconfig in Resources */,
+				0624D09817D43BB500665930 /* capture_onion@2x.png in Resources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
diff --git a/Project/Vision/PBJViewController.m b/Project/Vision/PBJViewController.m
index f7573d6..0bf20c6 100644
--- a/Project/Vision/PBJViewController.m
+++ b/Project/Vision/PBJViewController.m
@@ -11,6 +11,7 @@
 #import "PBJStrobeView.h"
 #import <AVFoundation/AVFoundation.h>
+#import <GLKit/GLKit.h>
 
 @interface UIButton (ExtendedHit)
 
@@ -37,10 +38,14 @@ @interface PBJViewController () <
 {
     PBJStrobeView *_strobeView;
     UIButton *_doneButton;
+    UIButton *_flipButton;
+    UIButton *_onionButton;
 
     UIView *_previewView;
     AVCaptureVideoPreviewLayer *_previewLayer;
+
+    GLKViewController *_effectsViewController;
 
     UILabel *_instructionLabel;
     UILongPressGestureRecognizer *_longPressGestureRecognizer;
@@ -99,21 +104,28 @@ - (void)_setup
     // preview
     _previewView = [[UIView alloc] initWithFrame:CGRectZero];
     _previewView.backgroundColor = [UIColor blackColor];
-    CGRect previewFrame = CGRectZero;
-    previewFrame.origin = CGPointMake(0, 60.0f);
-    CGFloat previewWidth = self.view.frame.size.width;
-    previewFrame.size = CGSizeMake(previewWidth, previewWidth);
+    CGRect previewFrame = CGRectMake(0, 60.0f, CGRectGetWidth(self.view.frame), CGRectGetWidth(self.view.frame));
     _previewView.frame = previewFrame;
-
-    // add AV layer
     _previewLayer = [[PBJVision sharedInstance] previewLayer];
-    CGRect previewBounds = _previewView.layer.bounds;
-    _previewLayer.bounds = previewBounds;
+    _previewLayer.frame = _previewView.bounds;
     _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
-    _previewLayer.position = CGPointMake(CGRectGetMidX(previewBounds), CGRectGetMidY(previewBounds));
     [_previewView.layer addSublayer:_previewLayer];
     [self.view addSubview:_previewView];
 
+    // onion skin
+    _effectsViewController = [[GLKViewController alloc] init];
+    _effectsViewController.preferredFramesPerSecond = 60;
+
+    GLKView *view = (GLKView *)_effectsViewController.view;
+    CGRect viewFrame = _previewView.bounds;
+    view.frame = viewFrame;
+    view.context = [[PBJVision sharedInstance] context];
+    view.contentScaleFactor = [[UIScreen mainScreen] scale];
+    view.alpha = 0.5f;
+    view.hidden = YES;
+    [[PBJVision sharedInstance] setPresentationFrame:_previewView.frame];
+    [_previewView addSubview:_effectsViewController.view];
+
     // instruction label
     _instructionLabel = [[UILabel alloc] initWithFrame:self.view.bounds];
     _instructionLabel.textAlignment = NSTextAlignmentCenter;
@@ -145,17 +157,18 @@ - (void)_setup
     // flip button
     _flipButton = [UIButton buttonWithType:UIButtonTypeCustom];
-
-    UIImage *flipImage = [UIImage imageNamed:@"capture_flip"];
-    [_flipButton setImage:flipImage forState:UIControlStateNormal];
-
-    CGRect flipFrame = _flipButton.frame;
-    flipFrame.size = CGSizeMake(25.0f, 20.0f);
-    flipFrame.origin = CGPointMake(10.0f, CGRectGetHeight(self.view.bounds) - 10.0f);
-    _flipButton.frame = flipFrame;
-
+    [_flipButton setImage:[UIImage imageNamed:@"capture_flip"] forState:UIControlStateNormal];
+    _flipButton.frame = CGRectMake(15.0f, CGRectGetHeight(self.view.bounds) - 15.0f, 30.0f, 25.0f);
     [_flipButton addTarget:self action:@selector(_handleFlipButton:) forControlEvents:UIControlEventTouchUpInside];
     [self.view addSubview:_flipButton];
+
+    // onion button
+    _onionButton = [UIButton buttonWithType:UIButtonTypeCustom];
+    [_onionButton setImage:[UIImage imageNamed:@"capture_onion"] forState:UIControlStateNormal];
+    [_onionButton setImage:[UIImage imageNamed:@"capture_onion_selected"] forState:UIControlStateSelected];
+    _onionButton.frame = CGRectMake(CGRectGetWidth(self.view.bounds) - 25.0f - 15.0f, CGRectGetHeight(self.view.bounds) - 15.0f, 25.0f, 25.0f);
+    [_onionButton addTarget:self action:@selector(_handleOnionSkinningButton:) forControlEvents:UIControlEventTouchUpInside];
+    [self.view addSubview:_onionButton];
 }
 
 #pragma mark - view lifecycle
@@ -194,17 +207,20 @@ - (void)_startCapture
 - (void)_pauseCapture
 {
     [[PBJVision sharedInstance] pauseVideoCapture];
+    _effectsViewController.view.hidden = !_onionButton.selected;
 }
 
 - (void)_resumeCapture
 {
     [[PBJVision sharedInstance] resumeVideoCapture];
+    _effectsViewController.view.hidden = YES;
 }
 
 - (void)_endCapture
 {
     [UIApplication sharedApplication].idleTimerDisabled = NO;
     [[PBJVision sharedInstance] endVideoCapture];
+    _effectsViewController.view.hidden = YES;
 }
 
 - (void)_resetCapture
@@ -218,6 +234,7 @@
     [vision setCameraDevice:PBJCameraDeviceBack];
     [vision setCameraOrientation:PBJCameraOrientationPortrait];
     [vision setFocusMode:PBJFocusModeAutoFocus];
+    [vision setVideoRenderingEnabled:YES];
 }
 
 #pragma mark - UIButton
@@ -232,6 +249,13 @@ - (void)_handleFlipButton:(UIButton *)button
     }
 }
 
+- (void)_handleOnionSkinningButton:(UIButton *)button
+{
+    [_onionButton setSelected:!_onionButton.selected];
+    if (_recording)
+        _effectsViewController.view.hidden = !_onionButton.selected;
+}
+
 - (void)_handleDoneButton:(UIButton *)button
 {
     // resets long press
@@ -287,16 +311,6 @@ - (void)visionSessionDidStop:(PBJVision *)vision
 {
 }
 
-- (void)visionPreviewDidStart:(PBJVision *)vision
-{
-    _longPressGestureRecognizer.enabled = YES;
-}
-
-- (void)visionPreviewWillStop:(PBJVision *)vision
-{
-    _longPressGestureRecognizer.enabled = NO;
-}
-
 - (void)visionModeWillChange:(PBJVision *)vision
 {
 }
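The sample controller's overlay visibility reduces to a single rule; restated here as a sketch, with a hypothetical `paused` flag standing in for the pause/resume/end callbacks above:

    // the onion-skin overlay shows only while capture is paused AND the
    // toggle is selected; resuming or ending capture always hides it
    BOOL overlayVisible = paused && _onionButton.selected; // paused: hypothetical flag
    _effectsViewController.view.hidden = !overlayVisible;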
diff --git a/Project/Vision/UI/capture_onion@2x.png b/Project/Vision/UI/capture_onion@2x.png
new file mode 100644
index 0000000..d92896d
Binary files /dev/null and b/Project/Vision/UI/capture_onion@2x.png differ
diff --git a/Project/Vision/UI/capture_onion_selected@2x.png b/Project/Vision/UI/capture_onion_selected@2x.png
new file mode 100644
index 0000000..c10add4
Binary files /dev/null and b/Project/Vision/UI/capture_onion_selected@2x.png differ
diff --git a/Project/Vision/Vision-Info.plist b/Project/Vision/Vision-Info.plist
index 0394c8b..62877c5 100644
--- a/Project/Vision/Vision-Info.plist
+++ b/Project/Vision/Vision-Info.plist
@@ -31,8 +31,6 @@
 	<key>UISupportedInterfaceOrientations</key>
 	<array>
 		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
 	</array>
 </dict>
 </plist>
diff --git a/Source/PBJVision.h b/Source/PBJVision.h
index bcccc02..b3d495f 100644
--- a/Source/PBJVision.h
+++ b/Source/PBJVision.h
@@ -45,6 +45,7 @@ extern NSString * const PBJVisionPhotoThumbnailKey; // 160x120
 extern NSString * const PBJVisionVideoPathKey;
 extern NSString * const PBJVisionVideoThumbnailKey;
 
+@class EAGLContext;
 @protocol PBJVisionDelegate;
 @interface PBJVision : NSObject
 {
@@ -89,6 +90,9 @@ extern NSString * const PBJVisionVideoThumbnailKey;
 @property (nonatomic, readonly) BOOL supportsVideoCapture;
 @property (nonatomic, readonly) BOOL canCaptureVideo;
 
+@property (nonatomic, getter=isVideoRenderingEnabled) BOOL videoRenderingEnabled;
+@property (nonatomic, readonly) EAGLContext *context;
+@property (nonatomic) CGRect presentationFrame;
 
 - (void)startVideoCapture;
 - (void)pauseVideoCapture;
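A sketch of how a client drives the three new properties, distilled from the sample controller above (`previewView` is assumed to be the client's own preview container; this is illustrative wiring, not a prescribed flow):

    PBJVision *vision = [PBJVision sharedInstance];
    [vision setVideoRenderingEnabled:YES];           // opt in to sample buffer rendering
    [vision setPresentationFrame:previewView.frame]; // lets PBJVision aspect-fit the quad

    // the client owns the GLKView and hands it PBJVision's EAGLContext
    GLKView *effectsView = [[GLKView alloc] initWithFrame:previewView.bounds
                                                  context:[vision context]];
    effectsView.alpha = 0.5f; // translucent, so the live preview shows through
    [previewView addSubview:effectsView];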
diff --git a/Source/PBJVision.m b/Source/PBJVision.m
index 61722d5..cad48bd 100644
--- a/Source/PBJVision.m
+++ b/Source/PBJVision.m
@@ -11,6 +11,7 @@
 #import 
 #import 
 #import 
+#import 
 
 #define LOG_VISION 0
 #if !defined(NDEBUG) && LOG_VISION
@@ -39,6 +40,26 @@
 NSString * const PBJVisionVideoPathKey = @"PBJVisionVideoPathKey";
 NSString * const PBJVisionVideoThumbnailKey = @"PBJVisionVideoThumbnailKey";
 
+// buffer rendering shader uniforms and attributes
+// TODO: create an abstraction for shaders
+
+enum
+{
+    PBJVisionUniformY,
+    PBJVisionUniformUV,
+    PBJVisionUniformCount
+};
+GLint _uniforms[PBJVisionUniformCount];
+
+enum
+{
+    PBJVisionAttributeVertex,
+    PBJVisionAttributeTextureCoord,
+    PBJVisionAttributeCount
+};
+
+///
+
 @interface PBJVision () <
     AVCaptureAudioDataOutputSampleBufferDelegate,
     AVCaptureVideoDataOutputSampleBufferDelegate>
@@ -84,7 +105,21 @@ @interface PBJVision () <
     CMTime _timeOffset;
     CMTime _audioTimestamp;
     CMTime _videoTimestamp;
-    
+
+    // sample buffer rendering
+
+    PBJCameraDevice _bufferDevice;
+    PBJCameraOrientation _bufferOrientation;
+    size_t _bufferWidth;
+    size_t _bufferHeight;
+    CGRect _presentationFrame;
+
+    EAGLContext *_context;
+    GLuint _program;
+    CVOpenGLESTextureRef _lumaTexture;
+    CVOpenGLESTextureRef _chromaTexture;
+    CVOpenGLESTextureCacheRef _videoTextureCache;
+
     // flags
     struct {
@@ -96,6 +131,7 @@ @interface PBJVision () <
         unsigned int paused:1;
         unsigned int interrupted:1;
         unsigned int videoWritten:1;
+        unsigned int videoRenderingEnabled:1;
     } __block _flags;
 }
 
@@ -110,6 +146,8 @@ @implementation PBJVision
 @synthesize cameraMode = _cameraMode;
 @synthesize cameraOrientation = _cameraOrientation;
 @synthesize focusMode = _focusMode;
+@synthesize context = _context;
+@synthesize presentationFrame = _presentationFrame;
 
 #pragma mark - singleton
@@ -139,6 +177,16 @@ - (BOOL)isRecording
     return isRecording;
 }
 
+- (void)setVideoRenderingEnabled:(BOOL)videoRenderingEnabled
+{
+    _flags.videoRenderingEnabled = (unsigned int)videoRenderingEnabled;
+}
+
+- (BOOL)isVideoRenderingEnabled
+{
+    return _flags.videoRenderingEnabled;
+}
+
 - (void)_setOrientationForConnection:(AVCaptureConnection *)connection
 {
     if (!connection)
@@ -219,6 +267,12 @@ - (id)init
 {
     self = [super init];
     if (self) {
+        _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+        if (!_context) {
+            DLog(@"failed to create GL context");
+        }
+        [self _setupGL];
+
         _captureSessionDispatchQueue = dispatch_queue_create("PBJVisionSession", DISPATCH_QUEUE_SERIAL); // protects session
         _captureVideoDispatchQueue = dispatch_queue_create("PBJVisionVideo", DISPATCH_QUEUE_SERIAL); // protects capture
         _previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:nil];
@@ -233,6 +287,15 @@ - (void)dealloc
 {
     [[NSNotificationCenter defaultCenter] removeObserver:self];
     _delegate = nil;
+
+    [self _cleanUpTextures];
+
+    if (_videoTextureCache) {
+        CFRelease(_videoTextureCache);
+        _videoTextureCache = NULL;
+    }
+
+    [self _destroyGL];
 }
 
 #pragma mark - queue helper methods
@@ -269,6 +332,15 @@ - (void)_setupCamera
 {
     if (_captureSession)
         return;
+
+#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
+    CVReturn cvError = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
+#else
+    CVReturn cvError = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)_context, NULL, &_videoTextureCache);
+#endif
+    if (cvError) {
+        NSLog(@"error CVOpenGLESTextureCacheCreate (%d)", cvError);
+    }
 
     _captureSession = [[AVCaptureSession alloc] init];
@@ -1220,10 +1292,118 @@ - (CMSampleBufferRef)_createOffsetSampleBuffer:(CMSampleBufferRef)sampleBuffer w
     return outputSampleBuffer;
 }
 
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
+#pragma mark - sample buffer processing
+
+- (void)_cleanUpTextures
 {
-    // TODO: save the last frame for onion skinning
+    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
+
+    if (_lumaTexture) {
+        CFRelease(_lumaTexture);
+        _lumaTexture = NULL;
+    }
+
+    if (_chromaTexture) {
+        CFRelease(_chromaTexture);
+        _chromaTexture = NULL;
+    }
+}
+
+// convert the CoreVideo YUV pixel buffer (Y luminance, CbCr chroma) into RGB
+// the conversion runs on the GPU, which is far more efficient than doing it on the CPU
+- (void)_processSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    if (!_context)
+        return;
+
+    if (!_videoTextureCache)
+        return;
+
+    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+
+    if (CVPixelBufferLockBaseAddress(imageBuffer, 0) != kCVReturnSuccess)
+        return;
+
+    [EAGLContext setCurrentContext:_context];
+
+    [self _cleanUpTextures];
+
+    size_t width = CVPixelBufferGetWidth(imageBuffer);
+    size_t height = CVPixelBufferGetHeight(imageBuffer);
+
+    // only bind the vertices once or if parameters change
+
+    if (_bufferWidth != width ||
+        _bufferHeight != height ||
+        _bufferDevice != _cameraDevice ||
+        _bufferOrientation != _cameraOrientation) {
+
+        _bufferWidth = width;
+        _bufferHeight = height;
+        _bufferDevice = _cameraDevice;
+        _bufferOrientation = _cameraOrientation;
+        [self _setupBuffers];
+
+    }
+
+    // always upload the textures since the input may be changing
+
+    CVReturn error = 0;
+
+    // Y-plane
+    glActiveTexture(GL_TEXTURE0);
+    error = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+                                                         _videoTextureCache,
+                                                         imageBuffer,
+                                                         NULL,
+                                                         GL_TEXTURE_2D,
+                                                         GL_RED_EXT,
+                                                         (GLsizei)_bufferWidth,
+                                                         (GLsizei)_bufferHeight,
+                                                         GL_RED_EXT,
+                                                         GL_UNSIGNED_BYTE,
+                                                         0,
+                                                         &_lumaTexture);
+    if (error) {
+        DLog(@"error CVOpenGLESTextureCacheCreateTextureFromImage (%d)", error);
+    }
+
+    glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    // UV-plane
+    glActiveTexture(GL_TEXTURE1);
+    error = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+                                                         _videoTextureCache,
+                                                         imageBuffer,
+                                                         NULL,
+                                                         GL_TEXTURE_2D,
+                                                         GL_RG_EXT,
+                                                         (GLsizei)(_bufferWidth * 0.5),
+                                                         (GLsizei)(_bufferHeight * 0.5),
+                                                         GL_RG_EXT,
+                                                         GL_UNSIGNED_BYTE,
+                                                         1,
+                                                         &_chromaTexture);
+    if (error) {
+        DLog(@"error CVOpenGLESTextureCacheCreateTextureFromImage (%d)", error);
+    }
+
+    glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    if (CVPixelBufferUnlockBaseAddress(imageBuffer, 0) != kCVReturnSuccess)
+        return;
+
+    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+}
+
+#pragma mark - AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
+{
     CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
     CFRetain(sampleBuffer);
     CFRetain(formatDescription);
@@ -1314,6 +1494,14 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
             _videoTimestamp = time;
             _flags.videoWritten = YES;
         }
+
+        // process the sample buffer for rendering
+        if (_flags.videoRenderingEnabled && _flags.videoWritten) {
+            [self _executeBlockOnMainQueue:^{
+                [self _processSampleBuffer:bufferToWrite];
+            }];
+        }
+
         CFRelease(bufferToWrite);
     }
@@ -1541,5 +1729,188 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N
     }
 }
 
+#pragma mark - OpenGLES context support
+
+- (void)_setupBuffers
+{
+
+//  unit square for testing
+//    static const GLfloat unitSquareVertices[] = {
+//        -1.0f, -1.0f,
+//        1.0f, -1.0f,
+//        -1.0f, 1.0f,
+//        1.0f, 1.0f,
+//    };
+
+    CGSize inputSize = CGSizeMake(_bufferWidth, _bufferHeight);
+    CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputSize, _presentationFrame);
+
+    CGFloat widthScale = CGRectGetHeight(_presentationFrame) / CGRectGetHeight(insetRect);
+    CGFloat heightScale = CGRectGetWidth(_presentationFrame) / CGRectGetWidth(insetRect);
+
+    static GLfloat vertices[8];
+
+    vertices[0] = -widthScale;
+    vertices[1] = -heightScale;
+    vertices[2] = widthScale;
+    vertices[3] = -heightScale;
+    vertices[4] = -widthScale;
+    vertices[5] = heightScale;
+    vertices[6] = widthScale;
+    vertices[7] = heightScale;
+
+    static const GLfloat textureCoordinates[] = {
+        0.0f, 1.0f,
+        1.0f, 1.0f,
+        0.0f, 0.0f,
+        1.0f, 0.0f,
+    };
+
+    static const GLfloat textureCoordinatesVerticalFlip[] = {
+        1.0f, 1.0f,
+        0.0f, 1.0f,
+        1.0f, 0.0f,
+        0.0f, 0.0f,
+    };
+
+    glEnableVertexAttribArray(PBJVisionAttributeVertex);
+    glVertexAttribPointer(PBJVisionAttributeVertex, 2, GL_FLOAT, GL_FALSE, 0, vertices);
+
+    if (_cameraDevice == PBJCameraDeviceFront) {
+        glEnableVertexAttribArray(PBJVisionAttributeTextureCoord);
+        glVertexAttribPointer(PBJVisionAttributeTextureCoord, 2, GL_FLOAT, GL_FALSE, 0, textureCoordinatesVerticalFlip);
+    } else {
+        glEnableVertexAttribArray(PBJVisionAttributeTextureCoord);
+        glVertexAttribPointer(PBJVisionAttributeTextureCoord, 2, GL_FLOAT, GL_FALSE, 0, textureCoordinates);
+    }
+}
+
+- (void)_setupGL
+{
+    [EAGLContext setCurrentContext:_context];
+
+    [self _loadShaders];
+
+    glUseProgram(_program);
+
+    glUniform1i(_uniforms[PBJVisionUniformY], 0);
+    glUniform1i(_uniforms[PBJVisionUniformUV], 1);
+}
+
+- (void)_destroyGL
+{
+    [EAGLContext setCurrentContext:_context];
+
+    if (_program) {
+        glDeleteProgram(_program);
+        _program = 0;
+    }
+
+    if ([EAGLContext currentContext] == _context) {
+        [EAGLContext setCurrentContext:nil];
+    }
+}
+
+#pragma mark - OpenGLES shader support
+// TODO: abstract this in the future
+
+- (BOOL)_loadShaders
+{
+    GLuint vertShader;
+    GLuint fragShader;
+    NSString *vertShaderName;
+    NSString *fragShaderName;
+
+    _program = glCreateProgram();
+
+    vertShaderName = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
+    if (![self _compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderName]) {
+        DLog(@"failed to compile vertex shader");
+        return NO;
+    }
+
+    fragShaderName = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
+    if (![self _compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderName]) {
+        DLog(@"failed to compile fragment shader");
+        return NO;
+    }
+
+    glAttachShader(_program, vertShader);
+    glAttachShader(_program, fragShader);
+
+    glBindAttribLocation(_program, PBJVisionAttributeVertex, "a_position");
+    glBindAttribLocation(_program, PBJVisionAttributeTextureCoord, "a_texture");
+
+    if (![self _linkProgram:_program]) {
+        DLog(@"failed to link program, %d", _program);
+
+        if (vertShader) {
+            glDeleteShader(vertShader);
+            vertShader = 0;
+        }
+        if (fragShader) {
+            glDeleteShader(fragShader);
+            fragShader = 0;
+        }
+        if (_program) {
+            glDeleteProgram(_program);
+            _program = 0;
+        }
+
+        return NO;
+    }
+
+    _uniforms[PBJVisionUniformY] = glGetUniformLocation(_program, "u_samplerY");
+    _uniforms[PBJVisionUniformUV] = glGetUniformLocation(_program, "u_samplerUV");
+
+    if (vertShader) {
+        glDetachShader(_program, vertShader);
+        glDeleteShader(vertShader);
+    }
+    if (fragShader) {
+        glDetachShader(_program, fragShader);
+        glDeleteShader(fragShader);
+    }
+
+    return YES;
+}
+
+- (BOOL)_compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
+{
+    GLint status;
+    const GLchar *source;
+
+    source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
+    if (!source) {
+        DLog(@"failed to load shader source");
+        return NO;
+    }
+
+    *shader = glCreateShader(type);
+    glShaderSource(*shader, 1, &source, NULL);
+    glCompileShader(*shader);
+
+    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
+    if (status == 0) {
+        glDeleteShader(*shader);
+        return NO;
+    }
+
+    return YES;
+}
+
+- (BOOL)_linkProgram:(GLuint)prog
+{
+    GLint status;
+    glLinkProgram(prog);
+
+    glGetProgramiv(prog, GL_LINK_STATUS, &status);
+    if (status == 0) {
+        return NO;
+    }
+
+    return YES;
+}
+
 @end
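Why `-_processSampleBuffer:` creates the chroma texture at half size: assuming the video output delivers bi-planar 4:2:0 YCbCr (which the two-plane GL_RED_EXT/GL_RG_EXT upload implies), a 1280x720 buffer breaks down as:

    // plane 0 (luma,  sampled via GL_RED_EXT): 1280 x 720, one byte per pixel (Y)
    // plane 1 (chroma, sampled via GL_RG_EXT):  640 x 360, two bytes per texel (Cb, Cr)
    // hence the (GLsizei)(_bufferWidth * 0.5) x (GLsizei)(_bufferHeight * 0.5) chroma texture

The texture-cache path (`CVOpenGLESTextureCacheCreateTextureFromImage`) binds the pixel buffer's planes to GL textures without a CPU copy, which is what keeps the conversion cheap.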
diff --git a/Source/Shaders/Shader.fsh b/Source/Shaders/Shader.fsh
new file mode 100644
index 0000000..caacd4d
--- /dev/null
+++ b/Source/Shaders/Shader.fsh
@@ -0,0 +1,22 @@
+
+uniform sampler2D u_samplerY;
+uniform sampler2D u_samplerUV;
+
+varying highp vec2 v_texture;
+
+void main()
+{
+    mediump vec3 yuv;
+    lowp vec3 rgb;
+
+    yuv.x = texture2D(u_samplerY, v_texture).r;
+    yuv.yz = texture2D(u_samplerUV, v_texture).rg - vec2(0.5, 0.5);
+
+    // BT.709, the standard for HDTV
+    rgb = mat3(      1,       1,      1,
+                     0, -.18732, 1.8556,
+               1.57481, -.46813,      0) * yuv;
+
+    gl_FragColor = vec4(rgb, 1);
+}
+
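One subtlety in the fragment shader: GLSL `mat3` constructors are column-major, so the three visual rows in the literal above are actually columns. Expanded, the shader computes:

    // scalar form of the mat3 multiply (Cb = yuv.y, Cr = yuv.z, both bias-corrected by 0.5)
    // R = Y                 + 1.57481 * Cr
    // G = Y - 0.18732 * Cb  - 0.46813 * Cr
    // B = Y + 1.8556  * Cb
    // e.g. Cb = Cr = 0 yields R = G = B = Y, a neutral gray pixel

which is the standard BT.709 YCbCr-to-RGB transform, matching the comment in the shader.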
diff --git a/Source/Shaders/Shader.vsh b/Source/Shaders/Shader.vsh
new file mode 100644
index 0000000..547df06
--- /dev/null
+++ b/Source/Shaders/Shader.vsh
@@ -0,0 +1,11 @@
+
+attribute vec4 a_position;
+attribute vec2 a_texture;
+
+varying vec2 v_texture;
+
+void main()
+{
+    v_texture = a_texture;
+    gl_Position = a_position;
+}
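For reference, the shader interface wired up in `-_setupGL` and `-_loadShaders` maps as follows (every name here appears in the diff above):

    // a_position   <- PBJVisionAttributeVertex       (aspect-scaled quad vertices)
    // a_texture    <- PBJVisionAttributeTextureCoord (flipped variant for the front camera)
    // u_samplerY   <- texture unit 0 (luma plane)
    // u_samplerUV  <- texture unit 1 (chroma plane)

The vertex stage is a pass-through; all of the work happens in the fragment stage's YUV-to-RGB conversion.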