Skip to content

Instantly share code, notes, and snippets.

@chika-kasymov
Created May 25, 2017 16:58
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save chika-kasymov/aac0d69d3c30154b5471668b234a399f to your computer and use it in GitHub Desktop.
#import "ARCaptureRenderTarget.h"
#import <CoreVideo/CoreVideo.h>
#import <AVFoundation/AVFoundation.h>
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#include <OpenGLES/ES3/gl.h>
#include <OpenGLES/ES3/glext.h>
// Render target that mirrors on-screen AR rendering into a CVPixelBuffer-backed
// OpenGL framebuffer so frames can be encoded to a movie file by AVAssetWriter.
@implementation ARCaptureRenderTarget {
// Core Video texture cache used to expose the asset writer's CVPixelBuffers
// as OpenGL ES textures (created in -setupFBO).
CVOpenGLESTextureCacheRef cvTextureCache;
}
// Designated initializer. Converts the requested point dimensions to pixel
// dimensions, then performs a throwaway record/stop cycle as a workaround for
// the first captured screenshot coming out black.
- (instancetype)initWithWidth:(float)width height:(float)height {
    self = [super initWithWidth:width height:height];
    if (!self) {
        return nil;
    }

    // Account for the scaling factor associated with some iOS devices.
    self.width *= [UIScreen mainScreen].scale;
    self.height *= [UIScreen mainScreen].scale;

    // TODO: small hack for now to take screenshot (without that screenshot is black)
    [self startRecording];
    [self stopRecording:^(NSURL *videoURL, double duration) {
        // Intentionally empty — the warm-up recording is discarded.
    }];

    return self;
}
// Creates and starts the AVAssetWriter that encodes rendered frames into an
// H.264 QuickTime movie sized to the framebuffer. Populates _assetWriter,
// _assetWriterPixelBufferInput, _currentVideoURL and _startDate.
- (void)setupAssetWriter {
    // Write the video file to the application's Documents directory,
    // replacing any stale file at the same path.
    NSURL *outputURL = [self videoURL];
    _currentVideoURL = outputURL;
    if ([[NSFileManager defaultManager] fileExistsAtPath:[outputURL path]]) {
        [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
    }

    NSError *outError = nil;
    _assetWriter = [AVAssetWriter assetWriterWithURL:outputURL
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&outError];
    // BUGFIX: check the returned object rather than the error pointer — the
    // out-error is only defined when creation fails, and may be non-nil on
    // success. Bail out so the code below does not message a nil writer's
    // inputs with a half-configured state.
    if (_assetWriter == nil) {
        NSAssert(NO, @"Error creating AVAssetWriter: %@", outError);
        return;
    }

    // Encode H.264 video at the framebuffer's pixel dimensions.
    NSDictionary *videoSettings = @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : @((int)self.width),
        AVVideoHeightKey : @((int)self.height),
    };
    AVAssetWriterInput *assetWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
    assetWriterInput.expectsMediaDataInRealTime = YES;

    // The render target supplies BGRA pixel buffers, the iOS-native format.
    NSDictionary *sourcePixelBufferAttributes = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey : @((int)self.width),
        (id)kCVPixelBufferHeightKey : @((int)self.height),
    };
    _assetWriterPixelBufferInput =
        [[AVAssetWriterInputPixelBufferAdaptor alloc]
            initWithAssetWriterInput:assetWriterInput
         sourcePixelBufferAttributes:sourcePixelBufferAttributes];

    // Add the input to the writer if possible.
    if ([_assetWriter canAddInput:assetWriterInput]) {
        [_assetWriter addInput:assetWriterInput];
    } else {
        NSAssert(NO, @"Error adding asset writer input");
    }

    // Start writing immediately; -draw stamps frames with wall-clock time
    // measured from _startDate, relative to the kCMTimeZero session start.
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    _startDate = [NSDate date];

    // Check the asset writer has started.
    if (_assetWriter.status == AVAssetWriterStatusFailed) {
        NSAssert(NO, @"Error starting asset writer %@", _assetWriter.error);
    }
}
// Builds the OpenGL framebuffer object whose color attachment is backed by a
// CVPixelBuffer from the asset writer's pool, so rendered frames reach the
// encoder without an extra copy. Requires -setupAssetWriter to have run first
// (it provides the pixel buffer pool).
- (void)setupFBO {
    // Make the renderer context current, necessary to create any new OpenGL objects.
    [[ARRenderer getInstance] useContext];

    // Create the FBO.
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &_fbo);
    [self bindBuffer];

    // Create the OpenGL texture cache that maps CVPixelBuffers to GL textures.
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &cvTextureCache);
    if (err) {
        NSAssert(NO, @"Error creating CVOpenGLESTextureCacheCreate %d", err);
    }

    // Obtain the pixel buffer we will be rendering to from the adaptor's pool.
    CVPixelBufferPoolRef pixelBufferPool = [_assetWriterPixelBufferInput pixelBufferPool];
    err = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &_pixelBuffer);
    if (err) {
        NSAssert(NO, @"Error creating CVPixelBufferPoolCreatePixelBuffer %d", err);
    }

    // Wrap the pixel buffer in an OpenGL texture.
    // BUGFIX: the return code was previously ignored; on failure renderTexture
    // stayed uninitialized and the GL calls below dereferenced garbage.
    CVOpenGLESTextureRef renderTexture = NULL;
    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cvTextureCache, _pixelBuffer,
                                                       NULL, // texture attributes
                                                       GL_TEXTURE_2D,
                                                       GL_RGBA, // opengl format
                                                       (int)self.width,
                                                       (int)self.height,
                                                       GL_BGRA, // native iOS format
                                                       GL_UNSIGNED_BYTE,
                                                       0,
                                                       &renderTexture);
    if (err || renderTexture == NULL) {
        NSAssert(NO, @"Error creating CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        return;
    }

    // Attach the OpenGL texture to the framebuffer.
    glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
    // NOTE(review): renderTexture and the depth renderbuffer below are never
    // released, so each start/stop cycle leaks them. The texture must outlive
    // the FBO attachment, so the proper fix is to store them in ivars and
    // release them when recording tears down — confirm against the class
    // lifecycle before changing.

    // Create a depth buffer for correct drawing.
    GLuint depthRenderbuffer;
    glGenRenderbuffers(1, &depthRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8_OES, self.width, self.height);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);

    // Check the FBO is complete and ready for rendering.
    [self checkFBO];
}
// Binds the capture FBO so subsequent draw calls render into the
// CVPixelBuffer-backed texture instead of the default framebuffer.
- (void)bindBuffer {
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
}
// Renders the frame as usual, then appends the resulting pixel buffer to the
// recording, stamped with the wall-clock time elapsed since the writer started.
- (void)draw {
    // Draw content to the framebuffer as normal.
    [super draw];

    // Skip encoding when there is no writer, when the writer is not accepting
    // samples, or while the recording is being finalized.
    if (self.assetWriter == nil) {
        return;
    }
    if (_isVideoFinishing || self.assetWriter.status != AVAssetWriterStatusWriting) {
        return;
    }

    // Block until the GPU has finished rendering into the pixel buffer.
    glFinish();

    // The buffer must stay locked while the encoder reads from it.
    CVPixelBufferLockBaseAddress(_pixelBuffer, 0);
    _currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:_startDate], 120);
    BOOL appended = [_assetWriterPixelBufferInput appendPixelBuffer:_pixelBuffer
                                               withPresentationTime:_currentTime];
    if (!appended) {
        NSLog(@"Problem appending pixel buffer at time: %lld", _currentTime.value);
    }
    CVPixelBufferUnlockBaseAddress(_pixelBuffer, 0);

    // Report the running duration of the recorded video to any observer.
    if (_onProgressChange != nil) {
        _onProgressChange(CMTimeGetSeconds(_currentTime));
    }
}
#pragma mark - Helpers
// Returns a unique file URL in the app's Documents directory for the next
// recording, e.g. .../Documents/video_1495731538.000000.mov.
// The timestamp makes collisions between consecutive recordings unlikely.
- (NSURL *)videoURL {
    // Prefer the URL-based NSFileManager API over string path formatting.
    NSURL *documentsURL = [[[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory
                                                                  inDomains:NSUserDomainMask] firstObject];
    NSTimeInterval timestamp = [[NSDate date] timeIntervalSince1970];
    // Keep the original filename format so anything matching the old pattern
    // (cleanup code, tests) continues to work.
    NSString *fileName = [NSString stringWithFormat:@"video_%08f.mov", timestamp];
    return [documentsURL URLByAppendingPathComponent:fileName];
}
#pragma mark - Methods
// Begins a new capture session. Order matters: the asset writer must exist
// before -setupFBO, which pulls its render target from the writer's pixel
// buffer pool. Finally clears the flag that -draw uses to gate encoding.
- (void)startRecording {
// Set up the required render target assets.
[self setupAssetWriter];
[self setupFBO];
_isVideoFinishing = NO;
}
// Finalizes the recording asynchronously.
// @param completion Invoked (on the writer's internal queue) with the movie's
//        file URL and duration in seconds once writing finishes; may be nil.
- (void)stopRecording:(void (^)(NSURL *videoURL, double duration))completion {
    // Tell -draw to stop appending frames while the writer finishes.
    _isVideoFinishing = YES;
    [self.assetWriter finishWritingWithCompletionHandler:^{
        NSLog(@"Finished writing video.");
        // BUGFIX: guard before invoking — calling a nil block crashes.
        if (completion) {
            completion(_currentVideoURL, CMTimeGetSeconds(_currentTime));
        }
        _currentTime = kCMTimeZero;
    }];
}
@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment