
@ikonst
Created April 5, 2015 03:33
avfvideosrc: setjmp/longjmp patch
From c3199506fd03e77b61cd6fdfbd8184e1effb28ba Mon Sep 17 00:00:00 2001
From: Ilya Konstantinov <ilya.konstantinov@gmail.com>
Date: Sun, 5 Apr 2015 05:39:14 +0300
Subject: [PATCH] applemedia: setjmp/longjmp
Migrate the pipeline into the AVFoundation dispatch queue thread
to enable operation without context switching and unneeded yielding.
---
sys/applemedia/avfvideosrc.m | 101 ++++++++++++++++++++++++++++---------------
1 file changed, 66 insertions(+), 35 deletions(-)
diff --git a/sys/applemedia/avfvideosrc.m b/sys/applemedia/avfvideosrc.m
index 6aa084c..6819906 100644
--- a/sys/applemedia/avfvideosrc.m
+++ b/sys/applemedia/avfvideosrc.m
@@ -68,8 +68,8 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
));
typedef enum _QueueState {
- NO_BUFFERS = 1,
- HAS_BUFFER_OR_STOP_REQUEST,
+ RUNNING = 1,
+ STOP_REQUEST,
} QueueState;
#define gst_avf_video_src_parent_class parent_class
@@ -92,7 +92,8 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
dispatch_queue_t mainQueue;
dispatch_queue_t workerQueue;
NSConditionLock *bufQueueLock;
- NSMutableArray *bufQueue;
+ CMSampleBufferRef delegateSampleBuffer;
+ GstClockTime timestamp, duration;
BOOL stopRequest;
GstCaps *caps;
@@ -112,6 +113,9 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
BOOL useVideoMeta;
GstCoreVideoTextureCache *textureCache;
+
+ BOOL envCreateValid, envCaptureOutputValid;
+ jmp_buf env_create, env_captureOutput;
}
- (id)init;
@@ -184,6 +188,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
gst_base_src_set_live (baseSrc, TRUE);
gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
+
+ envCreateValid = FALSE;
+ envCaptureOutputValid = FALSE;
}
return self;
@@ -369,6 +376,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
gst_format = GST_VIDEO_FORMAT_YUY2;
break;
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: /* 420f (full range 420v) */
default:
GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
[[pixel_format stringValue] UTF8String]);
@@ -710,8 +718,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
- (BOOL)start
{
- bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
- bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
+ bufQueueLock = [[NSConditionLock alloc] initWithCondition:RUNNING];
stopRequest = NO;
offset = 0;
@@ -734,8 +741,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
[bufQueueLock release];
bufQueueLock = nil;
- [bufQueue release];
- bufQueue = nil;
inputClock = nil;
if (textureCache)
@@ -803,7 +808,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
[bufQueueLock lock];
stopRequest = YES;
- [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
+ [bufQueueLock unlockWithCondition:STOP_REQUEST];
return YES;
}
@@ -838,12 +843,11 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
- GstClockTime timestamp, duration;
-
- [bufQueueLock lock];
+ if (!envCreateValid) {
+ return;
+ }
if (stopRequest) {
- [bufQueueLock unlock];
return;
}
@@ -851,15 +855,14 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
[self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];
- if ([bufQueue count] == BUFFER_QUEUE_SIZE)
- [bufQueue removeLastObject];
+ delegateSampleBuffer = sampleBuffer;
- [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
- @"timestamp": @(timestamp),
- @"duration": @(duration)}
- atIndex:0];
-
- [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
+ if (!setjmp (env_captureOutput)) {
+ envCaptureOutputValid = TRUE;
+ longjmp (env_create, 1); // back to create:
+ } else {
+ return; /* continuing captureOutput */
+ }
}
- (GstFlowReturn)create:(GstBuffer **)buf
@@ -868,22 +871,48 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
CVImageBufferRef image_buf;
CVPixelBufferRef pixel_buf;
size_t cur_width, cur_height;
- GstClockTime timestamp, duration;
- [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
- if (stopRequest) {
+ if (!setjmp (env_create)) {
+ envCreateValid = TRUE; /* mark this as jumpable */
+
+ if (envCaptureOutputValid) {
+ /* We've come full circle. We've pushed one buffer into the
+ * pipeline, we've been called again by GstBaseSrc,
+ * and now we're ready to give the delegate an opportunity
+ * to get another sample buffer.
+ *
+ * Of course, at this point we're on the AVFoundation thread.
+ *
+ * longjmp into delegate
+ * captureOutput:didOutputSampleBuffer:fromConnection:
+ */
+ longjmp (env_captureOutput, 1);
+ }
+
+ /* The original pipeline thread is sleeping until it's time
+ * to flush the pipeline. */
+
+ [bufQueueLock lockWhenCondition:STOP_REQUEST];
+ g_assert (stopRequest);
[bufQueueLock unlock];
+
return GST_FLOW_FLUSHING;
+
+ } else {
+ /* Arriving in longjmp from delegate
+ * captureOutput:didOutputSampleBuffer:fromConnection:
+ *
+ * This is the delegate giving us an opportunity to run the pipeline.
+ * At this point, we're running on the AVFoundation thread.
+ */
+
+ sbuf = delegateSampleBuffer;
}
- NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
- sbuf = (CMSampleBufferRef) dic[@"sbuf"];
- timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
- duration = (GstClockTime) [dic[@"duration"] longLongValue];
- CFRetain (sbuf);
- [bufQueue removeLastObject];
- [bufQueueLock unlockWithCondition:
- ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
+ /* We have to create the autorelease pool here, now that we're
+ * on the AVFoundation thread, since autorelease pools seem to
+ * have thread affinity. */
+ NSAutoreleasePool *poolOnAvfThread = [[NSAutoreleasePool alloc] init];
/* Check output frame size dimensions */
image_buf = CMSampleBufferGetImageBuffer (sbuf);
@@ -916,11 +945,11 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
video_meta->format = format;
}
}
- CFRelease (sbuf);
if (textureCache != NULL) {
*buf = gst_core_video_texture_cache_get_gl_buffer (textureCache, *buf);
if (*buf == NULL) {
+ [poolOnAvfThread release];
return GST_FLOW_ERROR;
}
}
@@ -933,6 +962,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
if (doStats)
[self updateStatistics];
+ /* Remember to release the autorelease pool on every return */
+ [poolOnAvfThread release];
+
return GST_FLOW_OK;
}
@@ -941,7 +973,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
duration:(GstClockTime *)outDuration
{
CMSampleTimingInfo time_info;
- GstClockTime timestamp, duration, inputClockNow, running_time;
+ GstClockTime inputClockNow, running_time;
CMItemCount num_timings;
GstClock *clock;
CMTime now;
@@ -1318,9 +1350,8 @@ gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
GstFlowReturn ret;
- OBJC_CALLOUT_BEGIN ();
+ /* the autorelease pool is now managed inside create: itself */
ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
- OBJC_CALLOUT_END ();
return ret;
}
--
2.2.1
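
A note on the control transfer above: longjmp'ing from the AVFoundation
delegate into a jmp_buf saved on the pipeline thread (and back) is
effectively a coroutine switch between two stacks, which setjmp/longjmp
does not formally guarantee. The same producer/consumer handoff can be
sketched portably, on a single thread, with POSIX ucontext coroutines.
The sketch below is illustrative only; producer, current_sample and the
other names are invented for the demo and merely mirror the roles of
captureOutput:didOutputSampleBuffer:fromConnection:, delegateSampleBuffer,
env_create and env_captureOutput in the patch.

/* Standalone sketch of the handoff using <ucontext.h> coroutines.
 * None of these names exist in the patch. */
#define _XOPEN_SOURCE 700   /* ucontext is feature-guarded on some platforms */
#include <stdio.h>
#include <ucontext.h>

static ucontext_t consumer_ctx, producer_ctx;
static int current_sample;  /* stands in for delegateSampleBuffer */

/* Plays the role of the capture callback. */
static void
producer (void)
{
  for (int i = 1; i <= 3; i++) {
    current_sample = i;
    /* Hand the sample over, like longjmp (env_create, 1). */
    swapcontext (&producer_ctx, &consumer_ctx);
    /* Resumed here, like the setjmp (env_captureOutput) return path. */
  }
  current_sample = -1;      /* no more samples: ask the consumer to stop */
  swapcontext (&producer_ctx, &consumer_ctx);
}

int
main (void)
{
  static char stack[64 * 1024];

  getcontext (&producer_ctx);
  producer_ctx.uc_stack.ss_sp = stack;
  producer_ctx.uc_stack.ss_size = sizeof stack;
  producer_ctx.uc_link = &consumer_ctx;
  makecontext (&producer_ctx, producer, 0);

  for (;;) {
    /* Plays the role of repeated create: calls, resuming the producer
     * for the next sample, like longjmp (env_captureOutput, 1). */
    swapcontext (&consumer_ctx, &producer_ctx);
    if (current_sample < 0)
      break;
    printf ("create: got sample %d\n", current_sample);
  }
  return 0;
}

Running it prints one "create: got sample N" line per handoff, matching
the patch's pattern of one buffer pushed per delegate callback.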
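
On the autorelease pool comments: pools have thread affinity because each
thread keeps its own stack of pools, so a pool set up by
OBJC_CALLOUT_BEGIN on the pipeline thread cannot drain objects
autoreleased after execution has jumped to the AVFoundation thread. A
minimal hypothetical illustration of the per-thread pattern (MRC style,
as in the patch; do_work_on is not from the patch):

#import <Foundation/Foundation.h>

/* Hypothetical sketch: each thread creates and drains its own pool. */
static void
do_work_on (dispatch_queue_t queue)
{
  dispatch_async (queue, ^{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    /* ... anything autoreleased on this thread lands in this pool ... */
    [pool release];   /* drains only this thread's autoreleased objects */
  });
}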