// Extracts the luma (Y) plane from a CMSampleBufferRef as a single-channel
// grayscale image, optionally mirrors it horizontally (FLIPIMAGE), runs
// keypoint/contour detection, and delivers the results to the delegate on the
// main queue.
//
// NOTE(review): assumes the pixel buffer uses a planar format (e.g. 420v/420f)
// where plane 0 is luminance — confirm against the capture session's
// videoSettings. The crop rect below hard-codes a 1920x1080 source frame.
- (void) toSingleChannel:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL)
    {
        // Sample buffer carries no image data (e.g. dropped frame) — nothing to do.
        return;
    }
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    size_t width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
    size_t height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    Pixel_8 *lumaBuffer = (Pixel_8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
#if FLIPIMAGE
    const vImage_Buffer imagebuf = {lumaBuffer, height, width, bytesPerRow};
    // BUG FIX: was malloc(width * height * bytesPerRow) — a factor-of-width
    // over-allocation. A Planar8 destination needs exactly height * bytesPerRow.
    unsigned char *makeBuf = (unsigned char *)malloc( height * bytesPerRow );
    if (makeBuf == NULL)
    {
        NSLog(@"toSingleChannel: failed to allocate reflection buffer");
        CVPixelBufferUnlockBaseAddress( imageBuffer, 0 );
        return;
    }
    const vImage_Buffer outbuf = {makeBuf, height, width, bytesPerRow};
    vImage_Flags flags = 0; // Flags for vImage function
    vImage_Error err = vImageHorizontalReflect_Planar8(&imagebuf, &outbuf,
                                                       flags );
    if (err!=kvImageNoError)
    {
        NSLog(@"vImageHorizontalReflect_Planar8 exited with code %ld",err);
    }
    // Wrap the reflected plane without copying; passing rowBytes preserves any
    // per-row padding the pixel buffer carries.
    cv::Mat grayImage((int)outbuf.height, (int)outbuf.width, CV_8U, outbuf.data, outbuf.rowBytes);
    // FIXME(review): croppedRef is computed but never used — the resize below
    // operates on the full frame. If a centered 1280x800 crop was intended,
    // resize croppedRef instead of grayImage.
    cv::Mat croppedRef(grayImage, cv::Rect(1920.f/2.f - 1280.f/2.f, 1080.f/2.f - 800.f/2.f, 1280.f, 800.f ));
    // cv::resize allocates a new destination buffer, so after this call
    // grayImage no longer aliases makeBuf and the free() below is safe.
    cv::resize(grayImage, grayImage, cv::Size(640, 400));
    detect(grayImage, &keyPoints, &approxContours );
    free(makeBuf);
#else
    vImage_Buffer imagebuf = {lumaBuffer, height, width, bytesPerRow};
    // Wrap the luma plane in place (zero-copy); valid while the base address is locked.
    cv::Mat grayImage((int)imagebuf.height, (int)imagebuf.width, CV_8U, imagebuf.data, imagebuf.rowBytes);
    // FIXME(review): croppedRef is computed but never used — see note in the
    // FLIPIMAGE branch above.
    cv::Mat croppedRef(grayImage, cv::Rect(1920.f/2.f - 1280.f/2.f, 1080.f/2.f - 800.f/2.f, 1280.f, 800.f ));
    cv::resize(grayImage, grayImage, cv::Size(640, 400));
    detect(grayImage, &keyPoints, &approxContours );
#endif
    // Convert the cv::KeyPoint results into Cocoa Keypoint objects for the delegate.
    cv::vector<cv::KeyPoint>::iterator it;
    NSMutableArray *keypointsArray = [[NSMutableArray alloc] init];
    for( it= keyPoints.begin(); it!= keyPoints.end(); ++it)
    {
        Keypoint *thisKeypoint = [[Keypoint alloc] init];
        thisKeypoint.angle = it->angle;
        thisKeypoint.class_id = it->class_id;
        thisKeypoint.octave = it->octave;
        thisKeypoint.pt = CGPointMake(it->pt.x, it->pt.y);
        thisKeypoint.response = it->response;
        thisKeypoint.size = it->size;
        [keypointsArray addObject:thisKeypoint];
    }
    // Convert each contour (vector of points) into a dictionary of NSValue points.
    NSMutableArray *contoursArray = [NSMutableArray array];
    for ( cv::vector<cv::vector<cv::Point> >::iterator it1 = approxContours.begin(); it1 != approxContours.end(); ++it1 )
    {
        NSMutableArray *contourPoints = [NSMutableArray array];
        for ( std::vector<cv::Point>::iterator it2 = (*it1).begin(); it2 != (*it1).end(); ++it2 )
        {
            NSPoint thisPoint = CGPointMake( it2->x, it2->y );
            [contourPoints addObject:[NSValue valueWithPoint:thisPoint]];
        }
        NSDictionary *contourDict = @{@"points": contourPoints};
        [contoursArray addObject:contourDict];
    }
    // Deliver results on the main queue; optional delegate methods are guarded.
    [[NSOperationQueue mainQueue] addOperationWithBlock:^{
        if( [self.delegate respondsToSelector:@selector(captureModelDidFindKeypoints:)] )
        {
            [self.delegate captureModelDidFindKeypoints:keypointsArray];
        }
        if( [self.delegate respondsToSelector:@selector(captureModelDidFindContours:)] )
        {
            [self.delegate captureModelDidFindContours:contoursArray];
        }
    }];
    CVPixelBufferUnlockBaseAddress( imageBuffer, 0 );
    // swap-with-empty releases the ivar vectors' capacity, not just their size.
    cv::vector< cv::KeyPoint>().swap(keyPoints);
    cv::vector< cv::vector <cv::Point> >().swap(approxContours);
    grayImage.release();
    croppedRef.release();
    // CGContextRelease(context);
}
Created June 20, 2018 20:42
Save voxels/96efadcdf4e5b178a3bf20cc278dbb80 to your computer and use it in GitHub Desktop.
Converting a CMSampleBufferRef to a single channel of color
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.