Skip to content

Instantly share code, notes, and snippets.

@depthlove
Forked from nhisyam/ColorDetect.m
Created October 11, 2017 17:21
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save depthlove/6b6cfe39817bd516503bf770f3ed8056 to your computer and use it in GitHub Desktop.
Save depthlove/6b6cfe39817bd516503bf770f3ed8056 to your computer and use it in GitHub Desktop.
Code snippets to show color detection problem using GPUImage
#import "ColorDetectVC.h"
// Private pipeline state for the GPUImage capture/filter chain.
// NOTE(review): positionRawData.delegate is set to self in
// -configureVideoFiltering:, so this extension should also declare
// conformance to GPUImage's raw-data delegate protocol — confirm against
// the GPUImage version in use.
@interface ColorDetectVC ()
{
GPUImageVideoCamera *videoCamera;   // owns the AVFoundation capture session
GPUImageFilter *rotationFilter;     // rotates the landscape camera feed upright
GPUImageView *filteredVideoView;    // on-screen preview of the filtered feed
GPUImageRawData *positionRawData;   // CPU-readable pixel tap used for color sampling
}
@end
@implementation ColorDetectVC
// NOTE(review): with modern auto-synthesis this @synthesize is redundant
// (the _centerView ivar is generated automatically); harmless to keep.
@synthesize centerView = _centerView;
// Designated initializer. No custom setup is required beyond what
// UIViewController already does; the capture pipeline is built lazily in
// -viewDidLoad.
// Fix: returns instancetype rather than id, per Cocoa convention (enables
// correct type inference at call sites and clean Swift import).
- (instancetype)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // No additional initialization required.
    }
    return self;
}
// View lifecycle: build and start the GPUImage capture pipeline as soon as
// the view hierarchy is loaded.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Begin capturing from the rear-facing camera.
    [self configureVideoFiltering:AVCaptureDevicePositionBack];
}
// Legacy lifecycle hook (deprecated since iOS 6 and never called on modern
// iOS). On old systems it releases the outlet after a memory warning so the
// nib can be reloaded later.
- (void)viewDidUnload
{
[self setCenterView:nil];
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
// Legacy (pre-iOS 6) rotation API: this screen supports only upright
// portrait. Deliberately excludes UIInterfaceOrientationPortraitUpsideDown.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if (interfaceOrientation == UIInterfaceOrientationPortrait) {
        return YES;
    }
    return NO;
}
// Builds and starts the capture pipeline:
//
//   camera --> rotation filter --> { raw-data tap (color sampling),
//                                    on-screen preview view }
//
// @param devicePosition Which camera to capture from (front or back).
- (void)configureVideoFiltering:(AVCaptureDevicePosition)devicePosition
{
    CGRect screenFrame = [[UIScreen mainScreen] applicationFrame];

    videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:devicePosition];

    // Full-screen preview, pushed behind any overlay views from the nib.
    filteredVideoView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, screenFrame.size.width, screenFrame.size.height)];
    [self.view addSubview:filteredVideoView];
    [self.view sendSubviewToBack:filteredVideoView];

    // The camera delivers landscape frames; rotate them upright for portrait.
    rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];

    // After rotation the 640x480 feed is 480x640 in pixels.
    CGSize rotatedPixelSize = CGSizeMake(480.0, 640.0);
    positionRawData = [[GPUImageRawData alloc] initWithImageSize:rotatedPixelSize];
    positionRawData.delegate = self;

    // Fan the rotated output out to both the CPU tap and the preview.
    [videoCamera addTarget:rotationFilter];
    [rotationFilter addTarget:positionRawData];
    [rotationFilter addTarget:filteredVideoView];

    [videoCamera startCameraCapture];
}
#pragma mark - GPUImageRawDataProcessor

// Raw-data delegate callback, invoked by GPUImage for every processed frame.
// NOTE: GPUImage delivers this on its processing queue, not the main thread.
//
// Fixes:
// 1. colorAtLocation: expects coordinates in the raw buffer's own pixel
//    space (480x640, as configured in -configureVideoFiltering:), not in
//    screen points. Sampling at the screen-point midpoint read the wrong
//    pixel — this was the source of the color-detection problem. We now
//    sample the center of the raw image itself.
// 2. All UIKit work (UIScreen, setFrame:, setBackgroundColor:) is dispatched
//    to the main queue; touching UIKit from a background queue is undefined.
- (void)newImageFrameAvailableFromDataSource:(GPUImageRawData *)rawDataSource
{
    if (rawDataSource != positionRawData) {
        return;
    }

    // Center of the 480x640 raw pixel buffer (size set when positionRawData
    // was created). NOTE(review): if the raw size ever changes, derive this
    // from the data source instead of hard-coding.
    CGPoint samplePoint = CGPointMake(240.0, 320.0);
    GPUByteColorVector colorVector = [rawDataSource colorAtLocation:samplePoint];

    float red = (float)colorVector.red;
    float green = (float)colorVector.green;
    float blue = (float)colorVector.blue;
    float alpha = (float)colorVector.alpha;
    NSLog(@"R:%f G:%f B:%f A:%f",red, green, blue, alpha);

    // Byte components are 0-255; UIColor wants 0.0-1.0.
    UIColor *color = [UIColor colorWithRed:red/255.0 green:green/255.0 blue:blue/255.0 alpha:alpha/255.0];

    dispatch_async(dispatch_get_main_queue(), ^{
        CGRect mainScreenFrame = [[UIScreen mainScreen] applicationFrame];
        CGPoint centerPosition = CGPointMake(CGRectGetMidX(mainScreenFrame), CGRectGetMidY(mainScreenFrame));

        // NOTE(review): this places the view's top-left corner at the screen
        // midpoint (original behavior, preserved). If the intent is to center
        // the view, set _centerView.center instead — confirm with the author.
        CGRect rect = _centerView.frame;
        rect.origin.x = centerPosition.x;
        rect.origin.y = centerPosition.y;
        [_centerView setFrame:rect];
        [_centerView setBackgroundColor:color];
    });
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment