Skip to content

Instantly share code, notes, and snippets.

@20m61
Created December 10, 2012 09:02
Show Gist options
  • Save 20m61/4249496 to your computer and use it in GitHub Desktop.
Save 20m61/4249496 to your computer and use it in GitHub Desktop.
カメラ習作(とりあえず完成版)
//
// ViewController.h
// 121204
//
// Created by changhwi on 12/12/04.
// Copyright (c) 2012年 changhwi. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
// View size in points (read from self.view at runtime).
// NOTE(review): these are plain globals defined at file scope in a header;
// importing this header from more than one translation unit would cause
// duplicate-symbol link errors — consider statics or ivars. Confirm usage.
int height;
int width;
// Center coordinates of the view.
int centerHeight;
int centerWidth;
// Current RGBA channel values driven by the four sliders (0.0 - 1.0).
float valueR;
float valueG;
float valueB;
float valueA;
// Toggle alternating between front (0) and back (1) camera in -cameraRun.
int buttonCount;
// Capture devices discovered in -cameraLoad.
AVCaptureDevice *frontCamera;
AVCaptureDevice *backCamera;
NSArray *devices;
NSString *cameraDevice;
/// View controller that previews the camera feed in an image view and lets
/// the user tint the background through four RGBA sliders.
@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate, UITextFieldDelegate>
{
AVCaptureSession *session; // Capture session owning camera input/output.
UIView *myView; // NOTE(review): declared but apparently unused in the .m below.
}
@property (strong, nonatomic) IBOutlet UIView *backgroundView; // Tinted by the sliders.
@property (strong, nonatomic) IBOutlet UIImageView *cameraView; // Shows each captured frame.
/// Switches between front and back camera. ("Chenge" typo kept: renaming
/// would break any Interface Builder connection to this action.)
- (IBAction)cameraChenge:(id)sender;
/// AVCaptureVideoDataOutput delegate callback delivering each video frame.
- (void)captureOutput:(AVCaptureOutput*)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection*)connection;
@end
//
// ViewController.m
// 121204
//
// Created by changhwi on 12/12/04.
// Copyright (c) 2012年 changhwi. All rights reserved.
//
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
/// Builds the UI (sliders + text field), starts the camera, and applies the
/// initial background colour.
- (void)viewDidLoad
{
[super viewDidLoad];
// Start at 1 so the first -cameraRun toggles it to 0 and picks the front camera.
buttonCount = 1;
[self drawSlider];
[self drawTextField];
[self cameraRun];
[self viewUpdate];
}
/// Default memory-warning handling; nothing extra to release here.
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
}
/// Re-applies the slider-driven RGBA globals to the background view's colour.
- (void)viewUpdate{
self.backgroundView.backgroundColor = [UIColor colorWithRed:valueR green:valueG blue:valueB alpha:valueA];
}
#pragma mark - textField
/// Adds a single-line text field, 90% of the view's width, centered
/// horizontally near the top of the view.
-(void)drawTextField{
    // Only the horizontal metrics are needed here; the original also computed
    // (unused) vertical metrics, which have been removed.
    int width = self.view.frame.size.width;
    int centerWidth = width / 2;
    int textFieldWidth = width / 10 * 9;
    UITextField *myTextField =
        [[UITextField alloc] initWithFrame:CGRectMake(centerWidth - textFieldWidth / 2, 15, textFieldWidth, 60)];
    myTextField.font = [UIFont fontWithName:@"HiraKakuProN-W6" size:[UIFont buttonFontSize]];
    myTextField.borderStyle = UITextBorderStyleNone;
    // Receive -textFieldShouldReturn: so the keyboard can be dismissed.
    myTextField.delegate = self;
    [self.view addSubview:myTextField];
}
/// UITextFieldDelegate: dismiss the keyboard when the return key is tapped.
-(BOOL)textFieldShouldReturn:(UITextField*)textField{
    [textField resignFirstResponder];
    return YES;
}
#pragma mark - slider
/// Lays out four vertical sliders (R, G, B, A) across the view and seeds the
/// global colour channels with 0.5. The original four copy-pasted setup
/// sections are replaced by a loop over a private helper.
- (void)drawSlider{
    // View size.
    int height = self.view.frame.size.height;
    int width = self.view.frame.size.width;
    // Vertical center of the view.
    int centerHeight = height / 2;
    // Slider geometry: each slider is rotated 270 degrees, so its nominal
    // width becomes its on-screen height.
    int sliderHeight = 0;
    int sliderWidth = height / 5 * 4;
    // Offset that re-centres the rotated slider.
    int sliderFix = sliderWidth / 2;
    // Horizontal spacing between sliders.
    int sliderMargin = width / 5;
    // Initial value shared by all RGBA channels.
    float firstValue = 0.5;
    valueR = valueG = valueB = valueA = firstValue;
    // Rotate 270 degrees to make the sliders vertical.
    CGAffineTransform trans = CGAffineTransformMakeRotation(M_PI * 270 / 180.0f);

    // One slider per colour channel, each wired to its own callback.
    SEL actions[4] = {@selector(sliderRChengeValue:),
                      @selector(sliderGChengeValue:),
                      @selector(sliderBChengeValue:),
                      @selector(sliderAChengeValue:)};
    for (int i = 0; i < 4; i++) {
        CGRect frame = CGRectMake(sliderMargin * (i + 1) - sliderFix,
                                  centerHeight, sliderWidth, sliderHeight);
        [self addColorSliderWithFrame:frame
                            transform:trans
                                value:firstValue
                               action:actions[i]];
    }
}

/// Creates one rotated 0.0-1.0 slider, wires it to the given action for all
/// touch events, and adds it to the view hierarchy.
- (void)addColorSliderWithFrame:(CGRect)frame
                      transform:(CGAffineTransform)transform
                          value:(float)value
                         action:(SEL)action{
    UISlider *slider = [[UISlider alloc] initWithFrame:frame];
    slider.value = value;
    slider.maximumValue = 1.0;
    slider.minimumValue = 0.0;
    slider.transform = transform;
    [slider addTarget:self
               action:action
     forControlEvents:UIControlEventAllTouchEvents];
    [self.view addSubview:slider];
}
/// Slider callbacks. Each copies its slider's current value into the matching
/// global colour channel, then re-tints the background. (The "Chenge"
/// spelling is kept because -drawSlider targets these exact selectors.)
- (void)sliderRChengeValue:(id)sender{
    valueR = ((UISlider *)sender).value;
    [self viewUpdate];
}
- (void)sliderGChengeValue:(id)sender{
    valueG = ((UISlider *)sender).value;
    [self viewUpdate];
}
- (void)sliderBChengeValue:(id)sender{
    valueB = ((UISlider *)sender).value;
    [self viewUpdate];
}
- (void)sliderAChengeValue:(id)sender{
    valueA = ((UISlider *)sender).value;
    [self viewUpdate];
}
#pragma mark - camera
/// (Re)creates the capture session and discovers the device's cameras,
/// caching them in the frontCamera / backCamera globals.
- (void)cameraLoad{
/**
 Alloc & init the AVCaptureSession (the object that mediates all
 communication with the camera).
 ---------------------------------------------
 */
session = [[AVCaptureSession alloc] init];
/**
 Choose the input device to register with the session:
 - prefer the front-facing camera when available
 - fall back to the back camera when there is no front camera
 - if neither exists, there is nothing to capture
 ---------------------------------------------
 */
// Enumerate every capture device attached to this iOS device.
// NOTE(review): this local `devices` shadows the file-scope global of the
// same name declared in the header.
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
NSLog(@"Device name: %@", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(@"Device position : back");
backCamera = device;
}
else {
// Treat anything that is not the back camera as the front camera.
NSLog(@"Device position : front");
frontCamera = device;
}
}
}
// Handle devices that have no camera at all.
if ([devices count] == 0)
{
// A more fine-grained capability check is possible, but since only a
// video-capable front or back camera is wanted, count == 0 is treated
// as "no usable camera".
NSLog(@"カメラがついてない ....");
}
}
/// Attaches the front camera (discovered by -cameraLoad) to the session.
- (void)cameraSelectFront{
    NSError *error = nil;
    AVCaptureDeviceInput *frontFacingCameraDeviceInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
    // Cocoa convention: check the returned object, not the error pointer —
    // the error is only meaningful when the factory method returns nil.
    if (frontFacingCameraDeviceInput == nil) {
        NSLog(@"Couldn't create front facing video input: %@", error);
        return;
    }
    if ([session canAddInput:frontFacingCameraDeviceInput]) {
        [session addInput:frontFacingCameraDeviceInput];
    }
    else {
        NSLog(@"Couldn't add front facing video input");
    }
}
/// Attaches the back camera (discovered by -cameraLoad) to the session.
- (void)cameraSelectBack{
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraDeviceInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
    // Cocoa convention: check the returned object, not the error pointer —
    // the error is only meaningful when the factory method returns nil.
    if (backCameraDeviceInput == nil) {
        NSLog(@"Couldn't create back camera video input: %@", error);
        return;
    }
    if ([session canAddInput:backCameraDeviceInput]) {
        [session addInput:backCameraDeviceInput];
    }
    else {
        // Fixed copy/paste log message: this is the back camera path.
        NSLog(@"Couldn't add back camera video input");
    }
}
/// Registers a video data output on the session (32BGRA frames delivered to
/// the -captureOutput:... delegate on the main queue) and starts the session.
- (void)cameraSetting{
    // Create the video data output.
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Request BGRA pixel buffers (modern literal syntax replaces the mutable
    // dictionary dance) so captureOutput can feed frames straight into a
    // CGBitmapContext.
    dataOutput.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };
    // Deliver every frame on the main queue; the delegate updates UIKit views.
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    // Hand the output to the session (inputs were added by cameraSelect*).
    if ([session canAddOutput:dataOutput]) {
        [session addOutput:dataOutput];
    }
    else {
        NSLog(@"Couldn't add video data output");
    }
    // Start capturing.
    [session startRunning];
}
/// AVCaptureVideoDataOutput delegate: converts each 32BGRA frame into a
/// UIImage and shows it in cameraView. Frames arrive on the main queue
/// (configured in -cameraSetting), so touching UIKit here is safe.
- (void)captureOutput:(AVCaptureOutput*)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection*)connection
{
// Grab the pixel buffer for this frame.
CVImageBufferRef buffer;
buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the buffer's base address before reading pixel data.
CVPixelBufferLockBaseAddress(buffer, 0);
// Read the buffer's geometry and raw base pointer.
uint8_t* base;
size_t width, height, bytesPerRow;
base = CVPixelBufferGetBaseAddress(buffer);
width = CVPixelBufferGetWidth(buffer);
height = CVPixelBufferGetHeight(buffer);
bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
// Wrap the raw BGRA bytes in a bitmap context.
// Colour space:
CGColorSpaceRef colorSpace;
// Geometry / pixel layout:
CGContextRef cgContext;
colorSpace = CGColorSpaceCreateDeviceRGB();
cgContext = CGBitmapContextCreate(
base, width, height, 8, bytesPerRow, colorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGColorSpaceRelease(colorSpace);
// Snapshot the context into a CGImage, then wrap it in a UIImage.
// NOTE(review): UIImageOrientationRight presumably compensates for the
// camera sensor's native landscape orientation — confirm on device.
CGImageRef cgImage;
UIImage* image;
cgImage = CGBitmapContextCreateImage(cgContext);
image = [UIImage imageWithCGImage:cgImage scale:1.0f
orientation:UIImageOrientationRight];
CGImageRelease(cgImage);
CGContextRelease(cgContext);
// Unlock the buffer now that the pixels have been copied out.
CVPixelBufferUnlockBaseAddress(buffer, 0);
// Display the frame.
_cameraView.image = image;
}
/// Stops the current session, rebuilds it via -cameraLoad, and attaches the
/// next camera: alternates front (buttonCount == 0) / back (buttonCount == 1)
/// on every call.
- (void)cameraRun{
    [session stopRunning];
    [self cameraLoad];
    // Toggle between 0 and 1 (replaces the original two-branch increment/reset).
    buttonCount = (buttonCount == 0) ? 1 : 0;
    if (buttonCount == 0) {
        [self cameraSelectFront];
        NSLog(@"complete front camera setting %d", buttonCount);
    }
    else {
        // buttonCount == 1: the two values are mutually exclusive, so an
        // if/else replaces the original pair of independent ifs.
        [self cameraSelectBack];
        // Fixed garbled log message ("complete front back setting").
        NSLog(@"complete back camera setting %d", buttonCount);
    }
    [self cameraSetting];
}
/// Camera-switch action. ("Chenge" typo kept: renaming the IBAction would
/// require re-wiring any Interface Builder connection to it.)
- (IBAction)cameraChenge:(id)sender {
[self cameraRun];
}
@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment