Last active
September 2, 2019 17:38
-
-
Save manchan/d270366af589034290d5 to your computer and use it in GitHub Desktop.
メガネ美女子が流し目でシャッター by JINS MEME http://yuichi-dev.blogspot.com/2015/12/blink-sidelong-glance-shoot-by-glasses-jins-meme.html
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//
//  PhotoShootViewController.swift
//  JinsMemeDemo
//
//  Blog post (Japanese):
//  http://yuichi-dev.blogspot.com/2015/12/blink-sidelong-glance-shoot-by-glasses-jins-meme.html
//
//  Created by matz on 2015/12/08.
//  Copyright © 2015 matz. All rights reserved.
//
import UIKit
import AVFoundation
/// Camera view controller for the JINS MEME glasses demo.
///
/// Shows a full-screen front-camera preview; a tap on the screen, or a
/// sidelong glance reported by the glasses (`memeRealTimeModeDataReceived`),
/// takes a still photo, saves it to the photo album, and offers the JPEG to
/// other apps (Instagram) through a `UIDocumentInteractionController`.
class PhotoShootViewController: UIViewController, UIGestureRecognizerDelegate, UIDocumentInteractionControllerDelegate {

    // MARK: - Capture pipeline state

    var input: AVCaptureDeviceInput!
    var output: AVCaptureStillImageOutput!
    var session: AVCaptureSession!
    var preView: UIView!
    var camera: AVCaptureDevice!
    // Most recent real-time sample from the JINS MEME SDK (project type).
    var latestRealTimeData: MEMERealTimeData?
    // Hands the captured JPEG off to other apps (Instagram).
    var interactionController: UIDocumentInteractionController!
    // true while the "Open In" menu is NOT on screen; gates the shutter so we
    // don't capture while the share menu is being presented.
    var interactionIsOPEN: Bool = true

    // MARK: - Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        // Tap anywhere on screen to trigger the shutter.
        let tapGesture = UITapGestureRecognizer(target: self, action: "tapped:")
        tapGesture.delegate = self
        self.view.addGestureRecognizer(tapGesture)
    }

    override func viewWillAppear(animated: Bool) {
        // FIX: the required super call was missing.
        super.viewWillAppear(animated)
        // Build the preview surface, then the capture session.
        setupDisplay()
        setupCamera()
    }

    override func viewDidDisappear(animated: Bool) {
        // FIX: the required super call was missing.
        super.viewDidDisappear(animated)
        // Tear the session down to release the camera and its memory.
        session.stopRunning()
        for output in session.outputs {
            session.removeOutput(output as? AVCaptureOutput)
        }
        for input in session.inputs {
            session.removeInput(input as? AVCaptureInput)
        }
        session = nil
        camera = nil
    }

    // MARK: - JINS MEME SDK callback

    /// Called by the MEME SDK with each real-time sensor sample.
    /// A sidelong glance (eye movement to either side) fires the shutter.
    func memeRealTimeModeDataReceived(data: MEMERealTimeData) {
        self.latestRealTimeData = data
        // FIX: compare the values directly instead of round-tripping each one
        // through NSString(format: "%d", …) != "0" — identical "non-zero"
        // semantics (the original formatted them with %d, i.e. as integers).
        if data.eyeMoveRight != 0 || data.eyeMoveLeft != 0 {
            print("視線移動なう!")
            self.takeStillPicture()
        }
    }

    // MARK: - Setup

    /// Creates the full-screen view that hosts the camera preview.
    func setupDisplay() {
        let screenBounds = UIScreen.mainScreen().bounds
        preView = UIView(frame: CGRectMake(0.0, 0.0, screenBounds.size.width, screenBounds.size.height))
    }

    /// Builds the capture session: front camera in, still-image out,
    /// preview layer on top of the view.
    func setupCamera() {
        session = AVCaptureSession()
        // Pick the front camera (use .Back here for the rear camera instead).
        for captureDevice: AnyObject in AVCaptureDevice.devices() {
            if captureDevice.position == AVCaptureDevicePosition.Front {
                camera = captureDevice as? AVCaptureDevice
            }
        }
        // FIX: bail out instead of crashing when no front camera exists
        // (e.g. the simulator) — AVCaptureDeviceInput would trap on nil.
        guard camera != nil else {
            print("no front camera available")
            return
        }
        do {
            input = try AVCaptureDeviceInput(device: camera)
        } catch let error as NSError {
            print(error)
            // FIX: the original fell through and crashed unwrapping nil input.
            return
        }
        if session.canAddInput(input) {
            session.addInput(input)
        }
        output = AVCaptureStillImageOutput()
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        // Preview layer; removing it only hides the preview — capture still works.
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = preView.frame
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)
        session.startRunning()
    }

    // MARK: - Shutter

    /// Tap gesture handler — fires the shutter.
    func tapped(sender: UITapGestureRecognizer) {
        print("タップ")
        takeStillPicture()
    }

    /// Captures a still image, saves it to the album, writes it to
    /// Documents/insta.igo, and presents the "Open In" menu for it.
    func takeStillPicture() {
        // FIX: `if let _ = interactionIsOPEN as Bool?` was always true
        // (interactionIsOPEN is a non-optional Bool), so the guard against
        // shooting while the share menu is up never worked. Test the value.
        if !interactionIsOPEN {
            return
        }
        guard let connection = output.connectionWithMediaType(AVMediaTypeVideo) else {
            return
        }
        // Grab the frame asynchronously from the still-image output.
        output.captureStillImageAsynchronouslyFromConnection(connection, completionHandler: { (imageDataBuffer, error) -> Void in
            // Capture failed — nothing to convert.
            guard let buffer = imageDataBuffer as CMSampleBuffer? else {
                return
            }
            // Sample buffer -> JPEG bytes -> UIImage.
            let imageData: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
            // FIX: don't force-unwrap the decoded image.
            guard let image = UIImage(data: imageData) else {
                return
            }
            // Save to the photo album.
            UIImageWriteToSavedPhotosAlbum(image, self, nil, nil)
            // Write a JPEG with the .igo extension Instagram's "Open In" expects.
            let jpegData: NSData = UIImageJPEGRepresentation(image, 0.75)!
            let filePath = NSHomeDirectory().stringByAppendingString("/Documents/insta.igo")
            jpegData.writeToFile(filePath, atomically: true)
            let fileUrl = NSURL(fileURLWithPath: filePath)
            // Hand the file to other apps; this UTI restricts the menu to Instagram.
            // FIX: consistent IUO access (original mixed `interactionController!`
            // and `interactionController` — the `!` on an IUO is redundant).
            self.interactionController = UIDocumentInteractionController(URL: fileUrl)
            self.interactionController.UTI = "com.instagram.exclusivegram"
            self.interactionController.delegate = self
            let msgBody = "My message"
            self.interactionController.annotation = NSDictionary(object: msgBody, forKey: "InstagramCaption")
            self.interactionController.presentOpenInMenuFromRect(self.view.frame, inView: self.view, animated: true)
        })
    }

    // MARK: - UIDocumentInteractionControllerDelegate

    func documentInteractionControllerDidDismissOpenInMenu(controller: UIDocumentInteractionController) {
        // Menu closed — allow the shutter again.
        interactionIsOPEN = true
    }

    func documentInteractionControllerWillPresentOpenInMenu(controller: UIDocumentInteractionController) {
        // Menu opening — block the shutter until it is dismissed.
        interactionIsOPEN = false
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment