Skip to content

Instantly share code, notes, and snippets.

@tkgstrator
Last active September 6, 2021 00:25
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Star You must be signed in to star a gist
Save tkgstrator/2f8f3ecac3777808d69b929a474b5093 to your computer and use it in GitHub Desktop.
CameraView
//
// CameraManager.swift
//
// SwiftyUI
// Created by devonly on 2021/08/27.
//
// Magi Corporation, All rights reserved.
import UIKit
import AVFoundation
import SwiftUI
/// Owns an `AVCaptureSession` and exposes camera state to SwiftUI.
/// Conforms to the video/photo capture delegates so frames can be received
/// on `queue` (note: no `captureOutput` override is implemented here, so
/// `image` is not currently populated — TODO confirm intended wiring).
class CameraManager: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate, ObservableObject {
    /// Most recent frame converted to `UIImage` (see note on the class doc).
    @Published var image: UIImage?
    /// Interface orientation sampled at init time.
    @Published var orientation: UIInterfaceOrientation
    public var captureSession: AVCaptureSession
    private var videoDevice: AVCaptureDevice?
    private var videoOutput: AVCaptureVideoDataOutput
    private var photoOutput: AVCapturePhotoOutput
    // Serial queue (the original used `.concurrent`): AVCaptureVideoDataOutput
    // documentation requires a serial queue so frames arrive in order.
    private let queue: DispatchQueue = DispatchQueue(label: "Camera View")

    /// - Parameters:
    ///   - deviceType: e.g. `.builtInWideAngleCamera`.
    ///   - mediaType: usually `.video`.
    ///   - position: front or back camera; defaults to `.front`.
    public init(
        deviceType: AVCaptureDevice.DeviceType,
        mediaType: AVMediaType?,
        position: AVCaptureDevice.Position = .front
    ) {
        self.orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .portrait
        self.captureSession = AVCaptureSession()
        self.photoOutput = AVCapturePhotoOutput()
        self.videoOutput = AVCaptureVideoDataOutput()
        super.init()
        self.setupCaptureSession(deviceType: deviceType, mediaType: mediaType, position: position)
    }

    /// カメラデバイスを設定する (configure the device, input and outputs).
    private func setupCaptureSession(deviceType: AVCaptureDevice.DeviceType, mediaType: AVMediaType?, position: AVCaptureDevice.Position) {
        // シミュレータではカメラが起動しないので除外 (no camera on the simulator)
        #if !targetEnvironment(simulator)
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }
        // `default(_:for:position:)` can return nil (e.g. restricted camera access);
        // the original force-unwrapped and would crash here.
        guard let device = AVCaptureDevice.default(deviceType, for: mediaType, position: position) else {
            print("CameraManager: no capture device for \(deviceType)")
            return
        }
        videoDevice = device
        do {
            let videoInput = try AVCaptureDeviceInput(device: device)
            if captureSession.canAddInput(videoInput) {
                captureSession.addInput(videoInput)
            }
        } catch let error as NSError {
            print(error)
        }
        // `kCVPixelBufferPixelFormatTypeKey` bridges directly to String; the
        // original `as AnyHashable as! String` double-cast was redundant and
        // a potential runtime trap.
        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        videoOutput.setSampleBufferDelegate(self, queue: queue)
        videoOutput.alwaysDiscardsLateVideoFrames = true
        // Guard both outputs: `changeCameraPosition()` rebuilds the session and
        // unguarded addOutput throws an Objective-C exception if one is already attached.
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }
        if captureSession.canAddOutput(photoOutput) {
            captureSession.addOutput(photoOutput)
        }
        captureSession.sessionPreset = .photo
        #endif
    }

    /// セッションを開始する — aligns every connection with the current
    /// interface orientation, then starts the session.
    public func setupSession() {
        guard let orientation: UIInterfaceOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation,
              let videoOrientation: AVCaptureVideoOrientation = AVCaptureVideoOrientation(rawValue: orientation.rawValue) else { return }
        for connection in captureSession.connections {
            connection.videoOrientation = videoOrientation
        }
        captureSession.startRunning()
    }

    /// セッションを閉じる
    public func endSession() {
        captureSession.stopRunning()
    }

    /// フロントカメラとバックカメラを切り替える
    public func changeCameraPosition() {
        captureSession.stopRunning()
        // セッションをすべて閉じてからビデオデバイスを切り替えて再起動
        // (tear down all inputs/outputs, then rebuild with the opposite camera)
        captureSession.inputs.forEach { self.captureSession.removeInput($0) }
        captureSession.outputs.forEach { self.captureSession.removeOutput($0) }
        guard let videoDevice = videoDevice else { return }
        let position: AVCaptureDevice.Position = videoDevice.position == .front ? .back : .front
        setupCaptureSession(deviceType: videoDevice.deviceType, mediaType: .video, position: position)
        setupSession()
    }

    /// Converts a BGRA sample buffer to `UIImage`.
    /// Returns nil instead of crashing when the buffer carries no pixel data or
    /// a `CGContext` cannot be built (the original force-unwrapped all three and
    /// leaked the base-address lock on the unlock-before-return path ordering).
    private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
        // `defer` guarantees the unlock even on the early-exit guard below.
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }
        let baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        // Little-endian, alpha first == BGRA, matching kCVPixelFormatType_32BGRA above.
        let bitmapInfo = CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue
        guard let context = CGContext(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo),
              let cgImage = context.makeImage() else { return nil }
        return UIImage(cgImage: cgImage)
    }
}
extension UIInterfaceOrientation {
    /// Rotation in degrees needed to align content with this orientation.
    /// Unknown and future orientation cases fall back to 0 rather than
    /// crashing: the original `fatalError()` under `@unknown default` would
    /// abort the app the first time a newer OS introduced a new case, which
    /// is never a recoverable-error situation a UI helper should create.
    var degree: CGFloat {
        switch self {
        case .landscapeLeft:
            return 90
        case .landscapeRight:
            return -90
        case .portrait:
            return 0
        case .portraitUpsideDown:
            return 180
        case .unknown:
            return 0
        @unknown default:
            // Same safe fallback as `.unknown`.
            return 0
        }
    }
}
//
// CameraPreview.swift
//
// SwiftyUI
// Created by devonly on 2021/08/27.
//
// Magi Corporation, All rights reserved.
import Foundation
import SwiftUI
// UICameraViewをSwiftUIのViewとして利用するためのUIViewRepresentable
/// Bridges the UIKit `UICameraView` into the SwiftUI view hierarchy.
struct CameraPreview: UIViewRepresentable {
    /// Frame the preview should occupy, in the parent's coordinate space.
    let previewFrame: CGRect
    /// Owner of the capture session being rendered.
    let capture: CameraManager

    /// Builds the backing UIKit view and attaches the preview layer once.
    func makeUIView(context: Context) -> UICameraView {
        let cameraView = UICameraView(frame: previewFrame, session: capture.captureSession)
        cameraView.setupPreview(previewSize: previewFrame)
        return cameraView
    }

    /// Forwards SwiftUI layout changes to the UIKit view.
    func updateUIView(_ uiView: UIViewType, context: Context) {
        uiView.updateFrame(frame: previewFrame)
    }
}
//
// CameraView.swift
//
// SwiftyUI
// Created by devonly on 2021/08/27.
//
// Magi Corporation, All rights reserved.
import Foundation
import SwiftUI
// カメラの映像を取得して表示する
/// カメラの映像を取得して表示する (shows the live camera feed full-size).
public struct CameraView: View {
    @StateObject var capture: CameraManager = CameraManager(deviceType: .builtInWideAngleCamera, mediaType: .video, position: .front)

    public init() {}

    /// 表示時にセッション開始 / 非表示時にセッション終了
    /// (session starts on appear and stops on disappear).
    public var body: some View {
        GeometryReader { proxy in
            CameraPreview(
                previewFrame: CGRect(origin: .zero, size: proxy.size),
                capture: capture
            )
            .frame(width: proxy.size.width, height: proxy.size.height, alignment: .center)
        }
        .onAppear(perform: capture.setupSession)
        .onDisappear(perform: capture.endSession)
    }
}
//
// UICameraPreview.swift
//
// SwiftyUI
// Created by devonly on 2021/08/27.
//
// Magi Corporation, All rights reserved.
import Foundation
import AVFoundation
import UIKit
// UIViewを継承したカメラの映像を表示するViewクラス
// UIViewを継承したカメラの映像を表示するViewクラス
/// Renders an `AVCaptureSession` through an `AVCaptureVideoPreviewLayer`
/// and keeps the preview aligned with layout and orientation changes.
final class UICameraView: UIView {
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    init(frame: CGRect, session: AVCaptureSession) {
        self.captureSession = session
        super.init(frame: frame)
        addOrientationChangeDetector()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
    }

    deinit {
        // Selector-based observers are auto-removed on iOS 9+, but removing
        // explicitly keeps the lifecycle contract obvious.
        NotificationCenter.default.removeObserver(self)
    }

    /// Creates the preview layer, sizes it to `previewSize` and attaches it.
    func setupPreview(previewSize: CGRect) {
        self.frame = previewSize
        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        self.previewLayer.frame = self.bounds
        self.previewLayer.videoGravity = .resizeAspectFill
        self.layer.addSublayer(previewLayer)
    }

    /// Resizes the view and its preview layer. The original body was empty,
    /// so the preview never tracked SwiftUI layout changes even though
    /// `CameraPreview.updateUIView` calls this on every update.
    func updateFrame(frame: CGRect) {
        self.frame = frame
        previewLayer?.frame = bounds
    }

    /// Observes device-orientation changes so the preview can re-orient.
    private func addOrientationChangeDetector() {
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(orientationChanged),
            name: UIDevice.orientationDidChangeNotification,
            object: nil
        )
    }

    @objc private func orientationChanged() {
        updatePreviewOrientation()
    }

    /// Rotates the preview connection to match the interface orientation.
    /// The original left this as an empty stub, so rotation left the feed
    /// sideways despite the observer being registered.
    private func updatePreviewOrientation() {
        guard let connection = previewLayer?.connection,
              let orientation = window?.windowScene?.interfaceOrientation,
              let videoOrientation = AVCaptureVideoOrientation(rawValue: orientation.rawValue) else { return }
        if connection.isVideoOrientationSupported {
            connection.videoOrientation = videoOrientation
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment