Recognizing Text in Images with Firebase ML Kit | iOS --- http://www.bulentsiyah.com/firebase-ml-kit-ile-goruntu-uzerindeki-metinlerin-taninmasi-ios/
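The view controller below is the full sample from the post. It targets the 2018-era ML Kit for Firebase SDK; as a rough sketch (pod names taken from that SDK generation, not from the original gist), the project's Podfile would pull in the Vision module and the on-device text model along these lines:

# Podfile (sketch, assumed setup -- not part of the original gist)
pod 'Firebase/Core'
pod 'Firebase/MLVision'
pod 'Firebase/MLVisionTextModel'   # needed for on-device text detection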
//
// FirebaseMLMetinTanimaViewController.swift
// ML Ornekleri
//
// Created by Bülent Siyah on 6.07.2018.
// Copyright © 2018 Bülent Siyah. All rights reserved.
//
import UIKit
import FirebaseMLVision

class FirebaseMLMetinTanimaViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    @IBOutlet weak var imageView: UIImageView!
    @IBOutlet weak var textResult: UITextView!

    var imagePicker: UIImagePickerController!
    var userPickedImage: UIImage?
    var textDetector: VisionTextDetector!
    var cloudTextDetector: VisionCloudTextDetector!
    var frameSublayer = CALayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Obtain the detectors from the shared Vision instance.
        textDetector = Vision.vision().textDetector()
        cloudTextDetector = Vision.vision().cloudTextDetector()
        // Layer that holds the bounding-box frames drawn over the image.
        imageView.layer.addSublayer(frameSublayer)
        imagePicker = UIImagePickerController()
        imagePicker.delegate = self
    }
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
        if let pickedImage = info[UIImagePickerControllerOriginalImage] as? UIImage {
            imageView.contentMode = .scaleAspectFit
            imageView.image = pickedImage
            self.userPickedImage = pickedImage
        }
        dismiss(animated: true, completion: nil)
    }

    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
        dismiss(animated: true, completion: nil)
    }
    // "Resim Seç" (Select Image) button: presents the photo library picker.
    @IBAction func btnResimSec(_ sender: Any) {
        imagePicker.allowsEditing = false
        imagePicker.sourceType = .photoLibrary
        present(imagePicker, animated: true, completion: nil)
    }
    // "Analiz Et" (Analyze) button: runs on-device text detection on the picked image.
    @IBAction func btnAnalizEt(_ sender: Any) {
        self.textResult.text = ""
        guard let pickedImage = self.userPickedImage else { return }
        let visionImage = VisionImage(image: pickedImage)
        textDetector.detect(in: visionImage) { features, error in
            self.processResult(from: features, error: error)
        }
        // runCloudTextRecognition(with: imageView.image!)
    }
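    /// Renders the on-device detection results: appends the recognized text to the
    /// text view and draws a frame around each detected element.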
    func processResult(from text: [VisionText]?, error: Error?) {
        removeFrames()
        guard let features = text, let image = imageView.image else {
            return
        }
        for text in features {
            if let block = text as? VisionTextBlock {
                for line in block.lines {
                    self.textResult.insertText("\n")
                    for element in line.elements {
                        // Separate the elements (words) with a space so the output stays readable.
                        self.textResult.insertText(element.text + " ")
                        self.addFrameView(
                            featureFrame: element.frame,
                            imageSize: image.size,
                            viewFrame: self.imageView.frame,
                            text: element.text
                        )
                    }
                }
            }
        }
    }
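    /// Runs cloud-based text recognition on the given image (not active in this sample;
    /// see the commented-out call in btnAnalizEt).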
    func runCloudTextRecognition(with image: UIImage) {
        let visionImage = VisionImage(image: image)
        cloudTextDetector.detect(in: visionImage) { features, error in
            if let error = error {
                print("Received error: \(error)")
                return
            }
            self.processCloudResult(from: features, error: error)
        }
    }
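    /// Renders the cloud detection results by drawing a frame around each detected word.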
    func processCloudResult(from text: VisionCloudText?, error: Error?) {
        removeFrames()
        guard let features = text, let image = imageView.image, let pages = features.pages else {
            return
        }
        for page in pages {
            for block in page.blocks ?? [] {
                for paragraph in block.paragraphs ?? [] {
                    for word in paragraph.words ?? [] {
                        self.addFrameView(
                            featureFrame: word.frame,
                            imageSize: image.size,
                            viewFrame: self.imageView.frame
                        )
                    }
                }
            }
        }
    }
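    /// Removes all previously drawn frame sublayers from the overlay layer.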
    private func removeFrames() {
        guard let sublayers = self.frameSublayer.sublayers else { return }
        for sublayer in sublayers {
            sublayer.removeFromSuperlayer()
        }
    }
    /// Converts a feature frame to a frame that is displayed over the image.
    ///
    /// - Parameters:
    ///   - featureFrame: The rect of the feature with the same scale as the original image.
    ///   - imageSize: The size of the original image.
    ///   - viewFrame: The view frame rect on the screen.
    ///   - text: Optional text to draw inside the frame.
    private func addFrameView(featureFrame: CGRect, imageSize: CGSize, viewFrame: CGRect, text: String? = nil) {
        print("Frame: \(featureFrame).")
        let viewSize = viewFrame.size
        // Aspect ratios of the view and the image decide which dimension constrains the scale.
        let rView = viewSize.width / viewSize.height
        let rImage = imageSize.width / imageSize.height
        // Define the scale by comparing the aspect ratios (matches .scaleAspectFit).
        var scale: CGFloat
        if rView > rImage {
            scale = viewSize.height / imageSize.height
        } else {
            scale = viewSize.width / imageSize.width
        }
        // Calculate the scaled feature frame size.
        let featureWidthScaled = featureFrame.size.width * scale
        let featureHeightScaled = featureFrame.size.height * scale
        // Calculate the scaled feature frame top-left point, accounting for the
        // letterboxing introduced by aspect-fit scaling.
        let imageWidthScaled = imageSize.width * scale
        let imageHeightScaled = imageSize.height * scale
        let imagePointXScaled = (viewSize.width - imageWidthScaled) / 2
        let imagePointYScaled = (viewSize.height - imageHeightScaled) / 2
        let featurePointXScaled = imagePointXScaled + featureFrame.origin.x * scale
        let featurePointYScaled = imagePointYScaled + featureFrame.origin.y * scale
        // Define a rect for the scaled feature frame.
        let featureRectScaled = CGRect(x: featurePointXScaled,
                                       y: featurePointYScaled,
                                       width: featureWidthScaled,
                                       height: featureHeightScaled)
        drawFrame(featureRectScaled, text: text)
    }
    /// Creates and draws a frame for the calculated rect as a sublayer.
    ///
    /// - Parameters:
    ///   - rect: The rect to draw.
    ///   - text: Optional label drawn centered inside the frame.
    private func drawFrame(_ rect: CGRect, text: String? = nil) {
        let bpath: UIBezierPath = UIBezierPath(rect: rect)
        let rectLayer: CAShapeLayer = CAShapeLayer()
        rectLayer.path = bpath.cgPath
        rectLayer.strokeColor = Constants.lineColor
        rectLayer.fillColor = Constants.fillColor
        rectLayer.lineWidth = Constants.lineWidth
        if let text = text {
            let textLayer = CATextLayer()
            textLayer.string = text
            textLayer.fontSize = 12.0
            textLayer.foregroundColor = Constants.lineColor
            let center = CGPoint(x: rect.midX, y: rect.midY)
            textLayer.position = center
            textLayer.frame = rect
            textLayer.alignmentMode = kCAAlignmentCenter
            textLayer.contentsScale = UIScreen.main.scale
            frameSublayer.addSublayer(textLayer)
        }
        frameSublayer.addSublayer(rectLayer)
    }

    fileprivate enum Constants {
        static let lineWidth: CGFloat = 3.0
        static let lineColor = UIColor.yellow.cgColor
        static let fillColor = UIColor.clear.cgColor
    }
}
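The gist does not include the Firebase bootstrap. As a minimal sketch (assuming the standard Firebase setup, which is not shown in the original), the SDK has to be configured once at launch, for example in the app delegate, before the detectors above can be used:

import UIKit
import Firebase

@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    var window: UIWindow?

    func application(_ application: UIApplication,
                     didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
        // Reads GoogleService-Info.plist and initializes the default FirebaseApp.
        FirebaseApp.configure()
        return true
    }
}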