Sayalee Pote (sayaleepote)
@sayaleepote
sayaleepote / PodFile
Created February 13, 2019 04:59
PodFile
target 'TextDetect' do
  use_frameworks!

  # Pods for TextDetect
  pod 'Firebase/Core', '~> 5.2.0'
  pod 'Firebase/MLVision', '~> 5.2.0'
  pod 'Firebase/MLVisionTextModel', '~> 5.2.0'
end
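These pods only take effect once Firebase is configured at app launch; a minimal sketch of the standard AppDelegate hook (not part of the gist):

import UIKit
import Firebase

@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Standard Firebase bootstrap; requires GoogleService-Info.plist in the target
        FirebaseApp.configure()
        return true
    }
}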
@sayaleepote
sayaleepote / ViewController.swift
Created February 6, 2019 18:36
UIPickerView for language selection
let languages = ["Select Language", "Hindi", "French", "Italian", "German", "Japanese"]
// Parallel array of ISO language codes; the first entry backs the "Select Language" placeholder
let languageCodes = ["hi", "hi", "fr", "it", "de", "ja"]
var targetCode = "hi"

// MARK: - UIPickerViewDelegate
extension ViewController: UIPickerViewDataSource, UIPickerViewDelegate {
    func numberOfComponents(in pickerView: UIPickerView) -> Int {
        return 1
    }
}
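The gist preview cuts off after numberOfComponents; the remaining data source and delegate methods plausibly look like this (a sketch; the didSelectRow behavior is an assumption based on targetCode and the parallel languageCodes array above):

extension ViewController {
    func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
        return languages.count
    }

    func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? {
        return languages[row]
    }

    func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) {
        // Assumption: selecting a row updates the translation target code
        targetCode = languageCodes[row]
    }
}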
@sayaleepote
sayaleepote / ViewController.swift
Created February 6, 2019 18:27
Language Selector Button Tapped
@IBAction func languageSelectorTapped(_ sender: Any) {
    if pickerVisible {
        languagePickerHeightConstraint.constant = 0
        pickerVisible = false
        translateText(detectedText: self.detectedText.text ?? "")
    } else {
        languagePickerHeightConstraint.constant = 150
        pickerVisible = true
    }
}
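The height change above snaps into place; to animate the picker sliding open and closed, a common pattern (an assumption, not shown in the gist) is to wrap a layout pass right after updating the constant:

// After setting languagePickerHeightConstraint.constant:
UIView.animate(withDuration: 0.3) {
    self.view.layoutIfNeeded()
}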
@sayaleepote
sayaleepote / ViewController.swift
Created February 2, 2019 19:04
Translate text
func translateText(detectedText: String) {
    guard !detectedText.isEmpty else {
        return
    }
    let task = try? GoogleTranslate.sharedInstance.translateTextTask(text: detectedText, targetLanguage: self.targetCode, completionHandler: { (translatedText: String?, error: Error?) in
        if let error = error {
            debugPrint(error.localizedDescription)
        }
        DispatchQueue.main.async {
            self.translatedText.text = translatedText  // assumption: output label
        }
    })
    task?.resume()  // assumption: the helper returns a URLSessionTask
}
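GoogleTranslate is a custom wrapper that none of these gists show; one plausible shape, assuming it wraps the Translate v2 REST endpoint with an API key (the class layout and apiKey placeholder are hypothetical, only the endpoint and response shape are the real Google API):

import Foundation

class GoogleTranslate {
    static let sharedInstance = GoogleTranslate()
    private let apiKey = "YOUR_API_KEY"  // hypothetical placeholder

    func translateTextTask(text: String, targetLanguage: String, completionHandler: @escaping (String?, Error?) -> Void) throws -> URLSessionDataTask {
        var components = URLComponents(string: "https://translation.googleapis.com/language/translate/v2")!
        components.queryItems = [
            URLQueryItem(name: "key", value: apiKey),
            URLQueryItem(name: "q", value: text),
            URLQueryItem(name: "target", value: targetLanguage)
        ]
        return URLSession.shared.dataTask(with: components.url!) { data, _, error in
            guard let data = data, error == nil else {
                completionHandler(nil, error)
                return
            }
            // Translate v2 responds with {"data":{"translations":[{"translatedText": ...}]}}
            let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any]
            let translations = (json?["data"] as? [String: Any])?["translations"] as? [[String: Any]]
            completionHandler(translations?.first?["translatedText"] as? String, nil)
        }
    }
}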
@sayaleepote
sayaleepote / ViewController.swift
Created February 2, 2019 17:08
Declaration of vision text detector
import Firebase

class ViewController: UIViewController {
    // Lazily grab the Firebase ML Vision entry point
    lazy var vision = Vision.vision()
    var textDetector: VisionTextDetector?
}
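A typical way these properties get exercised, assuming the image arrives from a UIImagePickerController (the picker wiring is an assumption; detectText is defined in the next gist):

extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
        picker.dismiss(animated: true)
        if let image = info[.originalImage] as? UIImage {
            detectText(image: image)
        }
    }
}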
@sayaleepote
sayaleepote / ViewController.swift
Created February 2, 2019 12:13
Detect and Recognise Text Method
func detectText(image: UIImage) {
    textDetector = vision.textDetector()
    let visionImage = VisionImage(image: image)
    textDetector?.detect(in: visionImage) { (features, error) in
        guard error == nil, let features = features, !features.isEmpty else {
            return
        }
        // Assumption: join the recognized text blocks and display them
        let detected = features.map { $0.text }.joined(separator: " ")
        self.detectedText.text = detected
    }
}
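To tie this gist to the translation one above, the detection completion can hand the recognized text straight to translateText; the exact wiring is an assumption, but it mirrors the call in languageSelectorTapped:

// Inside the detect(in:) completion handler, after updating the label
self.translateText(detectedText: self.detectedText.text ?? "")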
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
/* This delegate method fires every time a new video frame is written.
   It is called on the dispatch queue specified while setting up the capture session.
*/
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    /* Initialise a CVPixelBuffer from the sample buffer.
       CVPixelBuffer is the input type we will feed our Core ML model.
    */
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    // (truncated in the gist; pixelBuffer is then fed to the model, see the sketch below)
}
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    ...
}
enum HandSign: String {
    case fiveHand = "FiveHand"
    case fistHand = "FistHand"
    case victoryHand = "VictoryHand"
    case noHand = "NoHand"
}
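Putting the pieces together, a sketch of how the pixel buffer from captureOutput could be classified and mapped onto HandSign (the HandSignClassifier model class is a hypothetical name for an Xcode-generated Core ML model; the gists do not show this step):

import Vision

func classify(pixelBuffer: CVPixelBuffer) {
    // HandSignClassifier is a hypothetical class generated from a .mlmodel file
    guard let model = try? VNCoreMLModel(for: HandSignClassifier().model) else { return }
    let request = VNCoreMLRequest(model: model) { request, _ in
        guard let best = (request.results as? [VNClassificationObservation])?.first,
              let sign = HandSign(rawValue: best.identifier) else { return }
        print("Detected \(sign.rawValue) with confidence \(best.confidence)")
    }
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    try? handler.perform([request])
}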
func configureCamera() {
    // Start capture session
    let captureSession = AVCaptureSession()
    captureSession.sessionPreset = .photo
    captureSession.startRunning()

    // Add input for capture
    guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
    guard let captureInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }
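    // The gist preview stops at the input guard. A plausible continuation follows
    // (assumption: the queue label, preview frame, and ordering are not shown in the gist):
    captureSession.addInput(captureInput)

    // Hand frames to captureOutput(_:didOutput:from:) on a background queue
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
    captureSession.addOutput(videoOutput)

    // Show the camera feed behind the UI
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.frame = view.frame
    view.layer.addSublayer(previewLayer)
}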