using IBM.Watson.DeveloperCloud.Connection;
using IBM.Watson.DeveloperCloud.Logging;
using IBM.Watson.DeveloperCloud.Services.VisualRecognition.v3;
using IBM.Watson.DeveloperCloud.Utilities;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;

public class DetectFacesExample : MonoBehaviour
{
    private VisualRecognition visualRecognition;
    private string visualRecognitionApiKey = "";
    private string visualRecognitionServiceUrl = "";
    private string versionDate = "2018-10-29";

    // Captured webcam frame, encoded as PNG bytes for the service call
    private byte[] imageData;
    private Texture2D texture2d;
    private WebCamTexture webcamTexture;

    // UI element that displays the webcam feed; assign in the Inspector
    [SerializeField]
    private RawImage rawImage;

    void Start()
    {
        LogSystem.InstallDefaultReactors();

        // Initialize the webcam feed
        webcamTexture = new WebCamTexture();
        webcamTexture.requestedWidth = 640;
        webcamTexture.requestedHeight = 480;
        webcamTexture.Play();

        // Show the webcam feed on the RawImage (rawImage.texture = webcamTexture
        // is a common alternative that avoids touching the material)
        rawImage.material.mainTexture = webcamTexture;

        // Create the service in a coroutine
        Runnable.Run(CreateService());
    }

    private void TakePhoto()
    {
        // Copy the current webcam frame into a texture
        texture2d = new Texture2D(webcamTexture.width, webcamTexture.height);
        texture2d.SetPixels(webcamTexture.GetPixels());
        texture2d.Apply();

        // Encode the frame once and reuse the bytes for both the request and the saved file
        imageData = texture2d.EncodeToPNG();
        File.WriteAllBytes(Application.dataPath + "/myImage.png", imageData);
    }

    private IEnumerator CreateService()
    {
        // Give the webcam time to start delivering frames before capturing
        yield return new WaitForSeconds(0.5f);
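        // Note: the fixed 0.5 s delay is a guess; on slower devices the camera
        // may not be ready yet. A more defensive (hypothetical) alternative is
        // to poll until WebCamTexture reports a real frame size, since Unity
        // returns a small placeholder texture until the camera starts:
        //
        //     while (webcamTexture.width <= 16)
        //         yield return null;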
        TakePhoto();

        // Create token options from the IAM API key
        TokenOptions visualRecognitionTokenOptions = new TokenOptions()
        {
            IamApiKey = visualRecognitionApiKey
        };

        // Create credentials
        Credentials visualRecognitionCredentials = new Credentials(visualRecognitionTokenOptions, visualRecognitionServiceUrl);

        // Wait for the IAM token to be fetched
        while (!visualRecognitionCredentials.HasIamTokenData())
            yield return null;

        // Instantiate the service and set the version date
        visualRecognition = new VisualRecognition(visualRecognitionCredentials);
        visualRecognition.VersionDate = versionDate;

        // Detect faces in the captured frame
        visualRecognition.DetectFaces(OnDetectFaces, OnFail, imageData);
    }

    private void OnDetectFaces(DetectedFaces response, Dictionary<string, object> customData)
    {
        // Print the raw response JSON to the console
        Log.Debug("DetectFacesExample", "{0}", customData["json"].ToString());

        // Guard against responses with no detected faces
        if (response.images.Length == 0 || response.images[0].faces.Length == 0)
        {
            Log.Debug("DetectFacesExample", "No faces detected");
            return;
        }

        // Print gender, age range and confidence for the first detected face
        var face = response.images[0].faces[0];
        Log.Debug("DetectFacesExample", "gender: {0}, score: {1}, age: {2} - {3}, score: {4}",
            face.gender.gender, face.gender.score, face.age.min, face.age.max, face.age.score);
    }
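
    // A hypothetical extension, if you want every face rather than only the
    // first: the response fields used above are plain arrays, so they can be
    // iterated directly.
    //
    //     foreach (var image in response.images)
    //         foreach (var face in image.faces)
    //             Log.Debug("DetectFacesExample", "gender: {0}, age: {1} - {2}",
    //                 face.gender.gender, face.age.min, face.age.max);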

    // Fail callback
    private void OnFail(RESTConnector.Error error, Dictionary<string, object> customData)
    {
        Log.Debug("DetectFacesExample", "Failed to detect faces");
    }
}
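
// Usage sketch (assuming a standard Unity scene): attach this script to a
// GameObject, assign a UI RawImage to the rawImage field in the Inspector, and
// set visualRecognitionApiKey and visualRecognitionServiceUrl to your Visual
// Recognition service's IAM API key and URL before entering Play mode.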