using IBM.Watson.DeveloperCloud.Connection;
using IBM.Watson.DeveloperCloud.Logging;
using IBM.Watson.DeveloperCloud.Services.VisualRecognition.v3;
using IBM.Watson.DeveloperCloud.Utilities;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class DetectFacesExample : MonoBehaviour
{
    private VisualRecognition visualRecognition;
    private string visualRecognitionApiKey = "";
    private string visualRecognitionServiceUrl = "";
    private string imagePath;
    private string versionDate = "2018-10-29";

    void Start()
    {
        LogSystem.InstallDefaultReactors();

        // Set image path
        imagePath = Application.dataPath + "/Watson/Examples/ServiceExamples/TestData/visual-recognition-classifiers/obama.jpg";

        // Create service in coroutine
        Runnable.Run(CreateService());
    }

    private IEnumerator CreateService()
    {
        // Create token options from the IAM API key
        TokenOptions visualRecognitionTokenOptions = new TokenOptions()
        {
            IamApiKey = visualRecognitionApiKey
        };

        // Create credentials
        Credentials visualRecognitionCredentials = new Credentials(visualRecognitionTokenOptions, visualRecognitionServiceUrl);

        // Wait for token data
        while (!visualRecognitionCredentials.HasIamTokenData())
            yield return null;

        // Instantiate service
        visualRecognition = new VisualRecognition(visualRecognitionCredentials);

        // Set version date
        visualRecognition.VersionDate = versionDate;

        // Detect faces in the image
        visualRecognition.DetectFaces(OnDetectFaces, OnFail, imagePath);
    }
    private void OnDetectFaces(DetectedFaces response, Dictionary<string, object> customData)
    {
        // Print response json to console
        Log.Debug("DetectFacesExample", "{0}", customData["json"].ToString());

        // Print gender, age and confidence of the first detected face
        Log.Debug("DetectFacesExample", "gender: {0}, score: {1}, age: {2} - {3}, score: {4}",
            response.images[0].faces[0].gender.gender,
            response.images[0].faces[0].gender.score,
            response.images[0].faces[0].age.min,
            response.images[0].faces[0].age.max,
            response.images[0].faces[0].age.score);
    }

    // Fail callback
    private void OnFail(RESTConnector.Error error, Dictionary<string, object> customData)
    {
        Log.Debug("DetectFacesExample", "Failed to detect faces");
    }
}
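
The OnDetectFaces callback above only logs the first face in the first image. Below is a minimal sketch (not part of the original gist) of an alternative success callback that walks every image and every face in the same DetectedFaces response; it would live inside the class and be passed to DetectFaces in place of OnDetectFaces.

// Hypothetical variant of the success callback that logs all detected faces,
// using the same response fields as OnDetectFaces above.
private void OnDetectAllFaces(DetectedFaces response, Dictionary<string, object> customData)
{
    foreach (var image in response.images)
    {
        foreach (var face in image.faces)
        {
            Log.Debug("DetectFacesExample", "gender: {0}, score: {1}, age: {2} - {3}, score: {4}",
                face.gender.gender, face.gender.score, face.age.min, face.age.max, face.age.score);
        }
    }
}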
@chanholl

Hello! I am very new to Unity and am trying to follow this tutorial: https://www.youtube.com/watch?v=PvgK8wfQ_Mw

I have managed to get the camera render set up, but the face detection script is not working. Can I use the script you have posted here together with the face detection script?

I am also getting "type or namespace could not be found" errors for many of the terms.

Any help would be much appreciated!!
