@SalmonKing72
Created January 17, 2020 15:20
Helpful VR face detection utilities: WebcamPhoto captures photos from the HoloLens locatable camera and runs an OpenCVForUnity Haar cascade face detector, and CenterOnScreenPoint smoothly moves a world-space RectTransform toward the detected face's viewport position.

using UnityEngine;
public class CenterOnScreenPoint : MonoBehaviour
{
    // Viewport-space (0..1) point the UI element should track.
    public Vector2 screenPosition;
    [Range(0.25f, 4)]
    public float distanceFromCamera = 1.5f;
    public float maxSpeed = 0.65f;
    [Range(0.005f, 0.01f)]
    public float minDistance = 0.0075f;
    public float farDistance = 3;
    // Detections farther than this from the viewport centre are ignored as outliers.
    public float outlierDistanceThreshold = 0.3f;
    public Vector2 offsetVec = new Vector2(-0.2f, 0);

    RectTransform rectTransform;
    Vector3 target;
    bool isPositionDirty = false;
    Quaternion lastCameraRotation;
    Vector2 lastPosition;

    private void Start()
    {
        rectTransform = GetComponent<RectTransform>();
        UpdatePosition(new Vector2(0.5f, 0.5f), Camera.main.transform.rotation);
    }

    public void UpdatePosition(Vector2 position, Quaternion lastCameraRotation)
    {
        position = new Vector2(
            Mathf.Clamp01(position.x + offsetVec.x),
            Mathf.Clamp01(position.y + offsetVec.y)
        );

        // Only accept detections reasonably close to the viewport centre; anything else is treated as an outlier.
        if (Vector2.Distance(position, new Vector2(0.5f, 0.5f)) < outlierDistanceThreshold)
        {
            // Remember the previously accepted position so Update can fall back to it.
            lastPosition = screenPosition;
            screenPosition = position;
            isPositionDirty = true;
        }
        this.lastCameraRotation = lastCameraRotation;
    }

    private void Update()
    {
        if (isPositionDirty)
        {
            // If the head has rotated noticeably since the photo was taken, the detection is stale:
            // revert to the previous position instead of retargeting.
            if (Quaternion.Angle(Camera.main.transform.rotation, lastCameraRotation) > 2.5f)
            {
                screenPosition = lastPosition;
            }
            else
            {
                Ray ray = Camera.main.ViewportPointToRay(screenPosition);
                target = ray.GetPoint(distanceFromCamera);
            }
            isPositionDirty = false;
        }

        Vector3 current = rectTransform.position;
        if (Vector3.Distance(current, target) > minDistance)
        {
            if (Vector3.Distance(current, target) > farDistance)
            {
                // Far from the target: jump halfway rather than crawling across the scene.
                rectTransform.position = Vector3.Lerp(current, target, 0.5f);
            }
            else
            {
                rectTransform.position = Vector3.MoveTowards(current, target, maxSpeed);
            }
        }
    }
}

using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.XR.WSA.Input;
using UnityEngine.EventSystems;
using UnityEngine.SceneManagement;
using OpenCVForUnity;
public class WebcamPhoto : MonoBehaviour
{
    public CenterOnScreenPoint centerOnScreenPoint;

    UnityEngine.XR.WSA.WebCam.PhotoCapture m_PhotoCaptureObj;
    UnityEngine.XR.WSA.WebCam.CameraParameters m_CameraParameters;
    bool m_CapturingPhoto = false;
    Texture2D m_Texture = null;
    bool isReady = false;
    bool isThreadRunning = false;

    /// <summary>
    /// The rgba mat.
    /// </summary>
    Mat rgbaMat;

    /// <summary>
    /// The gray mat.
    /// </summary>
    Mat grayMat;

    /// <summary>
    /// The cascade.
    /// </summary>
    CascadeClassifier cascade;

    /// <summary>
    /// The faces.
    /// </summary>
    MatOfRect faces;

    /// <summary>
    /// The colors.
    /// </summary>
    Color32[] colors;

    Quaternion lastCameraRotation;

    void Start()
    {
        Initialize();
    }

    void Initialize()
    {
        Debug.Log("Initializing...");

        List<Resolution> resolutions = new List<Resolution>(UnityEngine.XR.WSA.WebCam.PhotoCapture.SupportedResolutions);
        // Picks the second entry of the supported-resolution list; adjust the index for a different resolution.
        Resolution selectedResolution = resolutions[1];
        foreach (var item in resolutions)
        {
            Debug.Log("resolution width " + item.width + " height " + item.height);
        }

        m_CameraParameters = new UnityEngine.XR.WSA.WebCam.CameraParameters(UnityEngine.XR.WSA.WebCam.WebCamMode.PhotoMode);
        m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
        m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
        m_CameraParameters.hologramOpacity = 0.0f;
        m_CameraParameters.pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.BGRA32;

        m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
        rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
        colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];
        grayMat = new Mat(rgbaMat.rows(), rgbaMat.cols(), CvType.CV_8UC1);
        faces = new MatOfRect();

        cascade = new CascadeClassifier();
        cascade.load(Utils.getFilePath("haarcascade_frontalface_alt.xml"));

        UnityEngine.XR.WSA.WebCam.PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
    }

    void OnCreatedPhotoCaptureObject(UnityEngine.XR.WSA.WebCam.PhotoCapture captureObject)
    {
        m_PhotoCaptureObj = captureObject;
        m_PhotoCaptureObj.StartPhotoModeAsync(m_CameraParameters, OnStartPhotoMode);
    }

    void OnStartPhotoMode(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result)
    {
        Debug.Log("Photo mode ready!");
        // Only start taking photos once photo mode has actually started.
        isReady = true;
    }

    private void Update()
    {
        // Kick off a new capture only when the previous photo and its face detection have finished.
        if (isReady && m_CapturingPhoto == false && isThreadRunning == false)
        {
            // Remember the head pose at capture time so stale detections can be rejected later.
            lastCameraRotation = Camera.main.transform.rotation;
            m_CapturingPhoto = true;
            m_PhotoCaptureObj.TakePhotoAsync(OnPhotoCaptured);
        }
    }

    void OnPhotoCaptured(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result, UnityEngine.XR.WSA.WebCam.PhotoCaptureFrame photoCaptureFrame)
    {
        m_CapturingPhoto = false;

        // Camera pose and projection at capture time (currently unused, but useful for projecting
        // pixel coordinates back into world space).
        Matrix4x4 cameraToWorldMatrix;
        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;
        Matrix4x4 projectionMatrix;
        photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

        photoCaptureFrame.UploadImageDataToTexture(m_Texture);
        Utils.texture2DToMat(m_Texture, rgbaMat);

#if !UNITY_EDITOR
        // On device, run detection on a background task so it does not block the frame rate.
        isThreadRunning = true;
        System.Threading.Tasks.Task task = System.Threading.Tasks.Task.Run(() => UpdateFacePosition(photoCaptureFrame));
#else
        UpdateFacePosition(photoCaptureFrame);
#endif
    }

    void UpdateFacePosition(UnityEngine.XR.WSA.WebCam.PhotoCaptureFrame photoCaptureFrame)
    {
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        // Draw a border around the whole frame (debug visualisation).
        Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(255, 0, 0, 255), 2);

        if (cascade != null)
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(grayMat.cols() * 0.05, grayMat.rows() * 0.05), new Size());

        OpenCVForUnity.Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++)
        {
            // Debug.Log ("detect faces " + rects [i]);
            Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
        }

        if (rects.Length > 0)
        {
            // Centre of the first detected face, normalised to 0..1 image coordinates.
            float x = rects[0].x != 0 ? ((float)rects[0].x + (rects[0].width / 2)) / rgbaMat.width() : 0;
            float y = rects[0].y != 0 ? ((float)rects[0].y + (rects[0].height / 2)) / rgbaMat.height() : 0;
            // Flip y: image coordinates are top-down, viewport coordinates are bottom-up.
            centerOnScreenPoint.UpdatePosition(new Vector2(x, 1 - y), lastCameraRotation);
        }

        isThreadRunning = false;
    }

    void OnStopPhotoMode(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result)
    {
        Debug.Log("StopPhotoMode!");
        m_PhotoCaptureObj.Dispose();
    }

    /// <summary>
    /// Raises the disable event.
    /// </summary>
    void OnDisable()
    {
        if (m_PhotoCaptureObj != null)
            m_PhotoCaptureObj.StopPhotoModeAsync(OnStopPhotoMode);

        if (rgbaMat != null)
            rgbaMat.Dispose();

        if (grayMat != null)
            grayMat.Dispose();

        if (cascade != null)
            cascade.Dispose();
    }
}
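
WebcamPhoto only needs its centerOnScreenPoint field assigned to drive the UI element. A minimal wiring sketch, assuming a hypothetical FaceTrackerSetup helper with both components already present in the scene and the reticle sitting on a world-space canvas:

using UnityEngine;

// Hypothetical wiring helper, not part of the gist: connects the capture script to the UI follower.
public class FaceTrackerSetup : MonoBehaviour
{
    // Assigned in the Inspector: a world-space UI element (e.g. a reticle Image) carrying CenterOnScreenPoint.
    public CenterOnScreenPoint reticle;
    // Assigned in the Inspector: the component that captures photos and runs face detection.
    public WebcamPhoto webcamPhoto;

    void Awake()
    {
        // Detected face positions will now drive the reticle's target.
        webcamPhoto.centerOnScreenPoint = reticle;
    }
}

In practice you can skip the helper entirely and just drag the CenterOnScreenPoint object onto the WebcamPhoto component's field in the Inspector.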