@otmb · Last active December 4, 2018 09:48
KinectV2 visualize
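A Unity script (KinectView) that renders the depth pixels of a person tracked by a Kinect v2 as a colored point cloud, sampling colors from the color camera and centering the cloud on the body's SpineMid joint, together with a minimal point shader (Custom/KinectView) used to display it.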
using UnityEngine;
using Windows.Kinect;
using System.Runtime.InteropServices;
using System;
using System.Linq;
using UnityEngine.Rendering;
[RequireComponent(typeof(MeshRenderer))]
[RequireComponent(typeof(MeshFilter))]
public class KinectView : MonoBehaviour {
    [SerializeField] private Shader shader;
    Mesh mesh;
    private CoordinateMapper _Mapper;
    private FrameDescription colorFrameDesc;
    private FrameDescription depthFrameDesc;
    private MultiSourceFrameReader multiFrameSourceReader;
    ushort[] depthFrameData;
    byte[] colorFrameData;
    CameraSpacePoint[] cameraSpacePoints;
    ColorSpacePoint[] colorSpacePoints;
    private int bytesPerPixel = 4;
    private KinectSensor _Sensor;
    byte[] bodyIndexBuffer;
    // Use this for initialization
    void Start()
    {
        mesh = new Mesh()
        {
            indexFormat = IndexFormat.UInt32,
        };
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor != null)
        {
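            // Read depth, color, body index and body frames from one reader so they arrive in sync.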
            multiFrameSourceReader = _Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth
                | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
            multiFrameSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            _Mapper = _Sensor.CoordinateMapper;

            depthFrameDesc = _Sensor.DepthFrameSource.FrameDescription;
            int depthWidth = depthFrameDesc.Width;
            int depthHeight = depthFrameDesc.Height;
            depthFrameData = new ushort[depthWidth * depthHeight];
            colorSpacePoints = new ColorSpacePoint[depthWidth * depthHeight];
            cameraSpacePoints = new CameraSpacePoint[depthWidth * depthHeight];

            colorFrameDesc = _Sensor.ColorFrameSource.FrameDescription;
            int colorWidth = colorFrameDesc.Width;
            int colorHeight = colorFrameDesc.Height;
            // allocate space to put the pixels being received
            colorFrameData = new byte[colorWidth * colorHeight * bytesPerPixel];

            var bodyIndexFrameDesc = _Sensor.BodyIndexFrameSource.FrameDescription;
            bodyIndexBuffer = new byte[bodyIndexFrameDesc.LengthInPixels];

            // create mesh
            Vector3[] vertices = new Vector3[depthWidth * depthHeight];
            var indices = new int[vertices.Length];
            for (int i = 0; i < vertices.Length; i++)
                indices[i] = i;
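            // One point per depth pixel (vertex i uses index i); the mesh is updated every frame, so mark it dynamic.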
            mesh.MarkDynamic();
            mesh.vertices = vertices;
            mesh.SetIndices(indices, MeshTopology.Points, 0, false);
            mesh.UploadMeshData(false);

            var filter = GetComponent<MeshFilter>();
            filter.sharedMesh = mesh;

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }
    }

    // Update is called once per frame
    void Update()
    {
    }
    void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
    {
        int depthWidth = 0;
        int depthHeight = 0;
        int colorWidth = 0;
        int colorHeight = 0;

        MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
        if (_Sensor == null) return;
        if (multiSourceFrame != null)
        {
            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                if (depthFrame != null && colorFrame != null && bodyIndexFrame != null && bodyFrame != null)
                {
                    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                    depthWidth = depthFrameDescription.Width;
                    depthHeight = depthFrameDescription.Height;
                    depthFrame.CopyFrameDataToArray(depthFrameData);

                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                    colorWidth = colorFrameDescription.Width;
                    colorHeight = colorFrameDescription.Height;
                    colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);

                    bodyIndexFrame.CopyFrameDataToArray(bodyIndexBuffer);
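                    // Map each depth pixel to its matching color pixel and to a 3D point in camera space.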
                    _Mapper.MapDepthFrameToColorSpace(depthFrameData, colorSpacePoints);
                    _Mapper.MapDepthFrameToCameraSpace(depthFrameData, cameraSpacePoints);

                    int counter = 0;
                    var vertices = new Vector3[depthWidth * depthHeight];
                    var colors = new Color[depthWidth * depthHeight];

                    Body[] bodies = new Body[bodyFrame.BodyCount];
                    bodyFrame.GetAndRefreshBodyData(bodies);
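                    // Center the point cloud on the SpineMid joint of the first tracked body (use the origin if nobody is tracked).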
                    Body body = bodies.FirstOrDefault(b => b.IsTracked);
                    var bodyPos = (body == null) ? Vector3.zero : new Vector3(
                        body.Joints[JointType.SpineMid].Position.X,
                        body.Joints[JointType.SpineMid].Position.Y,
                        body.Joints[JointType.SpineMid].Position.Z);

                    for (int y = 0; y < depthHeight; y++)
                    {
                        for (int x = 0; x < depthWidth; x++)
                        {
                            int depthIndex = (y * depthWidth) + x;
                            int bodyIndex = bodyIndexBuffer[depthIndex];
                            // 255 means the depth pixel belongs to no tracked body; keep only pixels of a detected person
                            if (bodyIndex == 255)
                            {
                                continue;
                            }

                            CameraSpacePoint p = cameraSpacePoints[depthIndex];
                            ColorSpacePoint colorPoint = colorSpacePoints[depthIndex];

                            byte r = 0;
                            byte g = 0;
                            byte b = 0;
                            //byte a = 0;
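                            // Round the mapped color coordinate to the nearest color pixel and sample BGRA data.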
                            int colorX = (int)System.Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)System.Math.Floor(colorPoint.Y + 0.5);
                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex = ((colorY * colorWidth) + colorX) * bytesPerPixel;
                                b = colorFrameData[colorIndex++];
                                g = colorFrameData[colorIndex++];
                                r = colorFrameData[colorIndex++];
                            }
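                            // Depth pixels with no valid 3D mapping come back as infinity; skip them.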
                            if (!(double.IsInfinity(p.X)) && !(double.IsInfinity(p.Y)) && !(double.IsInfinity(p.Z)))
                            {
                                vertices[counter] = new Vector3(p.X, p.Y, p.Z) - bodyPos;
                                colors[counter] = new Color32(r, g, b, 255);
                                counter++;
                            }
                        }
                    }
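                    // Push the rebuilt positions and colors to the mesh.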
                    mesh.vertices = vertices;
                    mesh.colors = colors;
                }
            }
        }
    }
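    // Release the reader and close the sensor when the application shuts down.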
    void OnApplicationQuit()
    {
        if (multiFrameSourceReader != null)
        {
            multiFrameSourceReader.Dispose();
            multiFrameSourceReader = null;
        }
        if (_Mapper != null)
        {
            _Mapper = null;
        }
        if (_Sensor != null)
        {
            if (_Sensor.IsOpen)
            {
                _Sensor.Close();
            }
            _Sensor = null;
        }
    }
}
Shader "Custom/KinectView" {
Properties
{
_Size("PointCloud Size", Float) = 1.0
}
SubShader{
Pass{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
uniform float _Size;
struct appdata {
float4 vertex : POSITION;
float4 col : COLOR;
};
struct v2f {
float4 vertex : SV_POSITION;
float4 col : COLOR;
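                // PSIZE requests a rendered point size; some graphics APIs (e.g. Direct3D 11) may ignore it.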
                float size : PSIZE;
            };

            v2f vert(appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.col = v.col;
                o.size = _Size;
                return o;
            }

            fixed4 frag(v2f i) : SV_Target
            {
                return i.col;
            }
            ENDCG
        }
    }
}