RealSense T265 Fisheye Undistort Unity
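A Unity shader plus a companion MonoBehaviour that undistort the RealSense T265 fisheye (Y8) stream: the script bakes the inverse of the camera's Kannala-Brandt fisheye model into a UV lookup texture, and the shader uses that lookup to sample the raw fisheye frame.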
Shader "Custom/Fisheye" {
Properties {
_MainTex ("MainTex", 2D) = "white" {}
// _UVMap ("UVMap", 2D) = "black" {}
_Gamma ("Gamma", float) = 0.45
}
SubShader {
Tags { "QUEUE"="Transparent" "IGNOREPROJECTOR"="true" "RenderType"="Transparent" "PreviewType"="Plane" }
Pass {
ZWrite Off
Cull Off
Fog { Mode Off }
ColorMask RGB
CGPROGRAM
#pragma vertex vert_img
#pragma fragment frag
#pragma target 3.0
#pragma glsl
#include "UnityCG.cginc"
sampler2D _MainTex;
uniform sampler2D _UVMap;
float _Gamma;
half4 frag (v2f_img pix) : SV_Target
{
float2 uv = pix.uv;
float2 uvmap = tex2D(_UVMap, uv).xy;
float Y = pow(tex2D(_MainTex, uvmap).a, _Gamma);
return float4(Y.rrr, 1);
}
ENDCG
}
}
FallBack Off
}
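The component below uploads each matching fisheye frame into a Texture2D (exposed through the textureBinding event so it can be assigned to a material's _MainTex) and publishes the undistortion lookup as the global _UVMap texture sampled by the shader above.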
using Intel.RealSense;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Threading;
using UnityEngine;
using UnityEngine.Events;
public class RsStreamTextureRendererFisheye : MonoBehaviour
{
    private static TextureFormat Convert(Format lrsFormat)
    {
        switch (lrsFormat)
        {
            case Format.Z16: return TextureFormat.R16;
            case Format.Disparity16: return TextureFormat.R16;
            case Format.Rgb8: return TextureFormat.RGB24;
            case Format.Rgba8: return TextureFormat.RGBA32;
            case Format.Bgra8: return TextureFormat.BGRA32;
            case Format.Y8: return TextureFormat.Alpha8;
            case Format.Y16: return TextureFormat.R16;
            case Format.Raw16: return TextureFormat.R16;
            case Format.Raw8: return TextureFormat.Alpha8;
            case Format.Disparity32: return TextureFormat.RFloat;
            case Format.Yuyv:
            case Format.Bgr8:
            case Format.Raw10:
            case Format.Xyz32f:
            case Format.Uyvy:
            case Format.MotionRaw:
            case Format.MotionXyz32f:
            case Format.GpioRaw:
            case Format.Any:
            default:
                throw new ArgumentException(string.Format("librealsense format: {0}, is not supported by Unity", lrsFormat));
        }
    }
    // Bits per pixel of the Unity texture formats we upload raw frame data into.
    private static int BPP(TextureFormat format)
    {
        switch (format)
        {
            case TextureFormat.ARGB32:
            case TextureFormat.BGRA32:
            case TextureFormat.RGBA32:
                return 32;
            case TextureFormat.RGB24:
                return 24;
            case TextureFormat.R16:
                return 16;
            case TextureFormat.R8:
            case TextureFormat.Alpha8:
                return 8;
            default:
                throw new ArgumentException(string.Format("unsupported format {0}", format));
        }
    }
    public RsFrameProvider Source;

    [System.Serializable]
    public class TextureEvent : UnityEvent<Texture> { }

    public Stream _stream;
    public Format _format;
    public int _streamIndex;

    public FilterMode filterMode = FilterMode.Point;

    protected Texture2D texture;

    // Lookup texture mapping undistorted output pixels back to fisheye source UVs.
    public Texture2D uvmap;

    // Vertical field of view (degrees) of the virtual undistorted camera.
    public float fov = 90;

    [Space]
    public TextureEvent textureBinding;

    FrameQueue q;
    Predicate<Frame> matcher;
    void Start()
    {
        Source.OnStart += OnStartStreaming;
        Source.OnStop += OnStopStreaming;
    }

    void OnDestroy()
    {
        if (texture != null)
        {
            Destroy(texture);
            texture = null;
        }

        if (q != null)
        {
            q.Dispose();
        }
    }

    protected void OnStopStreaming()
    {
        Source.OnNewSample -= OnNewSample;

        if (q != null)
        {
            q.Dispose();
            q = null;
        }
    }

    public void OnStartStreaming(PipelineProfile activeProfile)
    {
        q = new FrameQueue(1);
        matcher = new Predicate<Frame>(Matches);
        Source.OnNewSample += OnNewSample;
    }

    private bool Matches(Frame f)
    {
        using (var p = f.Profile)
            return p.Stream == _stream && p.Format == _format && p.Index == _streamIndex;
    }
    void OnNewSample(Frame frame)
    {
        try
        {
            if (frame.IsComposite)
            {
                using (var fs = frame.As<FrameSet>())
                using (var f = fs.FirstOrDefault(matcher))
                {
                    if (f != null)
                        q.Enqueue(f);
                    return;
                }
            }

            if (!matcher(frame))
                return;

            using (frame)
            {
                q.Enqueue(frame);
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
            // throw;
        }
    }
    bool HasTextureConflict(VideoFrame vf)
    {
        return !texture ||
            texture.width != vf.Width ||
            texture.height != vf.Height ||
            BPP(texture.format) != vf.BitsPerPixel;
    }

    protected void LateUpdate()
    {
        if (q != null)
        {
            VideoFrame frame;
            if (q.PollForFrame<VideoFrame>(out frame))
                using (frame)
                    ProcessFrame(frame);
        }
    }
    private void ProcessFrame(VideoFrame frame)
    {
        if (HasTextureConflict(frame))
        {
            if (texture != null)
            {
                Destroy(texture);
            }

            using (var p = frame.Profile)
            {
                // Treat color/IR frames as sRGB when the project uses linear color space.
                bool linear = (QualitySettings.activeColorSpace != ColorSpace.Linear)
                    || (p.Stream != Stream.Color && p.Stream != Stream.Infrared);
                texture = new Texture2D(frame.Width, frame.Height, Convert(p.Format), false, linear)
                {
                    wrapMode = TextureWrapMode.Clamp,
                    filterMode = filterMode
                };
            }

            textureBinding.Invoke(texture);
        }

        texture.LoadRawTextureData(frame.Data, frame.Stride * frame.Height);
        texture.Apply();

        // Build the undistortion lookup once and expose it to the Fisheye shader.
        if (uvmap == null)
        {
            CreateUvMap(frame);
            Shader.SetGlobalTexture("_UVMap", uvmap);
        }
    }
    // Undistort one fisheye pixel and return the corresponding (rounded) pixel
    // coordinate in the virtual rectified image of size W x H.
    static Vector2Int Undistort(float x, float y, ref Intrinsics intrin, int W, int H, float fov)
    {
        var uv = new Vector2(x / W * intrin.width, y / H * intrin.height);

        // see https://github.com/IntelRealSense/librealsense/blob/master/include/librealsense2/rsutil.h
        uv.x = (uv.x - intrin.ppx) / intrin.fx;
        uv.y = (uv.y - intrin.ppy) / intrin.fy;

        // Newton's method: solve theta * (1 + k1*theta^2 + k2*theta^4 + k3*theta^6 + k4*theta^8) = rd for theta.
        float rd = uv.magnitude;
        float theta = rd;
        float theta2 = rd * rd;
        for (int i = 0; i < 4; i++)
        {
            float f = theta * (1 + theta2 * (intrin.coeffs[0] + theta2 * (intrin.coeffs[1] + theta2 * (intrin.coeffs[2] + theta2 * intrin.coeffs[3])))) - rd;
            float df = 1 + theta2 * (3 * intrin.coeffs[0] + theta2 * (5 * intrin.coeffs[1] + theta2 * (7 * intrin.coeffs[2] + 9 * theta2 * intrin.coeffs[3])));
            theta -= f / df;
            theta2 = theta * theta;
        }
        float r = Mathf.Tan(theta);
        uv *= r / rd;

        // Project onto a virtual pinhole camera with the requested vertical field of view.
        // see https://github.com/IntelRealSense/librealsense/blob/master/wrappers/python/examples/t265_stereo.py
        float stereo_fov_rad = fov * (Mathf.PI / 180);
        float stereo_height_px = H;
        float stereo_focal_px = stereo_height_px / 2 / Mathf.Tan(stereo_fov_rad / 2);
        uv = uv * stereo_focal_px + Vector2.one * (stereo_height_px - 1f) / 2f;
        uv += Vector2.one * 0.5f;

        return Vector2Int.RoundToInt(uv);
    }
    private void CreateUvMap(VideoFrame frame)
    {
        // T265 fisheye resolution.
        const int W = 848, H = 800;

        if (uvmap)
            DestroyImmediate(uvmap);

        uvmap = new Texture2D(W, H, TextureFormat.RGFloat, false, true)
        {
            filterMode = FilterMode.Point,
            wrapMode = TextureWrapMode.Clamp
        };

        using (var profile = frame.Profile.As<VideoStreamProfile>())
        {
            var intrinsics = profile.GetIntrinsics();
            Debug.Log(intrinsics);

            var uvs = new Vector2[H, W];

            // Scatter pass: map each fisheye pixel into the rectified image and write
            // its source UV into all output pixels it covers.
            // see https://github.com/IntelRealSense/librealsense/blob/master/src/proc/align.cpp
            for (int y = 0; y < H; ++y)
            {
                for (int x = 0; x < W; ++x)
                {
                    var uv = Undistort(x - 0.5f, y - 0.5f, ref intrinsics, W, H, fov);
                    var uv1 = Undistort(x + 0.5f, y + 0.5f, ref intrinsics, W, H, fov);
                    if (uv.x < 0 || uv.y < 0 || uv1.x >= W || uv1.y >= H)
                        continue;

                    float u = (float)x / W;
                    float v = (float)y / H;

                    for (int j = uv.y; j <= uv1.y; ++j)
                    {
                        for (int i = uv.x; i <= uv1.x; ++i)
                        {
                            uvs[j, i].Set(u, v);
                        }
                    }
                }
            }

            // Upload the Vector2 array directly as RGFloat pixel data.
            var h = GCHandle.Alloc(uvs, GCHandleType.Pinned);
            uvmap.LoadRawTextureData(h.AddrOfPinnedObject(), W * H * 2 * sizeof(float));
            uvmap.Apply();
            h.Free();
        }
    }
}
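For reference, a minimal sketch of how the pieces could be wired together, assuming a hypothetical FisheyeMaterialBinder component and a material created from the Custom/Fisheye shader: hook the method below into the textureBinding event in the inspector so each fisheye frame ends up in the material's _MainTex (the global _UVMap is already set by the renderer above).

using UnityEngine;

// Hypothetical glue component: receives the fisheye Texture2D from
// RsStreamTextureRendererFisheye.textureBinding and assigns it to a material
// that uses the "Custom/Fisheye" shader.
public class FisheyeMaterialBinder : MonoBehaviour
{
    public Material fisheyeMaterial;

    // Wire this up to the TextureEvent (textureBinding) in the inspector.
    public void OnFisheyeTexture(Texture texture)
    {
        fisheyeMaterial.SetTexture("_MainTex", texture);
    }
}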