@unitycoder
Last active February 1, 2024 11:17
Write To Depth from Frag Shader (outDepth : SV_Depth)
// https://forum.unity.com/threads/depth-texture-on-objects-materials.1439071/
Shader "Unlit/ZDepthOnly"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float color: COLOR;
};
sampler2D _MainTex;
float4 _MainTex_ST;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
o.color = o.vertex.w;
return o;
}
fixed4 frag (v2f i) : SV_Target
{
fixed4 col = pow(Linear01Depth(i.vertex.z), .85);
return col;
}
ENDCG
}
}
}
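// A minimal variation (not from the linked thread): the same frag, but also writing the
// depth buffer through outDepth : SV_Depth. Here i.vertex.z is the fragment's own raw depth,
// so writing it back unchanged is a no-op; replace it with any other raw depth value as needed.
fixed4 frag (v2f i, out float outDepth : SV_Depth) : SV_Target
{
    outDepth = i.vertex.z; // raw (non-linear, platform-dependent) depth
    return pow(Linear01Depth(i.vertex.z), 0.85);
}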
// https://github.com/staggartcreations/Graphics-Raycast/blob/master/GraphicsRaycast/GraphicRaycastShader.shader
Shader "Hidden/Raycast" {
SubShader
{
Tags { "RenderType"="Opaque" }
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
sampler2D_float _CameraDepthTexture;
struct v2f {
float4 pos : SV_POSITION;
float4 scrPos : TEXCOORD0;
float3 normal : NORMAL;
};
//Vertex Shader
v2f vert (appdata_base v){
v2f o;
o.pos = UnityObjectToClipPos (v.vertex);
o.scrPos = ComputeScreenPos(o.pos);
o.normal = v.normal;
return o;
}
//Fragment Shader
half4 frag (v2f i) : COLOR
{
float3 wNormal = UnityObjectToWorldNormal(i.normal.xyz);
//Remap from [-1 to 1] to [0 to 1]
wNormal.rb = wNormal.rb * 0.5 + 0.5;
float depth = Linear01Depth(tex2Dproj(_CameraDepthTexture, UNITY_PROJ_COORD(i.scrPos)).r);
return float4(wNormal, depth);
}
ENDCG
}
}
FallBack "Diffuse"
}
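// A hedged sketch (not part of the shader or repo above): how one fetched pixel of this
// render target could be decoded back into a world normal and an eye-space depth.
// `packed` is an assumed name for the sampled float4.
float3 DecodePackedNormal(float4 packed)
{
    float3 n = packed.rgb;
    n.rb = n.rb * 2.0 - 1.0; // undo the [0,1] remap applied to r and b above
    return n;
}

float DecodeEyeDepth(float4 packed)
{
    return packed.a * _ProjectionParams.z; // Linear01Depth * far plane = eye-space depth
}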
// Shader that modifies depth in the fragment shader: blurs a depth map and writes the result via SV_Depth
// https://forum.unity.com/threads/best-way-to-blur-a-depth-map.595627/#post-3989458
Shader "Hidden/Gaussian Blur Filter"
{
Properties
{
_MainTex("-", 2D) = "white" {}
_MyDepthTex("-", 2D) = "white" {}
}
CGINCLUDE
#include "UnityCG.cginc"
sampler2D _MainTex;
sampler2D_float _MyDepthTex;
float4 _MyDepthTex_TexelSize;
// 9-tap Gaussian filter with linear sampling
// http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
half gaussian_filter(float2 uv, float2 stride)
{
half s = tex2D(_MyDepthTex, float4(uv, 0, 0)).r * 0.227027027;
float2 d1 = stride * 1.3846153846;
s += tex2D(_MyDepthTex, uv + d1).r * 0.3162162162;
s += tex2D(_MyDepthTex, uv - d1).r * 0.3162162162;
float2 d2 = stride * 3.2307692308;
s += tex2D(_MyDepthTex, uv + d2).r * 0.0702702703;
s += tex2D(_MyDepthTex, uv - d2).r * 0.0702702703;
return s;
}
// Quarter downsampler
half4 frag_quarter(v2f_img i, out float outDepth : SV_Depth) : SV_Target
{
float depth = SAMPLE_DEPTH_TEXTURE(_MyDepthTex, i.uv);
outDepth = depth;
float4 d = _MyDepthTex_TexelSize.xyxy * float4(1, 1, -1, -1);
half4 s;
s = tex2D(_MyDepthTex, i.uv + d.xy);
s += tex2D(_MyDepthTex, i.uv + d.xw);
s += tex2D(_MyDepthTex, i.uv + d.zy);
s += tex2D(_MyDepthTex, i.uv + d.zw);
return s * 0.25;
}
// Separable Gaussian filters
half4 frag_blur_h(v2f_img i, out float outDepth : SV_Depth) : SV_Target
{
outDepth = gaussian_filter(i.uv, float2(_MyDepthTex_TexelSize.x, 0));
return 0;
}
half4 frag_blur_v(v2f_img i, out float outDepth : SV_Depth) : SV_Target
{
outDepth = gaussian_filter(i.uv, float2(0, _MyDepthTex_TexelSize.y));
return 0;
}
ENDCG
Subshader
{
Pass
{
ZTest Always Cull Off ZWrite On
CGPROGRAM
#pragma vertex vert_img
#pragma fragment frag_quarter
ENDCG
}
Pass
{
ZTest Always Cull Off ZWrite On
CGPROGRAM
#pragma vertex vert_img
#pragma fragment frag_blur_h
#pragma target 3.0
ENDCG
}
Pass
{
ZTest Always Cull Off ZWrite On
CGPROGRAM
#pragma vertex vert_img
#pragma fragment frag_blur_v
#pragma target 3.0
ENDCG
}
}
}
// https://williamchyr.com/unity-shaders-depth-and-normal-textures/
Shader "Custom/DepthGrayscale" {
SubShader {
Tags { "RenderType"="Opaque" }
Pass{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
sampler2D _CameraDepthTexture;
struct v2f {
float4 pos : SV_POSITION;
float4 scrPos:TEXCOORD1;
};
//Vertex Shader
v2f vert (appdata_base v){
v2f o;
o.pos = mul (UNITY_MATRIX_MVP, v.vertex);
o.scrPos=ComputeScreenPos(o.pos);
//for some reason, the y position of the depth texture comes out inverted
o.scrPos.y = 1 - o.scrPos.y;
return o;
}
//Fragment Shader
half4 frag (v2f i) : COLOR{
float depthValue = Linear01Depth (tex2Dproj(_CameraDepthTexture, UNITY_PROJ_COORD(i.scrPos)).r);
half4 depth;
depth.r = depthValue;
depth.g = depthValue;
depth.b = depthValue;
depth.a = 1;
return depth;
}
ENDCG
}
}
FallBack "Diffuse"
}
Shader "Custom/DepthNormals" {
Properties {
_MainTex ("", 2D) = "white" {}
_HighlightDirection ("Highlight Direction", Vector) = (1, 0,0)
}
SubShader {
Tags { "RenderType"="Opaque" }
Pass{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
sampler2D _CameraDepthNormalsTexture;
float _StartingTime;
float _showNormalColors = 1; //when this is 1, show normal values as colors. when 0, show depth values as colors.
struct v2f {
float4 pos : SV_POSITION;
float4 scrPos: TEXCOORD1;
};
//Our Vertex Shader
v2f vert (appdata_base v){
v2f o;
o.pos = mul (UNITY_MATRIX_MVP, v.vertex);
o.scrPos=ComputeScreenPos(o.pos);
o.scrPos.y = 1 - o.scrPos.y;
return o;
}
sampler2D _MainTex;
float4 _HighlightDirection;
//Our Fragment Shader
half4 frag (v2f i) : COLOR{
float3 normalValues;
float depthValue;
//extract depth value and normal values
DecodeDepthNormal(tex2D(_CameraDepthNormalsTexture, i.scrPos.xy), depthValue, normalValues);
if (_showNormalColors == 1){
float4 normalColor = float4(normalValues, 1);
return normalColor;
} else {
float4 depth = float4(depthValue);
return depth;
}
}
ENDCG
}
}
FallBack "Diffuse"
}
// https://forum.unity.com/threads/writing-to-the-z-depth-buffer-in-urp.896942/
half4 frag (Varyings input, out float depth : SV_Depth) : SV_Target {
    half4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, input.uv);
    depth = 1; // put whatever you want here for the depth
    return col;
}
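// A self-contained sketch around the fragment function above, assuming a minimal URP unlit
// pass. The shader name and any structs/variables not present in the snippet above are
// illustrative, not from the linked thread.
Shader "Unlit/WriteDepthURP"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" "RenderPipeline"="UniversalPipeline" }
        Pass
        {
            ZWrite On
            HLSLPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"

            struct Attributes
            {
                float4 positionOS : POSITION;
                float2 uv : TEXCOORD0;
            };
            struct Varyings
            {
                float4 positionCS : SV_POSITION;
                float2 uv : TEXCOORD0;
            };

            TEXTURE2D(_MainTex);
            SAMPLER(sampler_MainTex);
            CBUFFER_START(UnityPerMaterial)
                float4 _MainTex_ST;
            CBUFFER_END

            Varyings vert (Attributes input)
            {
                Varyings output;
                output.positionCS = TransformObjectToHClip(input.positionOS.xyz);
                output.uv = TRANSFORM_TEX(input.uv, _MainTex);
                return output;
            }

            half4 frag (Varyings input, out float depth : SV_Depth) : SV_Target
            {
                half4 col = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, input.uv);
                depth = 1; // put whatever you want here for the depth
                return col;
            }
            ENDHLSL
        }
    }
}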
// https://www.vertexfragment.com/ramblings/unity-custom-depth/
struct ForwardFragmentOutput
{
    float4 Color : SV_Target;
    float Depth : SV_Depth;
};

ForwardFragmentOutput FragMain(VertOutput input)
{
    ForwardFragmentOutput output = (ForwardFragmentOutput)0;
    output.Color = float4(0.0f, 1.0f, 0.0f, 1.0f);
    output.Depth = input.position.w;
    return output;
}
// DEPTH_MAX is a macro from the linked source (not defined in this snippet); the correct
// value to write depends on UNITY_REVERSED_Z.
output.Depth = DEPTH_MAX;
// Convert World Position to Depth
float3 cameraPosition = _WorldSpaceCameraPos; // Unity provided position of the camera/eye.
float3 positionWS = float3(100.0f, 10.0f, 0.0f); // Position of the sample you want a depth value for.
float distanceToCamera = length(positionWS - cameraPosition);
float linearDepth = (distanceToCamera - _ProjectionParams.y) / (_ProjectionParams.z - _ProjectionParams.y);
float Linear01Depth(float depth, float4 zBufferParam)
{
    return 1.0 / (zBufferParam.x * depth + zBufferParam.y);
}

float LinearDepthToRawDepth(float linearDepth)
{
    return (1.0f - (linearDepth * _ZBufferParams.y)) / (linearDepth * _ZBufferParams.x);
}
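// A hedged sketch combining the above with SV_Depth output, assuming the built-in pipeline
// (UnityCG.cginc). WorldPositionToRawDepth and somWorldPosition are illustrative names.
float WorldPositionToRawDepth(float3 positionWS)
{
    float4 positionCS = UnityWorldToClipPos(positionWS); // world space -> clip space
    return positionCS.z / positionCS.w; // perspective divide gives the depth-buffer value on D3D-like (reversed-Z) platforms
}

// Usage inside a fragment shader:
// fixed4 frag (v2f i, out float outDepth : SV_Depth) : SV_Target
// {
//     outDepth = WorldPositionToRawDepth(someWorldPosition);
//     return 0;
// }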
// https://forum.unity.com/threads/how-to-extract-view-depth-from-a-camera-render-with-a-shader.1506944/#post-9448169
/*
 * This shader is used to take the depth info from an already rendered texture
 * and from it calculate the 3D points corresponding to a LIDAR output.
 * The RGB values correspond to XYZ positions. The A channel marks hit (1) or miss (0).
 *
 * Camera depth information itself is not used directly!
 *
 * This code is Unity's own blend of HLSL (which is like C).
 *
 * Note that the output of this shader can be a different resolution than the
 * input texture from the camera. The output corresponds to equally spaced rays,
 * while the input is equally spaced pixels from a render.
 */
Shader "Custom/LidarShader"
{
/*
* Properties are inputs to the shader, they will show up in the Unity
* inspector for instance.
*/
Properties
{
/*
* Depth texture (camera output). The color format should be set to `None` and
* depth format to something non-zero.
* The name has to be "_MainTex" such that `Graphics.Blit()` can input the RenderTexture.
*/
_MainTex ("Depth Texture (no color)", 2D) = "white" {}
/*
* Create inputs for camera clipping plane.
* Camera info is also available in a shader, except we are processing a texture
* that's already rendered, i.e. the camera that made it is not available here!
*/
_NearPlane ("Camera Near Clipping Plane", float) = 0.01
_FarPlane ("Camera Far Clipping Plane", float) = 100.0
/*
* Create inputs for the FOV of the camera.
* Angles should be in radians, as sin/cos/tan here rely on radians.
*/
_MaxAngleVer ("Camera Vertical Angle (from focal)", float) = 0.78
_MaxAngleHor ("Camera Horizontal Angle (from focal)", float) = 0.78
/*
* Bounds of the near-plane (in Unity meters) of the camera.
* These can be computed directly from the FOV angles, but we save on the computation this way.
*/
_NearPlaneRight ("Right Edge Of Near Plane", float) = 1.0
_NearPlaneTop ("Top Edge Of Near Plane", float) = 1.0
/*
* Y- and x-rotation at rest for this camera. Needed to create 3D position for each point.
*/
_CameraAngleHor ("Camera Horizontal Angle Offset", float) = 0.0
_CameraAngleVer ("Camera Vertical Angle Offset", float) = 0.0
/*
* Constant offset in the camera position (e.g. w.r.t. the link parent).
* Offset is camear frame (_after_ camera rotation offset).
*/
_PositionOffset ("Camera Position Offset", Vector) = (0.0, 0.0, 0.0, 0.0)
}
SubShader
{
Tags { "RenderType"="Opaque" }
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
sampler2D_float _MainTex;
float4 _MainTex_ST;
float _NearPlane;
float _FarPlane;
float _MaxAngleVer;
float _MaxAngleHor;
float _CameraAngleHor;
float _CameraAngleVer;
float _NearPlaneRight;
float _NearPlaneTop;
float4 _PositionOffset;
struct appdata
{
/*
* Vertex original position in 3D (4th component is 1.0):
*/
float4 vertex : POSITION;
};
struct v2f {
/*
* Position of the vertex in the camera clip-space.
*/
float4 vertex : SV_POSITION;
/*
* Position on the screen as x,y in the range from 0.0 to 1.0.
*/
float2 screenPos : TEXCOORD0;
};
/*
* Vertex shader. Runs on each vertex in the 3D scene.
*/
v2f vert (appdata v)
{
v2f o;
// Basically gets the projection of the object coordinate unto the near-clip
// plane of the camera (= in clip space):
o.vertex = UnityObjectToClipPos(v.vertex);
// Get position on the screen corresponding to position in clip space:
o.screenPos = ComputeScreenPos(o.vertex);
return o;
}
/*
* Convert non-linear depth in 0.0 - 1.0 range to real distance.
*/
float DepthToDistance(float depth)
{
// This is just how Unity seems to have encoded the depth:
return _FarPlane * _NearPlane / (_FarPlane - depth * (_FarPlane - _NearPlane));
}
/*
* Fragment shader. Runs on each pixel to-be rendered on the screen.
*/
float4 frag(v2f i) : SV_Target
{
// This output pixel of the resulting texture really corresponds to a single LIDAR ray.
// So we need to calculate which location on the input texture (= camera output) corresponds
// to this ray.
i.screenPos = (i.screenPos - 0.5) * 2.0; // Re-map [0.0, 1.0] to [-1.0, 1.0]
// Angles of this specific ray:
float angleHor = _MaxAngleHor * i.screenPos.x; // Yaw, negative left from the focal
float angleVer = _MaxAngleVer * i.screenPos.y; // Pitch, negative below the focal
// Map from [-1.0, 1.0], [-1.0, 1.0] to [-nearRight, nearRight], [-nearTop, -nearTop]
i.screenPos.x *= _NearPlaneRight;
i.screenPos.y *= _NearPlaneTop;
// Calculate screen position, based on intersection of rays with a plane at `planeDist`
// in front of the viewpoint.
float planeDist = _NearPlane / (cos(angleHor) * cos(angleVer));
i.screenPos.x = planeDist * sin(angleHor) * cos(angleVer);
i.screenPos.y = planeDist * sin(angleVer);
// Revert map to [-1.0, 1.0]
i.screenPos.x /= _NearPlaneRight;
i.screenPos.y /= _NearPlaneTop;
i.screenPos = (i.screenPos / 2.0) + 0.5; // Revert map to [0.0, 1.0]
// Read depth directly from the texture (in the R-channel), as number between 0.0 and 1.0:
float depth = tex2D(_MainTex, i.screenPos);
// This function will interpolate based on the screen position of the pixel and also works if the resolutions are different.
// Some graphics APIs invert the meaning of depth, flip it:
#ifdef UNITY_REVERSED_Z
depth = 1.0 - depth;
#endif
if (depth >= 0.999999)
{
const float nan = 0.0 / 0.0; // Hacky way to get a NaN float
return float4(nan, nan, nan, 0.0); // Mark as 'miss'
}
// However, the scaling is not linear - make it so:
depth = DepthToDistance(depth);
// Depth values are really perpendicular to the camera, i.e. the z-value we look for.
// Compute the other coordinates from the spherical coordinates:
depth = depth / (cos(angleHor) * cos(angleVer));
// Account for the camera orientation w.r.t. its parent:
angleHor += _CameraAngleHor;
angleVer += _CameraAngleVer;
// 3D position in ROS2 (!) coordinates:
float4 pos = {
cos(angleHor) * cos(angleVer), // z
-sin(angleHor) * cos(angleVer), // -x
sin(angleVer), // y
0.0
};
// By already transforming to ROS coordinates we skip a post-processing step
pos = pos * depth + _PositionOffset;
pos.a = 1.0; // Mark this point as 'hit'
return pos;
}
ENDCG
}
}
}
@unitycoder (Author) commented:
// Writing depth from a geometry-shader pipeline (from IATK's BarShader, see link below):
struct f_output
{
    float4 color : COLOR;
    float depth : SV_Depth;
};
f_output FS_Main(g2f input)
{
    f_output o;
    // ...
    o.depth = input.vertex.z;
    // ...
    return o;
}
https://github.com/yeoez001/Clean_IATK_SteamVR/blob/b9b8f70ffa010058e69ee1e13c085a247c0eb961/Assets/IATK/Shaders/BarShader.shader#L493
