Last active
February 9, 2021 21:10
-
-
Save marns/0df3335b10b8f2065ae29f18de2c04dd to your computer and use it in GitHub Desktop.
ARCameraBackground with legacy renderer support stripped, plus ARCameraBlur, which blits camera frames to a texture and applies an optional blur / post-process pass. Add ARCameraBackgroundURP to your camera instead of ARCameraBackground, then add ARCameraBlur with a RenderTexture assigned. Draw that texture somewhere, e.g. on a Canvas image.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
using System.Collections.Generic; | |
using UnityEngine.Serialization; | |
namespace UnityEngine.XR.ARFoundation
{
    /// <summary>
    /// ARCameraBackgroundURP without Legacy Renderer support.
    ///
    /// <para>Add this component to a <c>Camera</c> to copy the color camera's texture onto the background.</para>
    /// <para>If you are using the Lightweight Render Pipeline (version 5.7.2 or later) or the Universal Render
    /// Pipeline (version 7.0.0 or later), you must also add the <see cref="ARBackgroundRendererFeature"/> to the list
    /// of render features for the scriptable renderer.</para>
    /// </summary>
    /// <remarks>
    /// <para>
    /// To add the <see cref="ARBackgroundRendererFeature"/> to the list of render features for the scriptable
    /// renderer:
    /// <list type="bullet">
    /// <item><description>In Project Settings -> Graphics, select the render pipeline asset (either
    /// <c>LightweightRenderPipelineAsset</c> or <c>UniversalRenderPipelineAsset</c>) that is in the Scriptable Render
    /// Pipeline Settings field.</description></item>
    /// <item><description>In the Inspector with the render pipeline asset selected, ensure that the Render Type is set
    /// to Custom.</description></item>
    /// <item><description>In the Inspector with the render pipeline asset selected, select the Render Type -> Data
    /// asset which would be of type <c>ForwardRendererData</c>.</description></item>
    /// <item><description>In the Inspector with the forward renderer data selected, ensure the Render Features list
    /// contains a <see cref="ARBackgroundRendererFeature"/>.</description></item>
    /// </list>
    /// </para>
    /// <para>To customize background rendering with a scriptable render pipeline, create a
    /// <c>ScriptableRendererFeature</c> with the background rendering commands, and insert the
    /// <c>ScriptableRendererFeature</c> into the list of render features for the scriptable renderer.</para>
    /// </remarks>
    [DisallowMultipleComponent]
    [RequireComponent(typeof(Camera))]
    [RequireComponent(typeof(ARCameraManager))]
    public class ARCameraBackgroundURP : MonoBehaviour
    {
        /// <summary>
        /// Name of the shader parameter for the display transform matrix.
        /// </summary>
        const string k_DisplayTransformName = "_UnityDisplayTransform";

        /// <summary>
        /// Property ID for the shader parameter for the display transform matrix.
        /// </summary>
        static readonly int k_DisplayTransformId = Shader.PropertyToID(k_DisplayTransformName);

        /// <summary>
        /// The Property ID for the shader parameter for the forward vector's scaled length.
        /// </summary>
        static readonly int k_CameraForwardScaleId = Shader.PropertyToID("_UnityCameraForwardScale");

        /// <summary>
        /// The camera to which the projection matrix is set on each frame event.
        /// </summary>
        Camera m_Camera;

        /// <summary>
        /// The camera manager from which frame information is pulled.
        /// </summary>
        ARCameraManager m_CameraManager;

        /// <summary>
        /// The occlusion manager, which may not exist, from which occlusion information is pulled.
        /// </summary>
        AROcclusionManager m_OcclusionManager;

        /// <summary>
        /// Whether to use the custom material for rendering the background.
        /// </summary>
        [SerializeField, FormerlySerializedAs("m_OverrideMaterial")]
        bool m_UseCustomMaterial;

        /// <summary>
        /// A custom material for rendering the background.
        /// </summary>
        [SerializeField, FormerlySerializedAs("m_Material")]
        Material m_CustomMaterial;

        /// <summary>
        /// The previous clear flags for the camera, if any. Saved when background rendering is
        /// enabled and cleared again once restored, so a stale value is never re-applied.
        /// </summary>
        CameraClearFlags? m_PreviousCameraClearFlags;

        /// <summary>
        /// The original field of view of the camera, before enabling background rendering.
        /// </summary>
        float? m_PreviousCameraFieldOfView;

        /// <summary>
        /// Whether background rendering is enabled.
        /// </summary>
        bool m_BackgroundRenderingEnabled;

        /// <summary>
        /// The camera to which the projection matrix is set on each frame event.
        /// </summary>
        /// <value>
        /// The camera to which the projection matrix is set on each frame event.
        /// </value>
#if UNITY_EDITOR
        protected new Camera camera => m_Camera;
#else // UNITY_EDITOR
        protected Camera camera => m_Camera;
#endif // UNITY_EDITOR

        /// <summary>
        /// The camera manager from which frame information is pulled.
        /// </summary>
        /// <value>
        /// The camera manager from which frame information is pulled.
        /// </value>
        protected ARCameraManager cameraManager => m_CameraManager;

        /// <summary>
        /// The occlusion manager, which may not exist, from which occlusion information is pulled.
        /// </summary>
        protected AROcclusionManager occlusionManager => m_OcclusionManager;

        /// <summary>
        /// The current <c>Material</c> used for background rendering.
        /// </summary>
        public Material material => (useCustomMaterial && (customMaterial != null)) ? customMaterial : defaultMaterial;

        /// <summary>
        /// Whether to use the custom material for rendering the background.
        /// </summary>
        /// <value>
        /// <c>true</c> if the custom material should be used for rendering the camera background. Otherwise,
        /// <c>false</c>.
        /// </value>
        public bool useCustomMaterial { get => m_UseCustomMaterial; set => m_UseCustomMaterial = value; }

        /// <summary>
        /// A custom material for rendering the background.
        /// </summary>
        /// <value>
        /// A custom material for rendering the background.
        /// </value>
        public Material customMaterial { get => m_CustomMaterial; set => m_CustomMaterial = value; }

        /// <summary>
        /// Whether background rendering is enabled.
        /// </summary>
        /// <value>
        /// <c>true</c> if background rendering is enabled and if at least one camera frame has been received.
        /// Otherwise, <c>false</c>.
        /// </value>
        public bool backgroundRenderingEnabled => m_BackgroundRenderingEnabled;

        /// <summary>
        /// The default material for rendering the background.
        /// </summary>
        /// <value>
        /// The default material for rendering the background.
        /// </value>
        Material defaultMaterial => cameraManager.cameraMaterial;

        void Awake()
        {
            m_Camera = GetComponent<Camera>();
            m_CameraManager = GetComponent<ARCameraManager>();
            // The occlusion manager is optional; GetComponent may return null here.
            m_OcclusionManager = GetComponent<AROcclusionManager>();
        }

        void OnEnable()
        {
            // Ensure that background rendering is disabled until the first camera frame is received.
            m_BackgroundRenderingEnabled = false;
            cameraManager.frameReceived += OnCameraFrameReceived;
            if (occlusionManager != null)
            {
                occlusionManager.frameReceived += OnOcclusionFrameReceived;
            }
        }

        void OnDisable()
        {
            if (occlusionManager != null)
            {
                occlusionManager.frameReceived -= OnOcclusionFrameReceived;
            }
            cameraManager.frameReceived -= OnCameraFrameReceived;
            DisableBackgroundRendering();

            // We are no longer setting the projection matrix so tell the camera to resume its normal projection matrix
            // calculations.
            camera.ResetProjectionMatrix();
        }

        /// <summary>
        /// Enable background rendering by disabling the camera's clear flags and saving the
        /// current field of view so it can be restored later.
        /// </summary>
        void EnableBackgroundRendering()
        {
            m_BackgroundRenderingEnabled = true;
            DisableBackgroundClearFlags();
            m_PreviousCameraFieldOfView = m_Camera.fieldOfView;
        }

        /// <summary>
        /// Disable background rendering by restoring the camera's clear flags and field of view.
        /// </summary>
        void DisableBackgroundRendering()
        {
            m_BackgroundRenderingEnabled = false;
            RestoreBackgroundClearFlags();
            if (m_PreviousCameraFieldOfView.HasValue)
            {
                m_Camera.fieldOfView = m_PreviousCameraFieldOfView.Value;
                m_PreviousCameraFieldOfView = null;
            }
        }

        /// <summary>
        /// Set the camera's clear flags to do nothing while preserving the previous camera clear flags.
        /// </summary>
        void DisableBackgroundClearFlags()
        {
            m_PreviousCameraClearFlags = m_Camera.clearFlags;
            m_Camera.clearFlags = CameraClearFlags.Nothing;
        }

        /// <summary>
        /// Restore the previous camera's clear flags, if any.
        /// </summary>
        void RestoreBackgroundClearFlags()
        {
            if (m_PreviousCameraClearFlags != null)
            {
                m_Camera.clearFlags = m_PreviousCameraClearFlags.Value;
                // Clear the saved value so a stale setting is not re-applied if rendering is
                // disabled again without having been re-enabled. This mirrors the handling of
                // m_PreviousCameraFieldOfView in DisableBackgroundRendering.
                m_PreviousCameraClearFlags = null;
            }
        }

        /// <summary>
        /// Callback for the camera frame event. Toggles background rendering based on texture
        /// availability, pushes the frame's textures and display transform into the background
        /// material, and applies the frame's projection matrix to the camera.
        /// </summary>
        /// <param name="eventArgs">The camera event arguments.</param>
        void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            // Enable background rendering on the first frame that carries camera textures;
            // disable it again if the textures disappear.
            if (m_BackgroundRenderingEnabled)
            {
                if (eventArgs.textures.Count == 0)
                {
                    DisableBackgroundRendering();
                }
            }
            else if (eventArgs.textures.Count > 0)
            {
                EnableBackgroundRendering();
            }

            Material material = this.material;
            if (material != null)
            {
                var count = eventArgs.textures.Count;
                for (int i = 0; i < count; ++i)
                {
                    material.SetTexture(eventArgs.propertyNameIds[i], eventArgs.textures[i]);
                }

                if (eventArgs.displayMatrix.HasValue)
                {
                    material.SetMatrix(k_DisplayTransformId, eventArgs.displayMatrix.Value);
                }

                SetMaterialKeywords(material, eventArgs.enabledMaterialKeywords, eventArgs.disabledMaterialKeywords);
            }

            if (eventArgs.projectionMatrix.HasValue)
            {
                camera.projectionMatrix = eventArgs.projectionMatrix.Value;

                // Recompute the camera's field of view from the projection matrix so that other
                // systems reading Camera.fieldOfView stay consistent with the AR projection.
                // projectionMatrix[1,1] is the vertical focal term (cot(fov/2)).
                const float twiceRad2Deg = 2 * Mathf.Rad2Deg;
                var halfHeightOverNear = 1 / camera.projectionMatrix[1, 1];
                camera.fieldOfView = Mathf.Atan(halfHeightOverNear) * twiceRad2Deg;
            }
        }

        /// <summary>
        /// Callback for the occlusion frame event. Pushes the occlusion textures and camera
        /// forward scale into the background material.
        /// </summary>
        /// <param name="eventArgs">The occlusion frame event arguments.</param>
        void OnOcclusionFrameReceived(AROcclusionFrameEventArgs eventArgs)
        {
            Material material = this.material;
            if (material != null)
            {
                var count = eventArgs.textures.Count;
                for (int i = 0; i < count; ++i)
                {
                    material.SetTexture(eventArgs.propertyNameIds[i], eventArgs.textures[i]);
                }

                SetMaterialKeywords(material, eventArgs.enabledMaterialKeywords, eventArgs.disabledMaterialKeywords);

                // Set scale: this computes the effect the camera's localToWorld has on the length of the
                // forward vector, i.e., how much farther from the camera things are than with unit scale.
                var forward = transform.localToWorldMatrix.GetColumn(2);
                var scale = forward.magnitude;
                material.SetFloat(k_CameraForwardScaleId, scale);
            }
        }

        /// <summary>
        /// Enables and disables the given sets of shader keywords on <paramref name="material"/>.
        /// Either list may be null; keywords already in the desired state are left untouched.
        /// </summary>
        /// <param name="material">The material whose keywords are updated.</param>
        /// <param name="enabledMaterialKeywords">Keywords to enable, or null.</param>
        /// <param name="disabledMaterialKeywords">Keywords to disable, or null.</param>
        void SetMaterialKeywords(Material material, List<string> enabledMaterialKeywords,
            List<string> disabledMaterialKeywords)
        {
            if (enabledMaterialKeywords != null)
            {
                foreach (var materialKeyword in enabledMaterialKeywords)
                {
                    if (!material.IsKeywordEnabled(materialKeyword))
                    {
                        material.EnableKeyword(materialKeyword);
                    }
                }
            }

            if (disabledMaterialKeywords != null)
            {
                foreach (var materialKeyword in disabledMaterialKeywords)
                {
                    if (material.IsKeywordEnabled(materialKeyword))
                    {
                        material.DisableKeyword(materialKeyword);
                    }
                }
            }
        }
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
using UnityEngine; | |
using UnityEngine.XR.ARFoundation; | |
namespace Postprocessing
{
    /// <summary>
    /// Copies the AR camera frame into a <see cref="RenderTexture"/> on every camera frame and
    /// optionally applies a two-pass blur material. Draw the target texture anywhere, e.g. on a
    /// Canvas image.
    /// </summary>
    public class ARCameraBlur : MonoBehaviour
    {
        /// <summary>
        /// Whether the blur passes are applied after the camera frame is copied.
        /// </summary>
        public bool BlurEnabled = false;

        /// <summary>
        /// Target texture that receives the (optionally blurred) camera frame.
        /// </summary>
        [SerializeField] private RenderTexture _renderTexture = null;

        /// <summary>
        /// Blur material. Pass 0 is applied first (vertical), pass 1 second (horizontal).
        /// </summary>
        [SerializeField] private Material _blurMat = null;

        private Camera _camera;
        private ARCameraManager _cameraManager;

        // Scratch texture for the ping-pong blur passes; created lazily and recreated if the
        // target texture's dimensions change.
        private RenderTexture _tempTexture;

        void Awake()
        {
            // NOTE(review): assumes the ARCameraManager lives on the main camera; this component
            // itself may be on a different object (e.g. next to the Canvas showing the texture).
            _camera = Camera.main;
            _cameraManager = _camera != null ? _camera.GetComponent<ARCameraManager>() : null;
            if (_cameraManager == null)
            {
                // Without a camera manager there is no frame event to subscribe to.
                Debug.LogWarning("ARCameraBlur: no ARCameraManager found on the main camera.", this);
            }
        }

        void OnDestroy()
        {
            ReleaseTempTexture();
        }

        void OnEnable()
        {
            if (_cameraManager != null)
            {
                _cameraManager.frameReceived += OnCameraFrameReceived;
            }
        }

        void OnDisable()
        {
            if (_cameraManager != null)
            {
                _cameraManager.frameReceived -= OnCameraFrameReceived;
            }
        }

        private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            // The camera material may be null before the provider has produced one; nothing to
            // copy in that case. Likewise, skip if no target texture is assigned.
            var cameraMaterial = _cameraManager.cameraMaterial;
            if (_renderTexture == null || cameraMaterial == null)
            {
                return;
            }

            // Copy the camera frame to the target render texture.
            Graphics.Blit(null, _renderTexture, cameraMaterial);

            if (!BlurEnabled || _blurMat == null)
            {
                return;
            }

            // (Re)create the scratch texture if missing or if the target was resized, so the
            // blur passes always operate on matching dimensions.
            if (_tempTexture == null
                || _tempTexture.width != _renderTexture.width
                || _tempTexture.height != _renderTexture.height)
            {
                ReleaseTempTexture();
                _tempTexture = new RenderTexture(_renderTexture);
            }

            // Apply blur shader passes: vertical into the scratch texture, horizontal back.
            Graphics.Blit(_renderTexture, _tempTexture, _blurMat, 0); // Vertical blur
            Graphics.Blit(_tempTexture, _renderTexture, _blurMat, 1); // Horizontal blur
        }

        // Releases the GPU resource and destroys the scratch texture, if any.
        private void ReleaseTempTexture()
        {
            if (_tempTexture != null)
            {
                _tempTexture.Release();
                Destroy(_tempTexture);
                _tempTexture = null;
            }
        }
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment