using System;
using UnityEngine.Serialization;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace UnityEngine.Rendering.HighDefinition
{
/// <summary>
/// Base class for reflection like probes.
/// </summary>
[ExecuteAlways]
public abstract partial class HDProbe : MonoBehaviour
{
    /// <summary>
    /// Store the settings computed during a rendering
    /// </summary>
    [Serializable]
    public struct RenderData
    {
        // Backing fields; FormerlySerializedAs preserves data saved under the old public field names.
        [SerializeField, FormerlySerializedAs("worldToCameraRHS")]
        Matrix4x4 m_WorldToCameraRHS;
        [SerializeField, FormerlySerializedAs("projectionMatrix")]
        Matrix4x4 m_ProjectionMatrix;
        [SerializeField, FormerlySerializedAs("capturePosition")]
        Vector3 m_CapturePosition;
        [SerializeField]
        Quaternion m_CaptureRotation;
        [SerializeField]
        float m_FieldOfView;
        [SerializeField]
        float m_Aspect;

        /// <summary>World to camera matrix (Right Hand).</summary>
        public Matrix4x4 worldToCameraRHS => m_WorldToCameraRHS;
        /// <summary>Projection matrix.</summary>
        public Matrix4x4 projectionMatrix => m_ProjectionMatrix;
        /// <summary>The capture position.</summary>
        public Vector3 capturePosition => m_CapturePosition;
        /// <summary>The capture rotation.</summary>
        public Quaternion captureRotation => m_CaptureRotation;
        /// <summary>The field of view.</summary>
        public float fieldOfView => m_FieldOfView;
        /// <summary>The aspect ratio.</summary>
        public float aspect => m_Aspect;

        /// <summary>
        /// Instantiate a new RenderData from camera and position settings.
        /// </summary>
        /// <param name="camera">The camera settings used.</param>
        /// <param name="position">The position settings used.</param>
        public RenderData(CameraSettings camera, CameraPositionSettings position)
            : this(
                position.GetUsedWorldToCameraMatrix(),
                camera.frustum.GetUsedProjectionMatrix(),
                position.position,
                position.rotation,
                camera.frustum.fieldOfView,
                camera.frustum.aspect
            )
        {
        }

        /// <summary>
        /// Instantiate a new RenderData from specified inputs.
        /// </summary>
        /// <param name="worldToCameraRHS">The world to camera matrix (Right Hand)</param>
        /// <param name="projectionMatrix">The projection matrix.</param>
        /// <param name="capturePosition">The capture position.</param>
        /// <param name="captureRotation">The capture rotation.</param>
        /// <param name="fov">The field of view.</param>
        /// <param name="aspect">The aspect ratio.</param>
        public RenderData(
            Matrix4x4 worldToCameraRHS,
            Matrix4x4 projectionMatrix,
            Vector3 capturePosition,
            Quaternion captureRotation,
            float fov,
            float aspect
        )
        {
            m_WorldToCameraRHS = worldToCameraRHS;
            m_ProjectionMatrix = projectionMatrix;
            m_CapturePosition = capturePosition;
            m_CaptureRotation = captureRotation;
            m_FieldOfView = fov;
            m_Aspect = aspect;
        }
    }

    /// <summary>
    /// Backed values of the probe settings.
    /// Don't use this directly except for migration code.
    /// </summary>
    // Serialized Data
    [SerializeField]
    // This one is protected only to have access during migration of children classes.
    // In children classes, it must be used only during the migration.
    protected ProbeSettings m_ProbeSettings = ProbeSettings.NewDefault();
    // CS0649: fields below are assigned by Unity's serialization, never in code.
#pragma warning disable 649
    [SerializeField]
    ProbeSettingsOverride m_ProbeSettingsOverride;
    [SerializeField]
    ReflectionProxyVolumeComponent m_ProxyVolume;
#pragma warning restore 649
    [SerializeField]
    Texture m_BakedTexture;
    [SerializeField]
    Texture m_CustomTexture;
    [SerializeField]
    RenderData m_BakedRenderData;
    [SerializeField]
    RenderData m_CustomRenderData;

    // Only used in editor, but this data needs to be probe instance specific
    // (Contains: UI section states)
    [SerializeField]
    uint m_EditorOnlyData;

    // Runtime Data
    RTHandle m_RealtimeTexture;
    RTHandle m_RealtimeDepthBuffer;
    RenderData m_RealtimeRenderData;
    bool m_WasRenderedSinceLastOnDemandRequest = true;
#if UNITY_EDITOR
    // Set when the probe was rendered while shaders were still compiling asynchronously,
    // so it can be re-rendered once compilation finishes (see requiresRealtimeUpdate).
    bool m_WasRenderedDuringAsyncCompilation = false;
#endif

    // Array of names that will be used in the Render Loop to name the probes in debug
    // (6 entries: one per cubemap face for reflection probes; only index 0 is used for planar probes).
    internal string[] probeName = new string[6];

    // This probe object is dumb, it's the caller's / pipeline's responsibility
    // to calculate its exposure values, since this requires frame data.
    float m_ProbeExposureValue = 1.0f;

    /// <summary>Set and used by the pipeline, depending on the resolved configuration of a probe.</summary>
    public bool ExposureControlEnabled { set; get; }

    // Called by the pipeline to store the exposure used when rendering this probe.
    internal void SetProbeExposureValue(float exposure)
    {
        m_ProbeExposureValue = exposure;
    }

    // Exposure value last set by the pipeline (defaults to 1.0).
    internal float ProbeExposureValue()
    {
        return m_ProbeExposureValue;
    }

    // True when the probe must be (re-)rendered this frame, depending on its realtime mode.
    internal bool requiresRealtimeUpdate
    {
        get
        {
#if UNITY_EDITOR
            // Re-render once async shader compilation has completed, otherwise the
            // probe would keep showing the fallback shader result.
            if (m_WasRenderedDuringAsyncCompilation && !ShaderUtil.anythingCompiling)
                return true;
#endif
            if (mode != ProbeSettings.Mode.Realtime)
                return false;
            switch (realtimeMode)
            {
                case ProbeSettings.RealtimeMode.EveryFrame: return true;
                case ProbeSettings.RealtimeMode.OnEnable: return !wasRenderedAfterOnEnable;
                case ProbeSettings.RealtimeMode.OnDemand: return !m_WasRenderedSinceLastOnDemandRequest;
                default: throw new ArgumentOutOfRangeException(nameof(realtimeMode));
            }
        }
    }

    // True when the probe has usable rendered data for its current mode:
    // a non-null texture, and (for realtime probes) at least one completed render.
    internal bool HasValidRenderedData()
    {
        bool hasValidTexture = texture != null;
        if (mode != ProbeSettings.Mode.Realtime)
        {
            return hasValidTexture;
        }
        else
        {
            return hasEverRendered && hasValidTexture;
        }
    }

    // Public API
    // Texture asset
    /// <summary>
    /// The baked texture. Can be null if the probe was never baked.
    /// </summary>
    /// <remarks>
    /// Most of the time, you do not need to set this value yourself. You can set this property in situations
    /// where you want to manually assign data that differs from what Unity generates.
    /// </remarks>
    public Texture bakedTexture
    {
        get => m_BakedTexture;
        set => m_BakedTexture = value;
    }

    /// <summary>
    /// Texture used in custom mode.
    /// </summary>
    public Texture customTexture
    {
        get => m_CustomTexture;
        set => m_CustomTexture = value;
    }

    /// <summary>
    /// The allocated realtime texture. Can be null if the probe never rendered with the realtime mode.
    /// </summary>
    /// <remarks>
    /// Most of the time, you do not need to set this value yourself. You can set this property in situations
    /// where you want to manually assign data that differs from what Unity generates.
    /// </remarks>
    public RenderTexture realtimeTexture
    {
        // Explicit null check: the implicit RTHandle -> RenderTexture conversion must not
        // be applied to a null handle.
        get => m_RealtimeTexture != null ? m_RealtimeTexture : null;
        set
        {
            // Release the previously allocated handle before wrapping the new texture.
            if (m_RealtimeTexture != null)
                m_RealtimeTexture.Release();
            m_RealtimeTexture = RTHandles.Alloc(value);
            m_RealtimeTexture.rt.name = $"ProbeRealTimeTexture_{name}";
        }
    }

    /// <summary>
    /// The allocated realtime depth texture. Can be null if the probe never rendered with the realtime mode.
    /// </summary>
    /// <remarks>
    /// Most of the time, you do not need to set this value yourself. You can set this property in situations
    /// where you want to manually assign data that differs from what Unity generates.
    /// </remarks>
    public RenderTexture realtimeDepthTexture
    {
        get => m_RealtimeDepthBuffer != null ? m_RealtimeDepthBuffer : null;
        set
        {
            if (m_RealtimeDepthBuffer != null)
                m_RealtimeDepthBuffer.Release();
            m_RealtimeDepthBuffer = RTHandles.Alloc(value);
            m_RealtimeDepthBuffer.rt.name = $"ProbeRealTimeDepthTexture_{name}";
        }
    }

    /// <summary>
    /// Returns an RThandle reference to the realtime texture where the color result of the probe is stored.
    /// </summary>
    public RTHandle realtimeTextureRTH
    {
        get => m_RealtimeTexture;
    }

    /// <summary>
    /// Returns an RThandle reference to the realtime texture where the depth result of the probe is stored.
    /// </summary>
    public RTHandle realtimeDepthTextureRTH
    {
        get => m_RealtimeDepthBuffer;
    }

    /// <summary>
    /// The texture used during lighting for this probe.
    /// </summary>
    public Texture texture => GetTexture(mode);

    /// <summary>
    /// Get the texture for a specific mode.
    /// </summary>
    /// <param name="targetMode">The mode to query.</param>
    /// <returns>The texture for this specified mode.</returns>
    /// <exception cref="ArgumentOutOfRangeException">When the <paramref name="targetMode"/> is invalid.</exception>
    public Texture GetTexture(ProbeSettings.Mode targetMode)
    {
        switch (targetMode)
        {
            case ProbeSettings.Mode.Baked: return m_BakedTexture;
            case ProbeSettings.Mode.Custom: return m_CustomTexture;
            case ProbeSettings.Mode.Realtime: return m_RealtimeTexture;
            default: throw new ArgumentOutOfRangeException();
        }
    }

    /// <summary>
    /// Set the texture for a specific target mode.
    /// </summary>
    /// <param name="targetMode">The mode to update.</param>
    /// <param name="texture">The texture to set.</param>
    /// <returns>The texture that was set.</returns>
    /// <exception cref="ArgumentException">When the texture is invalid (not a RenderTexture for the Realtime mode).</exception>
    /// <exception cref="ArgumentOutOfRangeException">When the mode is invalid</exception>
    public Texture SetTexture(ProbeSettings.Mode targetMode, Texture texture)
    {
        if (targetMode == ProbeSettings.Mode.Realtime && !(texture is RenderTexture))
            throw new ArgumentException("'texture' must be a RenderTexture for the Realtime mode.");
        switch (targetMode)
        {
            case ProbeSettings.Mode.Baked: return m_BakedTexture = texture;
            case ProbeSettings.Mode.Custom: return m_CustomTexture = texture;
            case ProbeSettings.Mode.Realtime: return realtimeTexture = (RenderTexture)texture;
            default: throw new ArgumentOutOfRangeException();
        }
    }

    /// <summary>
    /// Set the depth texture for a specific target mode.
    /// </summary>
    /// <param name="targetMode">The mode to update.</param>
    /// <param name="texture">The texture to set.</param>
    /// <returns>The texture that was set.</returns>
    /// <exception cref="ArgumentException">When the texture is invalid (not a RenderTexture for the Realtime mode).</exception>
    /// <exception cref="ArgumentOutOfRangeException">When the mode is invalid</exception>
    public Texture SetDepthTexture(ProbeSettings.Mode targetMode, Texture texture)
    {
        if (targetMode == ProbeSettings.Mode.Realtime && !(texture is RenderTexture))
            throw new ArgumentException("'texture' must be a RenderTexture for the Realtime mode.");
        switch (targetMode)
        {
            // NOTE(review): Baked/Custom cases write the color texture fields (same as SetTexture),
            // not a dedicated depth field — only the Realtime case targets the depth buffer. Confirm intended.
            case ProbeSettings.Mode.Baked: return m_BakedTexture = texture;
            case ProbeSettings.Mode.Custom: return m_CustomTexture = texture;
            case ProbeSettings.Mode.Realtime: return realtimeDepthTexture = (RenderTexture)texture;
            default: throw new ArgumentOutOfRangeException();
        }
    }

    /// <summary>
    /// The render data of the last bake
    /// </summary>
    public RenderData bakedRenderData { get => m_BakedRenderData; set => m_BakedRenderData = value; }

    /// <summary>
    /// The render data of the custom mode
    /// </summary>
    public RenderData customRenderData { get => m_CustomRenderData; set => m_CustomRenderData = value; }

    /// <summary>
    /// The render data of the last realtime rendering
    /// </summary>
    public RenderData realtimeRenderData { get => m_RealtimeRenderData; set => m_RealtimeRenderData = value; }

    /// <summary>
    /// The currently used render data.
    /// </summary>
    public RenderData renderData => GetRenderData(mode);

    /// <summary>
    /// Get the render data of a specific mode.
    ///
    /// Note: The HDProbe stores only one RenderData per mode, even for view dependent probes with multiple viewers.
    /// In that case, make sure that you have set the RenderData relative to the expected viewer before rendering.
    /// Otherwise the data retrieved by this function will be wrong.
    /// </summary>
    /// <param name="targetMode">The mode to query</param>
    /// <returns>The requested render data</returns>
    /// <exception cref="ArgumentOutOfRangeException">When the mode is invalid</exception>
    public RenderData GetRenderData(ProbeSettings.Mode targetMode)
    {
        switch (targetMode)
        {
            case ProbeSettings.Mode.Baked: return bakedRenderData;
            case ProbeSettings.Mode.Custom: return customRenderData;
            case ProbeSettings.Mode.Realtime: return realtimeRenderData;
            default: throw new ArgumentOutOfRangeException();
        }
    }

    /// <summary>
    /// Set the render data for a specific mode.
    ///
    /// Note: The HDProbe stores only one RenderData per mode, even for view dependent probes with multiple viewers.
    /// In that case, make sure that you have set the RenderData relative to the expected viewer before rendering.
    /// </summary>
    /// <param name="targetMode">The mode to update</param>
    /// <param name="renderData">The data to set</param>
    /// <exception cref="ArgumentOutOfRangeException">When the mode is invalid</exception>
    public void SetRenderData(ProbeSettings.Mode targetMode, RenderData renderData)
    {
        switch (targetMode)
        {
            case ProbeSettings.Mode.Baked: bakedRenderData = renderData; break;
            case ProbeSettings.Mode.Custom: customRenderData = renderData; break;
            case ProbeSettings.Mode.Realtime: realtimeRenderData = renderData; break;
            default: throw new ArgumentOutOfRangeException();
        }
    }

    // Settings
    // General
    /// <summary>
    /// The probe type
    /// </summary>
    public ProbeSettings.ProbeType type { get => m_ProbeSettings.type; protected set => m_ProbeSettings.type = value; }

    /// <summary>The capture mode.</summary>
    public ProbeSettings.Mode mode { get => m_ProbeSettings.mode; set => m_ProbeSettings.mode = value; }

    /// <summary>
    /// The realtime mode of the probe
    /// </summary>
    public ProbeSettings.RealtimeMode realtimeMode { get => m_ProbeSettings.realtimeMode; set => m_ProbeSettings.realtimeMode = value; }

    /// <summary>
    /// Resolution of the probe.
    /// </summary>
    public PlanarReflectionAtlasResolution resolution
    {
        get
        {
            var hdrp = (HDRenderPipeline)RenderPipelineManager.currentPipeline;
            // We return whatever value is in resolution if there is no hdrp pipeline (nothing will work anyway)
            return hdrp != null ? m_ProbeSettings.resolutionScalable.Value(hdrp.asset.currentPlatformRenderPipelineSettings.planarReflectionResolution) : m_ProbeSettings.resolution;
        }
    }

    // Lighting
    /// <summary>Light layer to use by this probe.</summary>
    public LightLayerEnum lightLayers
    { get => m_ProbeSettings.lighting.lightLayer; set => m_ProbeSettings.lighting.lightLayer = value; }

    /// <summary>This function return a mask of light layers as uint and handle the case of Everything as being 0xFF and not -1</summary>
    public uint lightLayersAsUInt => lightLayers < 0 ? (uint)LightLayerEnum.Everything : (uint)lightLayers;

    /// <summary>Multiplier factor of reflection (non PBR parameter).</summary>
    public float multiplier
    { get => m_ProbeSettings.lighting.multiplier; set => m_ProbeSettings.lighting.multiplier = value; }

    /// <summary>Weight for blending amongst probes (non PBR parameter).</summary>
    public float weight
    { get => m_ProbeSettings.lighting.weight; set => m_ProbeSettings.lighting.weight = value; }

    /// <summary>The distance at which reflections smoothly fade out before HDRP cut them completely.</summary>
    public float fadeDistance
    { get => m_ProbeSettings.lighting.fadeDistance; set => m_ProbeSettings.lighting.fadeDistance = value; }

    /// <summary>The result of the rendering of the probe will be divided by this factor. When the probe is read, this factor is undone as the probe data is read. This is to simply avoid issues with values clamping due to precision of the storing format.</summary>
    public float rangeCompressionFactor
    { get => m_ProbeSettings.lighting.rangeCompressionFactor; set => m_ProbeSettings.lighting.rangeCompressionFactor = value; }

    // Proxy
    /// <summary>ProxyVolume currently used by this probe.</summary>
    public ReflectionProxyVolumeComponent proxyVolume
    {
        get => m_ProxyVolume;
        set => m_ProxyVolume = value;
    }

    /// <summary>
    /// Use the influence volume as the proxy volume if this is true.
    /// </summary>
    public bool useInfluenceVolumeAsProxyVolume => m_ProbeSettings.proxySettings.useInfluenceVolumeAsProxyVolume;

    /// <summary>Is the projection at infinite? Value could be changed by Proxy mode.</summary>
    public bool isProjectionInfinite
        => m_ProxyVolume != null && m_ProxyVolume.proxyVolume.shape == ProxyShape.Infinite
        || m_ProxyVolume == null && !m_ProbeSettings.proxySettings.useInfluenceVolumeAsProxyVolume;

    // Influence
    /// <summary>InfluenceVolume of the probe.</summary>
    public InfluenceVolume influenceVolume
    {
        // Lazily created so the getter never returns null.
        get => m_ProbeSettings.influence ?? (m_ProbeSettings.influence = new InfluenceVolume());
        private set => m_ProbeSettings.influence = value;
    }

    // Camera
    /// <summary>Frame settings in use with this probe.</summary>
    public ref FrameSettings frameSettings => ref m_ProbeSettings.cameraSettings.renderingPathCustomFrameSettings;

    /// <summary>
    /// Specify the settings overridden for the frame settings
    /// </summary>
    public ref FrameSettingsOverrideMask frameSettingsOverrideMask => ref m_ProbeSettings.cameraSettings.renderingPathCustomFrameSettingsOverrideMask;

    /// <summary>
    /// The extents of the proxy volume
    /// </summary>
    public Vector3 proxyExtents
        => proxyVolume != null ? proxyVolume.proxyVolume.extents : influenceExtents;

    /// <summary>
    /// The bounding sphere of the influence
    /// </summary>
    public BoundingSphere boundingSphere => influenceVolume.GetBoundingSphereAt(transform.position);

    /// <summary>
    /// The bounding box of the influence
    /// </summary>
    public Bounds bounds => influenceVolume.GetBoundsAt(transform.position);

    /// <summary>
    /// To read the settings of this probe, most of the time you should use the sanitized version of
    /// this property: <see cref="settings"/>.
    /// Use this property to read the settings of the probe only when it is important that you read the raw data.
    /// </summary>
    public ref ProbeSettings settingsRaw => ref m_ProbeSettings;

    /// <summary>
    /// Use this property to get the settings used for calculations.
    ///
    /// To edit the settings of the probe, use the unsanitized version of this property: <see cref="settingsRaw"/>.
    /// </summary>
    public ProbeSettings settings
    {
        get
        {
            var settings = m_ProbeSettings;
            // Special case here, we reference a component that is a wrapper
            // So we need to update with the actual value for the proxyVolume
            settings.proxy = m_ProxyVolume?.proxyVolume;
            settings.influence = settings.influence ?? new InfluenceVolume();
            return settings;
        }
    }

    // Transform of the influence volume: probe position/rotation, unscaled.
    internal Matrix4x4 influenceToWorld => Matrix4x4.TRS(transform.position, transform.rotation, Vector3.one);
    internal Vector3 influenceExtents => influenceVolume.extents;
    // Transform of the proxy volume; falls back to the influence transform when no proxy is assigned.
    internal Matrix4x4 proxyToWorld
        => proxyVolume != null
        ? Matrix4x4.TRS(proxyVolume.transform.position, proxyVolume.transform.rotation, Vector3.one)
        : influenceToWorld;

    internal bool wasRenderedAfterOnEnable { get; private set; } = false;
    internal bool hasEverRendered { get; private set; } = false;

    // Called by the rendering system after this probe has been rendered; updates all
    // "has been rendered" flags consumed by requiresRealtimeUpdate / HasValidRenderedData.
    internal void SetIsRendered()
    {
#if UNITY_EDITOR
        m_WasRenderedDuringAsyncCompilation = ShaderUtil.anythingCompiling;
#endif
        m_WasRenderedSinceLastOnDemandRequest = true;
        wasRenderedAfterOnEnable = true;
        hasEverRendered = true;
    }

    // API
    /// <summary>
    /// Prepare the probe for culling.
    /// You should call this method when you update the parameters during runtime.
    /// </summary>
    public virtual void PrepareCulling() {}

    /// <summary>
    /// Requests that Unity renders this Reflection Probe during the next update.
    /// </summary>
    /// <remarks>
    /// If the Reflection Probe uses <see cref="ProbeSettings.RealtimeMode.OnDemand"/> mode, Unity renders the probe the next time the probe influences a Camera rendering.
    ///
    /// If the Reflection Probe doesn't have an attached component, calling this function has no effect.
    ///
    /// Note: If any part of a Camera's frustum intersects a Reflection Probe's influence volume, the Reflection Probe influences the Camera.
    /// </remarks>
    public void RequestRenderNextUpdate() => m_WasRenderedSinceLastOnDemandRequest = false;

    // Forces the re-rendering for both OnDemand and OnEnable
    internal void ForceRenderingNextUpdate()
    {
        m_WasRenderedSinceLastOnDemandRequest = false;
        wasRenderedAfterOnEnable = false;
    }

    // Rebuilds the debug names used by the render loop (one per cubemap face for
    // reflection probes, a single entry for planar probes).
    void UpdateProbeName()
    {
        if (settings.type == ProbeSettings.ProbeType.ReflectionProbe)
        {
            for (int i = 0; i < 6; i++)
                probeName[i] = $"Reflection Probe RenderCamera ({name}: {(CubemapFace)i})";
        }
        else
        {
            probeName[0] = $"Planar Probe RenderCamera ({name})";
        }
    }

    // Unity callback: register with the probe system and reset per-enable render state.
    void OnEnable()
    {
        wasRenderedAfterOnEnable = false;
        PrepareCulling();
        HDProbeSystem.RegisterProbe(this);
        UpdateProbeName();
#if UNITY_EDITOR
        // Moving the garbage outside of the render loop:
        UnityEditor.EditorApplication.hierarchyChanged += UpdateProbeName;
#endif
    }

    // Unity callback: unregister and drop the editor event subscription added in OnEnable.
    void OnDisable()
    {
        HDProbeSystem.UnregisterProbe(this);
#if UNITY_EDITOR
        UnityEditor.EditorApplication.hierarchyChanged -= UpdateProbeName;
#endif
    }

    // Unity callback (editor): re-register so the probe system picks up edited values.
    void OnValidate()
    {
        HDProbeSystem.UnregisterProbe(this);
        if (isActiveAndEnabled)
        {
            PrepareCulling();
            HDProbeSystem.RegisterProbe(this);
        }
    }
}
}