using System;
using System.Collections.Generic;
using UnityEngine.Serialization;
namespace UnityEngine.Rendering.HighDefinition
{
/// <summary>
/// Holds the physical settings set on cameras.
/// </summary>
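/// <example>
/// A minimal usage sketch: physical camera values are typically edited through the HDAdditionalCameraData
/// component attached to a Camera. The properties set below are the ones declared in this class; the
/// MonoBehaviour itself is hypothetical.
/// <code>
/// using UnityEngine;
/// using UnityEngine.Rendering.HighDefinition;
///
/// [RequireComponent(typeof(HDAdditionalCameraData))]
/// public class SunnyDayExposure : MonoBehaviour // hypothetical example component
/// {
///     void Start()
///     {
///         var hdData = GetComponent<HDAdditionalCameraData>();
///         HDPhysicalCamera physical = hdData.physicalParameters;
///
///         // "Sunny 16" style settings: f/16, 1/200 s, ISO 200.
///         physical.aperture = 16f;           // clamped to [kMinAperture, kMaxAperture]
///         physical.shutterSpeed = 1f / 200f; // exposure time in seconds
///         physical.iso = 200;
///     }
/// }
/// </code>
/// </example>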
[Serializable]
public class HDPhysicalCamera
{
/// <summary>
/// The minimum allowed aperture.
/// </summary>
public const float kMinAperture = 0.7f;
/// <summary>
/// The maximum allowed aperture.
/// </summary>
public const float kMaxAperture = 32f;
/// <summary>
/// The minimum blade count for the aperture diaphragm.
/// </summary>
public const int kMinBladeCount = 3;
/// <summary>
/// The maximum blade count for the aperture diaphragm.
/// </summary>
public const int kMaxBladeCount = 11;
// Camera body
[SerializeField][Min(1f)] int m_Iso = 200;
[SerializeField][Min(0f)] float m_ShutterSpeed = 1f / 200f;
// Lens
// Note: focalLength is already defined in the regular camera component
[SerializeField][Range(kMinAperture, kMaxAperture)] float m_Aperture = 16f;
// Aperture shape
[SerializeField][Range(kMinBladeCount, kMaxBladeCount)] int m_BladeCount = 5;
[SerializeField] Vector2 m_Curvature = new Vector2(2f, 11f);
[SerializeField][Range(0f, 1f)] float m_BarrelClipping = 0.25f;
[SerializeField][Range(-1f, 1f)] float m_Anamorphism = 0f;
/// <summary>
/// The sensor sensitivity (ISO).
/// </summary>
public int iso
{
get => m_Iso;
set => m_Iso = Mathf.Max(value, 1);
}
/// <summary>
/// The exposure time, in seconds.
/// </summary>
public float shutterSpeed
{
get => m_ShutterSpeed;
set => m_ShutterSpeed = Mathf.Max(value, 0f);
}
/// <summary>
/// The aperture number, in f-stop.
/// </summary>
public float aperture
{
get => m_Aperture;
set => m_Aperture = Mathf.Clamp(value, kMinAperture, kMaxAperture);
}
/// <summary>
/// The number of diaphragm blades.
/// </summary>
public int bladeCount
{
get => m_BladeCount;
set => m_BladeCount = Mathf.Clamp(value, kMinBladeCount, kMaxBladeCount);
}
/// <summary>
/// Maps an aperture range to blade curvature.
/// </summary>
public Vector2 curvature
{
get => m_Curvature;
set
{
m_Curvature.x = Mathf.Max(value.x, kMinAperture);
m_Curvature.y = Mathf.Min(value.y, kMaxAperture);
}
}
/// <summary>
/// The strength of the "cat eye" effect on bokeh (optical vignetting).
/// </summary>
public float barrelClipping
{
get => m_BarrelClipping;
set => m_BarrelClipping = Mathf.Clamp01(value);
}
/// <summary>
/// Stretches the sensor to simulate an anamorphic look. Positive values distort the Camera
/// vertically, negative values distort the Camera horizontally.
/// </summary>
public float anamorphism
{
get => m_Anamorphism;
set => m_Anamorphism = Mathf.Clamp(value, -1f, 1f);
}
/// <summary>
/// Copies the settings of this instance to another instance.
/// </summary>
/// <param name="c">The instance to copy the settings to.</param>
public void CopyTo(HDPhysicalCamera c)
{
c.iso = iso;
c.shutterSpeed = shutterSpeed;
c.aperture = aperture;
c.bladeCount = bladeCount;
c.curvature = curvature;
c.barrelClipping = barrelClipping;
c.anamorphism = anamorphism;
}
}
/// <summary>
/// Additional component that holds HDRP specific parameters for Cameras.
/// </summary>
[HelpURL(Documentation.baseURL + Documentation.version + Documentation.subURL + "HDRP-Camera" + Documentation.endURL)]
[AddComponentMenu("")] // Hide in menu
[DisallowMultipleComponent, ExecuteAlways]
[RequireComponent(typeof(Camera))]
public partial class HDAdditionalCameraData : MonoBehaviour, IFrameSettingsHistoryContainer
{
/// <summary>
/// How the camera should handle vertically flipping the frame at the end of rendering.
/// </summary>
public enum FlipYMode
{
/// Handle flip automatically.
Automatic,
/// Force vertical flip.
ForceFlipY
}
/// <summary>
/// Type of buffers that can be accessed for this camera.
/// </summary>
[Flags]
public enum BufferAccessType
{
/// Depth buffer.
Depth = 1,
/// Normal buffer.
Normal = 1 << 1,
/// Color buffer.
Color = 1 << 2
}
/// <summary>
/// Structure used to access graphics buffers for this camera.
/// </summary>
public struct BufferAccess
{
internal BufferAccessType bufferAccess;
internal void Reset()
{
bufferAccess = 0;
}
/// <summary>
/// Request access to a list of buffers in the form of a bitfield.
/// </summary>
/// <param name="flags">List of buffers that need to be accessed.</param>
public void RequestAccess(BufferAccessType flags)
{
bufferAccess |= flags;
}
}
// The light culling uses standard projection matrices (non-oblique).
// If the user overrides the projection matrix with an oblique one,
// they must also provide a callback to get the equivalent non-oblique matrix for culling.
/// <summary>
/// Returns the non-oblique projection matrix for a particular camera.
/// </summary>
/// <param name="camera">Requested camera.</param>
/// <returns>The non-oblique projection matrix for a particular camera.</returns>
public delegate Matrix4x4 NonObliqueProjectionGetter(Camera camera);
Camera m_Camera;
/// <summary>
/// Clear mode for the camera background.
/// </summary>
public enum ClearColorMode
{
/// Clear the background with the sky.
Sky,
/// Clear the background with a constant color.
Color,
/// Don't clear the background.
None
};
/// <summary>
/// Anti-aliasing mode.
/// </summary>
public enum AntialiasingMode
{
/// No Anti-aliasing.
None,
/// FXAA.
FastApproximateAntialiasing,
/// Temporal anti-aliasing.
TemporalAntialiasing,
/// SMAA.
SubpixelMorphologicalAntiAliasing
}
/// <summary>
/// SMAA quality level.
/// </summary>
public enum SMAAQualityLevel
{
/// Low quality.
Low,
/// Medium quality.
Medium,
/// High quality.
High
}
/// <summary>
/// TAA quality level.
/// </summary>
public enum TAAQualityLevel
{
/// Low quality.
Low,
/// Medium quality.
Medium,
/// High quality.
High
}
/// Clear mode for the camera background.
public ClearColorMode clearColorMode = ClearColorMode.Sky;
/// HDR color used for clearing the camera background.
[ColorUsage(true, true)]
public Color backgroundColorHDR = new Color(0.025f, 0.07f, 0.19f, 0.0f);
/// Clear depth as well as color.
public bool clearDepth = true;
/// Layer mask used to select which volumes will influence this camera.
[Tooltip("LayerMask HDRP uses for Volume interpolation for this Camera.")]
public LayerMask volumeLayerMask = 1;
/// Optional transform override for the position where volumes are interpolated.
public Transform volumeAnchorOverride;
/// Anti-aliasing mode.
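/// <example>
/// A minimal sketch of driving the anti-aliasing settings from script; the MonoBehaviour below is
/// hypothetical, while the fields it sets are the ones declared in this class.
/// <code>
/// using UnityEngine;
/// using UnityEngine.Rendering.HighDefinition;
///
/// [RequireComponent(typeof(HDAdditionalCameraData))]
/// public class EnableTemporalAA : MonoBehaviour // hypothetical example component
/// {
///     void Start()
///     {
///         var hdData = GetComponent<HDAdditionalCameraData>();
///         hdData.antialiasing = HDAdditionalCameraData.AntialiasingMode.TemporalAntialiasing;
///         hdData.TAAQuality = HDAdditionalCameraData.TAAQualityLevel.High;
///         hdData.taaSharpenStrength = 0.6f;
///     }
/// }
/// </code>
/// </example>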
public AntialiasingMode antialiasing = AntialiasingMode.None;
/// Quality of the anti-aliasing when using SMAA.
public SMAAQualityLevel SMAAQuality = SMAAQualityLevel.High;
/// Use dithering to filter out minor banding.
public bool dithering = false;
/// Use a pass to eliminate NaNs contained in the color buffer before post-processing.
public bool stopNaNs = false;
/// Strength of the sharpening component of temporal anti-aliasing.
[Range(0, 2)]
public float taaSharpenStrength = 0.5f;
/// Quality of the anti-aliasing when using TAA.
public TAAQualityLevel TAAQuality = TAAQualityLevel.Medium;
/// Strength of the sharpening of the history sampled for TAA.
[Range(0, 1)]
public float taaHistorySharpening = 0.35f;
/// Drives the anti-flicker mechanism. With high values, flickering might be reduced, but it can lead to more ghosting or disocclusion artifacts.
[Range(0.0f, 1.0f)]
public float taaAntiFlicker = 0.5f;
/// The larger this value, the more likely the history is to be rejected when the current and reprojected history motion vectors differ by a substantial amount.
/// Larger values can decrease ghosting, but will also reintroduce aliasing in those cases.
[Range(0.0f, 1.0f)]
public float taaMotionVectorRejection = 0.0f;
/// When enabled, ringing artifacts (dark or strangely saturated edges) caused by history sharpening are reduced. This comes at a potential loss of sharpness upon motion.
public bool taaAntiHistoryRinging = false;
/// Physical camera parameters.
public HDPhysicalCamera physicalParameters = new HDPhysicalCamera();
/// Vertical flip mode.
public FlipYMode flipYMode;
/// Enable XR rendering.
public bool xrRendering = true;
/// Skips rendering settings to directly render in fullscreen (Useful for video).
[Tooltip("Skips rendering settings to directly render in fullscreen (Useful for video).")]
public bool fullscreenPassthrough = false;
/// Allows dynamic resolution on buffers linked to this camera.
[Tooltip("Allows dynamic resolution on buffers linked to this camera.")]
public bool allowDynamicResolution = false;
/// Allows you to override the default frame settings for this camera.
[Tooltip("Allows you to override the default settings for this camera.")]
public bool customRenderingSettings = false;
/// Invert face culling.
public bool invertFaceCulling = false;
/// Probe layer mask.
public LayerMask probeLayerMask = ~0;
/// Enable to retain history buffers even if the camera is disabled.
public bool hasPersistentHistory = false;
/// Event used to override HDRP rendering for this particular camera.
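/// <example>
/// A minimal sketch: when a handler is registered, HDRP invokes it instead of its regular rendering
/// for this camera. The MonoBehaviour below is hypothetical.
/// <code>
/// using UnityEngine;
/// using UnityEngine.Rendering;
/// using UnityEngine.Rendering.HighDefinition;
///
/// [RequireComponent(typeof(HDAdditionalCameraData))]
/// public class MyCustomRender : MonoBehaviour // hypothetical example component
/// {
///     void OnEnable()
///     {
///         GetComponent<HDAdditionalCameraData>().customRender += Render;
///     }
///
///     void OnDisable()
///     {
///         GetComponent<HDAdditionalCameraData>().customRender -= Render;
///     }
///
///     void Render(ScriptableRenderContext context, HDCamera hdCamera)
///     {
///         // Issue custom rendering commands for this camera here.
///     }
/// }
/// </code>
/// </example>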
public event Action<ScriptableRenderContext, HDCamera> customRender;
/// True if any Custom Render event is registered for this camera.
public bool hasCustomRender { get { return customRender != null; } }
/// <summary>
/// Delegate used to request access to various buffers of this camera.
/// </summary>
/// <param name="bufferAccess">Ref to a BufferAccess structure on which users should specify which buffer(s) they need.</param>
public delegate void RequestAccessDelegate(ref BufferAccess bufferAccess);
/// RequestAccessDelegate used to request access to various buffers of this camera.
public event RequestAccessDelegate requestGraphicsBuffer;
/// The object used as a target for centering the Exposure's Procedural Mask metering mode when target object option is set (See Exposure Volume Component).
public GameObject exposureTarget = null;
internal float probeCustomFixedExposure = 1.0f;
internal float deExposureMultiplier = 1.0f;
[SerializeField, FormerlySerializedAs("renderingPathCustomFrameSettings")]
FrameSettings m_RenderingPathCustomFrameSettings = FrameSettings.NewDefaultCamera();
/// Mask specifying which frame settings are overridden when using custom frame settings.
public FrameSettingsOverrideMask renderingPathCustomFrameSettingsOverrideMask;
/// When using default frame settings, specify which type of frame settings to use.
public FrameSettingsRenderType defaultFrameSettings;
/// Custom frame settings.
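/// <example>
/// A minimal sketch, assuming the FrameSettingsField enum, the override mask bit array, and
/// FrameSettings.SetEnabled behave as in recent HDRP versions: to override a frame setting for this
/// camera, enable customRenderingSettings, mark the field as overridden in the mask, then set the value.
/// <code>
/// var hdData = GetComponent<HDAdditionalCameraData>();
/// hdData.customRenderingSettings = true;
/// // Mark Postprocess as overridden, then disable it for this camera.
/// hdData.renderingPathCustomFrameSettingsOverrideMask.mask[(uint)FrameSettingsField.Postprocess] = true;
/// hdData.renderingPathCustomFrameSettings.SetEnabled(FrameSettingsField.Postprocess, false);
/// </code>
/// </example>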
public ref FrameSettings renderingPathCustomFrameSettings => ref m_RenderingPathCustomFrameSettings;
bool IFrameSettingsHistoryContainer.hasCustomFrameSettings
=> customRenderingSettings;
FrameSettingsOverrideMask IFrameSettingsHistoryContainer.frameSettingsMask
=> renderingPathCustomFrameSettingsOverrideMask;
FrameSettings IFrameSettingsHistoryContainer.frameSettings
=> m_RenderingPathCustomFrameSettings;
FrameSettingsHistory m_RenderingPathHistory = new FrameSettingsHistory()
{
defaultType = FrameSettingsRenderType.Camera
};
FrameSettingsHistory IFrameSettingsHistoryContainer.frameSettingsHistory
{
get => m_RenderingPathHistory;
set => m_RenderingPathHistory = value;
}
string IFrameSettingsHistoryContainer.panelName
=> m_CameraRegisterName;
/// <summary>
/// Returns the action that resets the frame settings history used by the debug windows.
/// </summary>
/// <returns>The reset action.</returns>
Action IDebugData.GetReset()
// Caution: we actually need to retrieve the right m_RenderingPathHistory at call time, as it is a struct,
// so we cannot directly use => m_RenderingPathHistory.TriggerReset
=> () => m_RenderingPathHistory.TriggerReset();
internal ProfilingSampler profilingSampler;
AOVRequestDataCollection m_AOVRequestDataCollection = new AOVRequestDataCollection(null);
/// <summary>Set AOV requests to use.</summary>
/// <param name="aovRequests">Describes the requests to execute.</param>
/// <example>
/// <code>
/// using System.Collections.Generic;
/// using UnityEngine;
/// using UnityEngine.Rendering;
/// using UnityEngine.Rendering.HighDefinition;
/// using UnityEngine.Rendering.HighDefinition.Attributes;
///
/// [ExecuteAlways]
/// [RequireComponent(typeof(Camera))]
/// [RequireComponent(typeof(HDAdditionalCameraData))]
/// public class SetupAOVCallbacks : MonoBehaviour
/// {
/// private static RTHandle m_ColorRT;
///
/// [SerializeField] private Texture m_Target;
/// [SerializeField] private DebugFullScreen m_DebugFullScreen;
/// [SerializeField] private DebugLightFilterMode m_DebugLightFilter;
/// [SerializeField] private MaterialSharedProperty m_MaterialSharedProperty;
/// [SerializeField] private LightingProperty m_LightingProperty;
/// [SerializeField] private AOVBuffers m_BuffersToCopy;
/// [SerializeField] private List<GameObject> m_IncludedLights;
///
///
/// void OnEnable()
/// {
/// var aovRequest = new AOVRequest(AOVRequest.NewDefault())
/// .SetLightFilter(m_DebugLightFilter);
/// if (m_DebugFullScreen != DebugFullScreen.None)
/// aovRequest = aovRequest.SetFullscreenOutput(m_DebugFullScreen);
/// if (m_MaterialSharedProperty != MaterialSharedProperty.None)
/// aovRequest = aovRequest.SetFullscreenOutput(m_MaterialSharedProperty);
/// if (m_LightingProperty != LightingProperty.None)
/// aovRequest = aovRequest.SetFullscreenOutput(m_LightingProperty);
///
/// var add = GetComponent<HDAdditionalCameraData>();
/// add.SetAOVRequests(
/// new AOVRequestBuilder()
/// .Add(
/// aovRequest,
/// bufferId => m_ColorRT ?? (m_ColorRT = RTHandles.Alloc(512, 512)),
/// m_IncludedLights.Count > 0 ? m_IncludedLights : null,
/// new []{ m_BuffersToCopy },
/// (cmd, textures, properties) =>
/// {
/// if (m_Target != null)
/// cmd.Blit(textures[0], m_Target);
/// })
/// .Build()
/// );
/// }
///
/// private void OnGUI()
/// {
/// GUI.DrawTexture(new Rect(10, 10, 512, 256), m_Target);
/// }
///
/// void OnDisable()
/// {
/// var add = GetComponent<HDAdditionalCameraData>();
/// add.SetAOVRequests(null);
/// }
///
/// void OnValidate()
/// {
/// OnDisable();
/// OnEnable();
/// }
/// }
/// </code>
///
/// Example use case:
/// * Export Normals: use MaterialSharedProperty.Normals and AOVBuffers.Color
/// * Export Color before post processing: use AOVBuffers.Color
/// * Export Color after post processing: use AOVBuffers.Output
/// * Export Depth stencil: use AOVBuffers.DepthStencil
/// * Export AO: use MaterialSharedProperty.AmbientOcclusion and AOVBuffers.Color
/// </example>
public void SetAOVRequests(AOVRequestDataCollection aovRequests)
=> m_AOVRequestDataCollection = aovRequests;
/// <summary>
/// Use this property to get the AOV requests.
///
/// It is never null.
/// </summary>
public IEnumerable<AOVRequestData> aovRequests =>
m_AOVRequestDataCollection ?? (m_AOVRequestDataCollection = new AOVRequestDataCollection(null));
// Used for debug windows
// When the camera name changes, we need to update the name in the Debug windows.
// That is the purpose of the following fields.
bool m_IsDebugRegistered = false;
string m_CameraRegisterName;
// When we are a preview, there is no way inside Unity to make a distinction between camera preview and material preview.
// This property allows saying that we are an editor camera preview when the type is Preview.
/// <summary>
/// Unity supports two types of preview: camera preview and material preview. This property allows knowing that this is an editor camera preview when the camera type is Preview.
/// </summary>
public bool isEditorCameraPreview { get; internal set; }
// This is used to copy data into the camera for the Reset() workflow in the camera editor
/// <summary>
/// Copies the HDAdditionalCameraData settings to another instance.
/// </summary>
/// <param name="data">Component to copy to.</param>
public void CopyTo(HDAdditionalCameraData data)
{
data.clearColorMode = clearColorMode;
data.backgroundColorHDR = backgroundColorHDR;
data.clearDepth = clearDepth;
data.customRenderingSettings = customRenderingSettings;
data.volumeLayerMask = volumeLayerMask;
data.volumeAnchorOverride = volumeAnchorOverride;
data.antialiasing = antialiasing;
data.dithering = dithering;
data.xrRendering = xrRendering;
physicalParameters.CopyTo(data.physicalParameters);
data.renderingPathCustomFrameSettings = renderingPathCustomFrameSettings;
data.renderingPathCustomFrameSettingsOverrideMask = renderingPathCustomFrameSettingsOverrideMask;
data.defaultFrameSettings = defaultFrameSettings;
data.probeCustomFixedExposure = probeCustomFixedExposure;
// We must not copy the following
//data.m_IsDebugRegistered = m_IsDebugRegistered;
//data.m_CameraRegisterName = m_CameraRegisterName;
//data.isEditorCameraPreview = isEditorCameraPreview;
}
// For custom projection matrices
// Set the proper getter
/// <summary>
/// Specify a custom getter for the non-oblique projection matrix.
/// </summary>
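/// <example>
/// A minimal sketch: when this camera uses an oblique projection matrix (for example for planar
/// reflections), a non-oblique equivalent can be rebuilt from the regular camera parameters for
/// light culling. The assignment below is illustrative.
/// <code>
/// var hdData = GetComponent<HDAdditionalCameraData>();
/// // Rebuild a standard perspective matrix from the camera's own parameters.
/// hdData.nonObliqueProjectionGetter = cam =>
///     Matrix4x4.Perspective(cam.fieldOfView, cam.aspect, cam.nearClipPlane, cam.farClipPlane);
/// </code>
/// </example>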
public NonObliqueProjectionGetter nonObliqueProjectionGetter = GeometryUtils.CalculateProjectionMatrix;
/// <summary>
/// Returns the non-oblique projection matrix for this camera.
/// </summary>
/// <param name="camera">Requested camera.</param>
/// <returns>The non-oblique projection matrix for this camera.</returns>
public Matrix4x4 GetNonObliqueProjection(Camera camera)
{
return nonObliqueProjectionGetter(camera);
}
void RegisterDebug()
{
if (!m_IsDebugRegistered)
{
// Note that we register FrameSettingsHistory, so manipulating FrameSettings in the Debug windows
// doesn't affect the serialized version.
// Note: a camera's preview camera is registered with the Preview type but is later changed to the Game type, which leads to issues.
// Keep skipping registration for those cameras while this issue persists.
m_CameraRegisterName = name;
if (m_Camera.cameraType != CameraType.Preview && m_Camera.cameraType != CameraType.Reflection)
{
DebugDisplaySettings.RegisterCamera(this);
VolumeDebugSettings.RegisterCamera(this);
}
m_IsDebugRegistered = true;
}
}
void UnRegisterDebug()
{
if (m_IsDebugRegistered)
{
// Note: a camera's preview camera is registered with the Preview type but is later changed to the Game type, which leads to issues.
// Keep skipping unregistration for those cameras while this issue persists.
if (m_Camera != null && m_Camera.cameraType != CameraType.Preview && m_Camera.cameraType != CameraType.Reflection)
{
VolumeDebugSettings.UnRegisterCamera(this);
DebugDisplaySettings.UnRegisterCamera(this);
}
m_IsDebugRegistered = false;
}
}
void OnEnable()
{
// Make sure the legacy HDR option is disabled on the camera, as it causes banding in the SceneView. Yes, it is a contradiction, but well, Unity...
// When the HDR option is enabled, Unity renders in FP16 then converts to 8 bit with a stretch copy (this causes banding, as it should be converted to sRGB (or another appropriate color space)), then does a final shader pass with sRGB conversion.
// When LDR, Unity renders in 8-bit sRGB, then does a final shader pass with sRGB conversion.
// What should be done is to convert to sRGB in our post process and store in a linear 10-bit format, but that requires a C++ change...
m_Camera = GetComponent<Camera>();
if (m_Camera == null)
return;
m_Camera.allowMSAA = false; // We don't use this option in HD (it is legacy MSAA) and it produces a warning in the inspector UI if we leave it enabled
m_Camera.allowHDR = false;
RegisterDebug();
#if UNITY_EDITOR
UpdateDebugCameraName();
UnityEditor.EditorApplication.hierarchyChanged += UpdateDebugCameraName;
#endif
}
void UpdateDebugCameraName()
{
// Move the garbage generated by accessing name outside of HDRP
profilingSampler = new ProfilingSampler(HDUtils.ComputeCameraName(name));
if (name != m_CameraRegisterName)
{
UnRegisterDebug();
RegisterDebug();
}
}
void OnDisable()
{
UnRegisterDebug();
#if UNITY_EDITOR
UnityEditor.EditorApplication.hierarchyChanged -= UpdateDebugCameraName;
#endif
}
// This is called at the creation of the HD Additional Camera Data, to convert the legacy camera settings to HD
internal static void InitDefaultHDAdditionalCameraData(HDAdditionalCameraData cameraData)
{
var camera = cameraData.gameObject.GetComponent<Camera>();
cameraData.clearDepth = camera.clearFlags != CameraClearFlags.Nothing;
if (camera.clearFlags == CameraClearFlags.Skybox)
cameraData.clearColorMode = ClearColorMode.Sky;
else if (camera.clearFlags == CameraClearFlags.SolidColor)
cameraData.clearColorMode = ClearColorMode.Color;
else // None
cameraData.clearColorMode = ClearColorMode.None;
}
internal void ExecuteCustomRender(ScriptableRenderContext renderContext, HDCamera hdCamera)
{
if (customRender != null)
{
customRender(renderContext, hdCamera);
}
}
internal BufferAccessType GetBufferAccess()
{
BufferAccess result = new BufferAccess();
requestGraphicsBuffer?.Invoke(ref result);
return result.bufferAccess;
}
/// <summary>
/// Returns the requested graphics buffer.
/// Users should use the requestGraphicsBuffer event to make sure that the required buffers are requested first.
/// Note that depending on the current frame settings some buffers may not be available.
/// </summary>
/// <param name="type">Type of the requested buffer.</param>
/// <returns>Requested buffer as a RTHandle. Can be null if the buffer is not available.</returns>
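/// <example>
/// A minimal sketch: request depth access through the requestGraphicsBuffer event, then fetch the
/// buffer when needed. The MonoBehaviour below is hypothetical.
/// <code>
/// using UnityEngine;
/// using UnityEngine.Rendering;
/// using UnityEngine.Rendering.HighDefinition;
///
/// [RequireComponent(typeof(HDAdditionalCameraData))]
/// public class DepthBufferReader : MonoBehaviour // hypothetical example component
/// {
///     HDAdditionalCameraData m_HdData;
///
///     void OnEnable()
///     {
///         m_HdData = GetComponent<HDAdditionalCameraData>();
///         m_HdData.requestGraphicsBuffer += RequestBuffers;
///     }
///
///     void OnDisable()
///     {
///         m_HdData.requestGraphicsBuffer -= RequestBuffers;
///     }
///
///     static void RequestBuffers(ref HDAdditionalCameraData.BufferAccess access)
///     {
///         access.RequestAccess(HDAdditionalCameraData.BufferAccessType.Depth);
///     }
///
///     void Update()
///     {
///         // May be null depending on the current frame settings.
///         RTHandle depth = m_HdData.GetGraphicsBuffer(HDAdditionalCameraData.BufferAccessType.Depth);
///     }
/// }
/// </code>
/// </example>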
public RTHandle GetGraphicsBuffer(BufferAccessType type)
{
HDCamera hdCamera = HDCamera.GetOrCreate(m_Camera);
if ((type & BufferAccessType.Color) != 0)
return hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain);
else if ((type & BufferAccessType.Depth) != 0)
return hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Depth);
else if ((type & BufferAccessType.Normal) != 0)
return hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.Normal);
else
return null;
}
}
}