//-----------------------------------------------------------------------------
// SurfaceData and BSDFData
//-----------------------------------------------------------------------------
// SurfaceData is defined in Hair.cs which generates Hair.cs.hlsl
#include "Hair.cs.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/SubsurfaceScattering/SubsurfaceScattering.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/NormalBuffer.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/VolumeRendering.hlsl"
//-----------------------------------------------------------------------------
// Texture and constant buffer declaration
//-----------------------------------------------------------------------------
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/LTCAreaLight/LTCAreaLight.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/PreIntegratedFGD/PreIntegratedFGD.hlsl"
#define DEFAULT_HAIR_SPECULAR_VALUE 0.0465 // Hair is IOR 1.55
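// For reference: F0 = ((1.55 - 1) / (1.55 + 1))^2 = (0.55 / 2.55)^2 ~= 0.0465 for a dielectric of IOR 1.55.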
//-----------------------------------------------------------------------------
// Helper functions/variable specific to this material
//-----------------------------------------------------------------------------
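// Debug helper: hair has no metallic workflow, so the 'replace' blend factor is unused here and the
// diffuse color is returned as-is, with 0.0 in the alpha (blend-weight) channel.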
float4 GetDiffuseOrDefaultColor(BSDFData bsdfData, float replace)
{
return float4(bsdfData.diffuseColor, 0.0);
}
float3 GetNormalForShadowBias(BSDFData bsdfData)
{
#if _USE_LIGHT_FACING_NORMAL
// TODO: should probably bias towards the light for splines...
return bsdfData.geomNormalWS;
#else
return bsdfData.geomNormalWS;
#endif
}
float GetAmbientOcclusionForMicroShadowing(BSDFData bsdfData)
{
// Don't apply micro shadowing to hair; it doesn't really make sense for it
return 1.0;
}
// This function is used to help with debugging and must be implemented by any lit material.
// The implementer must take into account which override components are currently active and
// adjust the SurfaceData properties accordingly
void ApplyDebugToSurfaceData(float3x3 tangentToWorld, inout SurfaceData surfaceData)
{
#ifdef DEBUG_DISPLAY
// NOTE: The _Debug* uniforms come from /HDRP/Debug/DebugDisplay.hlsl
// Override the values if requested by the user
// This can also be used for debug lighting modes such as diffuse only
bool overrideAlbedo = _DebugLightingAlbedo.x != 0.0;
bool overrideSmoothness = _DebugLightingSmoothness.x != 0.0;
bool overrideNormal = _DebugLightingNormal.x != 0.0;
bool overrideAO = _DebugLightingAmbientOcclusion.x != 0.0;
if (overrideAlbedo)
{
float3 overrideAlbedoValue = _DebugLightingAlbedo.yzw;
surfaceData.diffuseColor = overrideAlbedoValue;
}
if (overrideSmoothness)
{
float overrideSmoothnessValue = _DebugLightingSmoothness.y;
surfaceData.perceptualSmoothness = overrideSmoothnessValue;
surfaceData.secondaryPerceptualSmoothness = overrideSmoothnessValue;
}
if (overrideNormal)
{
surfaceData.normalWS = tangentToWorld[2];
}
if (overrideAO)
{
float overrideAOValue = _DebugLightingAmbientOcclusion.y;
surfaceData.ambientOcclusion = overrideAOValue;
}
if (_DebugFullScreenMode == FULLSCREENDEBUGMODE_VALIDATE_DIFFUSE_COLOR)
{
surfaceData.diffuseColor = pbrDiffuseColorValidate(surfaceData.diffuseColor, DEFAULT_HAIR_SPECULAR_VALUE, false, false).xyz;
}
else if (_DebugFullScreenMode == FULLSCREENDEBUGMODE_VALIDATE_SPECULAR_COLOR)
{
surfaceData.diffuseColor = pbrSpecularColorValidate(surfaceData.diffuseColor, DEFAULT_HAIR_SPECULAR_VALUE, false, false).xyz;
}
#endif
}
// Note: This will be available and used in ShaderPassForward.hlsl since in Hair.shader,
// just before including the core code of the pass (ShaderPassForward.hlsl), we include
// Material.hlsl (or Lighting.hlsl, which includes it), which in turn includes this file,
// Hair.hlsl, via the #if defined(UNITY_MATERIAL_*) glue mechanism.
void ApplyDebugToBSDFData(inout BSDFData bsdfData)
{
#ifdef DEBUG_DISPLAY
// Override the values if requested by the user
// This can also be used for debug lighting modes such as specular only
bool overrideSpecularColor = _DebugLightingSpecularColor.x != 0.0;
if (overrideSpecularColor)
{
float3 overrideSpecularColor = _DebugLightingSpecularColor.yzw;
bsdfData.fresnel0 = overrideSpecularColor;
}
#endif
}
NormalData ConvertSurfaceDataToNormalData(SurfaceData surfaceData)
{
NormalData normalData;
normalData.normalWS = surfaceData.normalWS;
normalData.perceptualRoughness = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothness);
return normalData;
}
//-----------------------------------------------------------------------------
// conversion function for forward
//-----------------------------------------------------------------------------
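// Maps a roughness (alpha) to an equivalent Blinn-Phong specular exponent via n = 2 / alpha^2 - 2
// (a common Beckmann <-> Blinn-Phong matching), clamped away from zero and infinity.
// For example, alpha = 0.5 gives an exponent of 2 / 0.25 - 2 = 6.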
float RoughnessToBlinnPhongSpecularExponent(float roughness)
{
return clamp(2 * rcp(roughness * roughness) - 2, FLT_EPS, rcp(FLT_EPS));
}
BSDFData ConvertSurfaceDataToBSDFData(uint2 positionSS, SurfaceData surfaceData)
{
BSDFData bsdfData;
ZERO_INITIALIZE(BSDFData, bsdfData);
// IMPORTANT: All enable flags are statically known at compile time, so the compiler can perform compile-time optimization
bsdfData.materialFeatures = surfaceData.materialFeatures;
bsdfData.ambientOcclusion = surfaceData.ambientOcclusion;
bsdfData.specularOcclusion = surfaceData.specularOcclusion;
bsdfData.diffuseColor = surfaceData.diffuseColor;
bsdfData.normalWS = surfaceData.normalWS;
bsdfData.geomNormalWS = surfaceData.geomNormalWS;
bsdfData.perceptualRoughness = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothness);
// This value will be overridden by the value in the diffusion profile
bsdfData.fresnel0 = DEFAULT_HAIR_SPECULAR_VALUE;
bsdfData.transmittance = surfaceData.transmittance;
bsdfData.rimTransmissionIntensity = surfaceData.rimTransmissionIntensity;
// This is the hair tangent (which represents the hair strand direction, root to tip).
bsdfData.hairStrandDirectionWS = surfaceData.hairStrandDirectionWS;
// Kajiya-Kay
if (HasFlag(surfaceData.materialFeatures, MATERIALFEATUREFLAGS_HAIR_KAJIYA_KAY))
{
bsdfData.secondaryPerceptualRoughness = PerceptualSmoothnessToPerceptualRoughness(surfaceData.secondaryPerceptualSmoothness);
bsdfData.specularTint = surfaceData.specularTint;
bsdfData.secondarySpecularTint = surfaceData.secondarySpecularTint;
bsdfData.specularShift = surfaceData.specularShift;
bsdfData.secondarySpecularShift = surfaceData.secondarySpecularShift;
float roughness1 = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
float roughness2 = PerceptualRoughnessToRoughness(bsdfData.secondaryPerceptualRoughness);
bsdfData.specularExponent = RoughnessToBlinnPhongSpecularExponent(roughness1);
bsdfData.secondarySpecularExponent = RoughnessToBlinnPhongSpecularExponent(roughness2);
bsdfData.anisotropy = 0.8; // For hair we fix the anisotropy
}
ApplyDebugToBSDFData(bsdfData);
return bsdfData;
}
//-----------------------------------------------------------------------------
// Debug method (use to display values)
//-----------------------------------------------------------------------------
// This function calls the generated debug function and allows overriding the debug output if needed
void GetSurfaceDataDebug(uint paramId, SurfaceData surfaceData, inout float3 result, inout bool needLinearToSRGB)
{
GetGeneratedSurfaceDataDebug(paramId, surfaceData, result, needLinearToSRGB);
// Override the debug value output to make it more readable
switch (paramId)
{
case DEBUGVIEW_HAIR_SURFACEDATA_NORMAL_VIEW_SPACE:
// Convert to view space
{
float3 vsNormal = TransformWorldToViewDir(surfaceData.normalWS);
result = IsNormalized(vsNormal) ? vsNormal * 0.5 + 0.5 : float3(1.0, 0.0, 0.0);
break;
}
case DEBUGVIEW_HAIR_SURFACEDATA_GEOMETRIC_NORMAL_VIEW_SPACE:
{
float3 vsGeomNormal = TransformWorldToViewDir(surfaceData.geomNormalWS);
result = IsNormalized(vsGeomNormal) ? vsGeomNormal * 0.5 + 0.5 : float3(1.0, 0.0, 0.0);
break;
}
}
}
// This function calls the generated debug function and allows overriding the debug output if needed
void GetBSDFDataDebug(uint paramId, BSDFData bsdfData, inout float3 result, inout bool needLinearToSRGB)
{
GetGeneratedBSDFDataDebug(paramId, bsdfData, result, needLinearToSRGB);
// Override the debug value output to make it more readable
switch (paramId)
{
case DEBUGVIEW_HAIR_BSDFDATA_NORMAL_VIEW_SPACE:
// Convert to view space
{
float3 vsNormal = TransformWorldToViewDir(bsdfData.normalWS);
result = IsNormalized(vsNormal) ? vsNormal * 0.5 + 0.5 : float3(1.0, 0.0, 0.0);
break;
}
case DEBUGVIEW_HAIR_BSDFDATA_GEOMETRIC_NORMAL_VIEW_SPACE:
{
float3 vsGeomNormal = TransformWorldToViewDir(bsdfData.geomNormalWS);
result = IsNormalized(vsGeomNormal) ? vsGeomNormal * 0.5 + 0.5 : float3(1.0, 0.0, 0.0);
break;
}
}
}
void GetPBRValidatorDebug(SurfaceData surfaceData, inout float3 result)
{
result = surfaceData.diffuseColor;
}
//-----------------------------------------------------------------------------
// PreLightData
//
// Make sure we respect the naming conventions so that ShaderPassForward can be reused as is,
// i.e. the struct (even if opaque to ShaderPassForward) is named PreLightData and the
// GetPreLightData prototype matches.
//-----------------------------------------------------------------------------
// Precomputed lighting data to send to the various lighting functions
struct PreLightData
{
float NdotV; // Could be negative due to normal mapping, use ClampNdotV()
// IBL
float3 iblR; // Reflected specular direction, used for IBL in EvaluateBSDF_Env()
float iblPerceptualRoughness;
float3 specularFGD; // Store preintegrated BSDF for both specular and diffuse
float diffuseFGD;
};
//
// ClampRoughness helper specific to this material
//
void ClampRoughness(inout PreLightData preLightData, inout BSDFData bsdfData, float minRoughness)
{
bsdfData.perceptualRoughness = max(RoughnessToPerceptualRoughness(minRoughness), bsdfData.perceptualRoughness);
bsdfData.secondaryPerceptualRoughness = max(RoughnessToPerceptualRoughness(minRoughness), bsdfData.secondaryPerceptualRoughness);
}
// This function is called to precompute heavy calculations before the light loop
PreLightData GetPreLightData(float3 V, PositionInputs posInput, inout BSDFData bsdfData)
{
PreLightData preLightData;
// Don't initialize to zero, to allow tracking warnings about uninitialized data
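// When _USE_LIGHT_FACING_NORMAL is enabled (thin strand / spline geometry), the shading normal is
// replaced by a view-facing normal built from the strand direction (ComputeViewFacingNormal is assumed
// to return the component of V perpendicular to the strand tangent) rather than the mapped normal.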
#if _USE_LIGHT_FACING_NORMAL
float3 N = ComputeViewFacingNormal(V, bsdfData.hairStrandDirectionWS);
#else
float3 N = bsdfData.normalWS;
#endif
preLightData.NdotV = dot(N, V);
float clampedNdotV = ClampNdotV(preLightData.NdotV);
float unused;
if (HasFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_HAIR_KAJIYA_KAY))
{
// Note: For Kajiya-Kay hair we currently rely on a single cubemap sample instead of two, as in practice the smoothness of both lobes isn't too far apart,
// and we take the smoothness of the secondary lobe, as it is often rougher (it is the colored one).
preLightData.iblPerceptualRoughness = bsdfData.secondaryPerceptualRoughness;
// TODO: adjust for Blinn-Phong here?
GetPreIntegratedFGDGGXAndDisneyDiffuse(clampedNdotV, preLightData.iblPerceptualRoughness, bsdfData.fresnel0, preLightData.specularFGD, preLightData.diffuseFGD, unused);
// We use Lambert for hair for now
// Note: this normalization term is wrong; the correct one is (1/(Pi^2)).
preLightData.diffuseFGD = 1.0;
}
else
{
preLightData.iblPerceptualRoughness = bsdfData.perceptualRoughness;
preLightData.specularFGD = 1.0;
preLightData.diffuseFGD = 1.0;
}
// Stretch hack... Copy-pasted from GGX, ALU-optimized for hair.
// float3 iblN = normalize(lerp(bsdfData.normalWS, N, bsdfData.anisotropy));
float3 iblN = N;
preLightData.iblR = reflect(-V, iblN);
preLightData.iblPerceptualRoughness *= saturate(1.2 - abs(bsdfData.anisotropy));
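// Note: with the Kajiya-Kay anisotropy fixed to 0.8 in ConvertSurfaceDataToBSDFData, this scales the
// IBL perceptual roughness by saturate(1.2 - 0.8) = 0.4 (and leaves it unchanged when anisotropy is 0).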
return preLightData;
}
//-----------------------------------------------------------------------------
// bake lighting function
//-----------------------------------------------------------------------------
// This define signals that we implement a ModifyBakedDiffuseLighting function, to be called in PostInitBuiltinData
#define MODIFY_BAKED_DIFFUSE_LIGHTING
void ModifyBakedDiffuseLighting(float3 V, PositionInputs posInput, PreLightData preLightData, BSDFData bsdfData, inout BuiltinData builtinData)
{
// Add the GI transmission contribution to bakeDiffuseLighting; we then drop backBakeDiffuseLighting (i.e. it is not used anymore, which saves VGPRs)
{
// TODO: disabled until further notice (not clear how to handle occlusion).
//builtinData.bakeDiffuseLighting += builtinData.backBakeDiffuseLighting * bsdfData.transmittance;
}
// Premultiply the (back) baked diffuse lighting information with the diffuse pre-integration term
builtinData.bakeDiffuseLighting *= preLightData.diffuseFGD * bsdfData.diffuseColor;
}
//-----------------------------------------------------------------------------
// light transport functions
//-----------------------------------------------------------------------------
LightTransportData GetLightTransportData(SurfaceData surfaceData, BuiltinData builtinData, BSDFData bsdfData)
{
LightTransportData lightTransportData;
// DiffuseColor for lightmapping
lightTransportData.diffuseColor = bsdfData.diffuseColor;
lightTransportData.emissiveColor = builtinData.emissiveColor;
return lightTransportData;
}
//-----------------------------------------------------------------------------
// LightLoop related function (Only include if required)
// HAS_LIGHTLOOP is defined in Lighting.hlsl
//-----------------------------------------------------------------------------
#ifdef HAS_LIGHTLOOP
//-----------------------------------------------------------------------------
// BSDF shared between directional lights, punctual lights and area lights (reference)
//-----------------------------------------------------------------------------
bool IsNonZeroBSDF(float3 V, float3 L, PreLightData preLightData, BSDFData bsdfData)
{
return true; // Due to either reflection or transmission being always active
}
CBSDF EvaluateBSDF(float3 V, float3 L, PreLightData preLightData, BSDFData bsdfData)
{
CBSDF cbsdf;
ZERO_INITIALIZE(CBSDF, cbsdf);
float3 T = bsdfData.hairStrandDirectionWS;
float3 N = bsdfData.normalWS;
#if _USE_LIGHT_FACING_NORMAL
// The Kajiya-Kay model has a "built-in" transmission, and the 'NdotL' is always positive.
float cosTL = dot(T, L);
float sinTL = sqrt(saturate(1.0 - cosTL * cosTL));
float NdotL = sinTL; // Corresponds to the cosine w.r.t. the light-facing normal
#else
// Double-sided Lambert.
float NdotL = dot(N, L);
#endif
float NdotV = preLightData.NdotV;
float clampedNdotV = ClampNdotV(NdotV);
float clampedNdotL = saturate(NdotL);
float LdotV, NdotH, LdotH, invLenLV;
GetBSDFAngle(V, L, NdotL, NdotV, LdotV, NdotH, LdotH, invLenLV);
if (HasFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_HAIR_KAJIYA_KAY))
{
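// ShiftTangent is assumed to offset the tangent along the normal here, sliding the primary and
// secondary highlights along the strand (the classic shifted-tangent trick).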
float3 t1 = ShiftTangent(T, N, bsdfData.specularShift);
float3 t2 = ShiftTangent(T, N, bsdfData.secondarySpecularShift);
float3 H = (L + V) * invLenLV;
// Balancing energy between lobes, as well as between diffuse and specular is left to artists.
float3 hairSpec1 = bsdfData.specularTint * D_KajiyaKay(t1, H, bsdfData.specularExponent);
float3 hairSpec2 = bsdfData.secondarySpecularTint * D_KajiyaKay(t2, H, bsdfData.secondarySpecularExponent);
float3 F = F_Schlick(bsdfData.fresnel0, LdotH);
#if _USE_LIGHT_FACING_NORMAL
// See "Analytic Tangent Irradiance Environment Maps for Anisotropic Surfaces".
cbsdf.diffR = rcp(PI * PI) * clampedNdotL;
// Transmission is built into the model, and it's not exactly clear how to split it.
cbsdf.diffT = 0;
#else
// Double-sided Lambert.
cbsdf.diffR = Lambert() * clampedNdotL;
#endif
// Bypass the normal map...
float geomNdotV = dot(bsdfData.geomNormalWS, V);
// G = NdotL * NdotV.
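// With this implicit geometry term the 1 / (4 * NdotL * NdotV) microfacet denominator cancels, which
// is where the 0.25 constant below comes from; saturate(geomNdotV * FLT_MAX) is just a step function
// that kills specular for back-facing geometric normals.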
cbsdf.specR = 0.25 * F * (hairSpec1 + hairSpec2) * clampedNdotL * saturate(geomNdotV * FLT_MAX);
// Yibing's and Morten's hybrid scatter model hack.
float scatterFresnel1 = pow(saturate(-LdotV), 9.0) * pow(saturate(1.0 - geomNdotV * geomNdotV), 12.0);
float scatterFresnel2 = saturate(PositivePow((1.0 - geomNdotV), 20.0));
cbsdf.specT = scatterFresnel1 + bsdfData.rimTransmissionIntensity * scatterFresnel2;
}
return cbsdf;
}
//-----------------------------------------------------------------------------
// Surface shading (all light types) below
//-----------------------------------------------------------------------------
// Hair uses precomputed transmittance; no thick transmittance is required
#define MATERIAL_INCLUDE_PRECOMPUTED_TRANSMISSION
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightEvaluation.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/MaterialEvaluation.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/SurfaceShading.hlsl"
//-----------------------------------------------------------------------------
// EvaluateBSDF_Directional
//-----------------------------------------------------------------------------
DirectLighting EvaluateBSDF_Directional(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput, PreLightData preLightData,
DirectionalLightData lightData, BSDFData bsdfData,
BuiltinData builtinData)
{
return ShadeSurface_Directional(lightLoopContext, posInput, builtinData,
preLightData, lightData, bsdfData, V);
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Punctual (supports spot, point and projector lights)
//-----------------------------------------------------------------------------
DirectLighting EvaluateBSDF_Punctual(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BuiltinData builtinData)
{
return ShadeSurface_Punctual(lightLoopContext, posInput, builtinData,
preLightData, lightData, bsdfData, V);
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Line
//-----------------------------------------------------------------------------
DirectLighting EvaluateBSDF_Line( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BuiltinData builtinData)
{
DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
// TODO
return lighting;
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Rect
//-----------------------------------------------------------------------------
DirectLighting EvaluateBSDF_Rect( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BuiltinData builtinData)
{
DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
// TODO
return lighting;
}
DirectLighting EvaluateBSDF_Area(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData,
BSDFData bsdfData, BuiltinData builtinData)
{
if (lightData.lightType == GPULIGHTTYPE_TUBE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, builtinData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, builtinData);
}
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_SSLighting for screen space lighting
// ----------------------------------------------------------------------------
IndirectLighting EvaluateBSDF_ScreenSpaceReflection(PositionInputs posInput,
PreLightData preLightData,
BSDFData bsdfData,
inout float reflectionHierarchyWeight)
{
IndirectLighting lighting;
ZERO_INITIALIZE(IndirectLighting, lighting);
// TODO: this texture is sparse (mostly black). Can we avoid reading every texel? How about using Hi-S?
float4 ssrLighting = LOAD_TEXTURE2D_X(_SsrLightingTexture, posInput.positionSS);
InversePreExposeSsrLighting(ssrLighting);
// Apply the weight to the SSR contribution (if required)
ApplyScreenSpaceReflectionWeight(ssrLighting);
// TODO: we should multiply all indirect lighting by the FGD value only ONCE.
lighting.specularReflected = ssrLighting.rgb * preLightData.specularFGD;
reflectionHierarchyWeight = ssrLighting.a;
return lighting;
}
IndirectLighting EvaluateBSDF_ScreenspaceRefraction(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, BSDFData bsdfData,
EnvLightData envLightData,
inout float hierarchyWeight)
{
IndirectLighting lighting;
ZERO_INITIALIZE(IndirectLighting, lighting);
// TODO
return lighting;
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Env
// ----------------------------------------------------------------------------
// _preIntegratedFGD and _CubemapLD are unique for each BRDF
IndirectLighting EvaluateBSDF_Env( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData,
int influenceShapeType, int GPUImageBasedLightingType,
inout float hierarchyWeight)
{
IndirectLighting lighting;
ZERO_INITIALIZE(IndirectLighting, lighting);
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION)
return lighting;
float3 envLighting;
float3 positionWS = posInput.positionWS;
float weight = 1.0;
float3 R = preLightData.iblR;
// Note: using influenceShapeType and projectionShapeType instead of (lightData|proxyData).shapeType allows the compiler to optimize when the type is known (like for the sky)
float intersectionDistance = EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R, weight);
float4 preLD = SampleEnvWithDistanceBaseRoughness(lightLoopContext, posInput, lightData, R, preLightData.iblPerceptualRoughness, intersectionDistance);
weight *= preLD.a; // Used by planar reflection to discard pixel
envLighting = preLightData.specularFGD * preLD.rgb;
if (HasFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_HAIR_KAJIYA_KAY))
{
// We tint the HDRI with the secondary lobe's specular tint, as it is more representative of indirect lighting on hair.
envLighting *= bsdfData.secondarySpecularTint;
}
UpdateLightingHierarchyWeights(hierarchyWeight, weight);
envLighting *= weight * lightData.multiplier;
lighting.specularReflected = envLighting;
return lighting;
}
//-----------------------------------------------------------------------------
// PostEvaluateBSDF
// ----------------------------------------------------------------------------
void PostEvaluateBSDF( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, BSDFData bsdfData, BuiltinData builtinData, AggregateLighting lighting,
out LightLoopOutput lightLoopOutput)
{
AmbientOcclusionFactor aoFactor;
GetScreenSpaceAmbientOcclusionMultibounce(posInput.positionSS, preLightData.NdotV, bsdfData.perceptualRoughness, bsdfData.ambientOcclusion, bsdfData.specularOcclusion, bsdfData.diffuseColor, bsdfData.fresnel0, aoFactor);
ApplyAmbientOcclusionFactor(aoFactor, builtinData, lighting);
// Apply the albedo to the direct diffuse lighting (only once). The indirect (baked)
// diffuse lighting has already been multiplied by the albedo in ModifyBakedDiffuseLighting().
lightLoopOutput.diffuseLighting = bsdfData.diffuseColor * lighting.direct.diffuse + builtinData.bakeDiffuseLighting + builtinData.emissiveColor;
lightLoopOutput.specularLighting = lighting.direct.specular + lighting.indirect.specularReflected;
#ifdef DEBUG_DISPLAY
PostEvaluateBSDFDebugDisplay(aoFactor, builtinData, lighting, bsdfData.diffuseColor, lightLoopOutput);
#endif
}
#endif // #ifdef HAS_LIGHTLOOP