Skip to content

Changed the way the filter size is decided for directional, point and spot shadows #951

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jun 17, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
1 change: 1 addition & 0 deletions com.unity.render-pipelines.high-definition/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -836,6 +836,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Add color clear pass while rendering XR occlusion mesh to avoid leaks.
- Only use one texture for ray traced reflection upscaling.
- Adjust the upscale radius based on the roughness value.
- DXR: Changed the way the filter size is decided for directional, point and spot shadows.

## [7.1.1] - 2019-09-05

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,6 @@ partial class HDRenderPipelineRayTracingResources : ScriptableObject
// Filtering for reflections
[Reload("Runtime/RenderPipelineResources/Texture/ReflectionKernelMapping.png")]
public Texture2D reflectionFilterMapping;
[Reload("Runtime/RenderPipelineResources/Texture/ShadowKernelMapping.asset")]
public Texture3D shadowFilterMapping;


#if UNITY_EDITOR
[UnityEditor.CustomEditor(typeof(HDRenderPipelineRayTracingResources))]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -570,6 +570,7 @@ static class HDShaderIDs
public static readonly int _DirectionalLightDirection = Shader.PropertyToID("_DirectionalLightDirection");
public static readonly int _SphereLightPosition = Shader.PropertyToID("_SphereLightPosition");
public static readonly int _SphereLightRadius = Shader.PropertyToID("_SphereLightRadius");
public static readonly int _CameraFOV = Shader.PropertyToID("_CameraFOV");

// Ambient occlusion
public static readonly int _RaytracingAOIntensity = Shader.PropertyToID("_RaytracingAOIntensity");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ class HDDiffuseShadowDenoiser

// The resources required by this component
ComputeShader m_ShadowDenoiser;
Texture3D m_ShadowFilterMapping;

// Kernels that we are using
int m_BilateralFilterHSingleDirectionalKernel;
Expand All @@ -30,7 +29,6 @@ public void Init(HDRenderPipelineRayTracingResources rpRTResources, SharedRTMana
m_RenderPipeline = renderPipeline;

m_ShadowDenoiser = rpRTResources.diffuseShadowDenoiserCS;
m_ShadowFilterMapping = rpRTResources.shadowFilterMapping;

m_BilateralFilterHSingleDirectionalKernel = m_ShadowDenoiser.FindKernel("BilateralFilterHSingleDirectional");
m_BilateralFilterVSingleDirectionalKernel = m_ShadowDenoiser.FindKernel("BilateralFilterVSingleDirectional");
Expand Down Expand Up @@ -64,20 +62,21 @@ public void DenoiseBufferDirectional(CommandBuffer cmd, HDCamera hdCamera,

// Convert the angular diameter of the directional light to radians (from degrees)
float lightAngle = angularDiameter * Mathf.PI / 180.0f;
float cameraFOV = hdCamera.camera.fieldOfView * Mathf.PI / 180.0f;

// Horizontal pass of the bilateral filter
int m_KernelFilter = singleChannel ? m_BilateralFilterHSingleDirectionalKernel : m_BilateralFilterHColorDirectionalKernel;

// Bind input uniforms
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._DirectionalLightAngle, lightAngle);
cmd.SetComputeIntParam(m_ShadowDenoiser, HDShaderIDs._DenoiserFilterRadius, kernelSize);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._CameraFOV, cameraFOV);

// Bind Input Textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, noisySignal);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DistanceTexture, distanceSignal);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._ShadowFilterMapping, m_ShadowFilterMapping);

// Bind output textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, intermediateBuffer0);
Expand All @@ -91,13 +90,13 @@ public void DenoiseBufferDirectional(CommandBuffer cmd, HDCamera hdCamera,
// Bind input uniforms
cmd.SetComputeIntParam(m_ShadowDenoiser, HDShaderIDs._DenoiserFilterRadius, kernelSize);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._DirectionalLightAngle, lightAngle);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._CameraFOV, cameraFOV);

// Bind Input Textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DenoiseInputTexture, intermediateBuffer0);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DistanceTexture, distanceSignal);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._ShadowFilterMapping, m_ShadowFilterMapping);

// Bind output textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_KernelFilter, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
Expand All @@ -122,17 +121,19 @@ public void DenoiseBufferSphere(CommandBuffer cmd, HDCamera hdCamera,
int numTilesX = (texWidth + (areaTileSize - 1)) / areaTileSize;
int numTilesY = (texHeight + (areaTileSize - 1)) / areaTileSize;

float cameraFOV = hdCamera.camera.fieldOfView * Mathf.PI / 180.0f;

// Bind input uniforms
cmd.SetComputeIntParam(m_ShadowDenoiser, HDShaderIDs._DenoiserFilterRadius, kernelSize);
cmd.SetComputeVectorParam(m_ShadowDenoiser, HDShaderIDs._SphereLightPosition, lightPosition);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._SphereLightRadius, lightRadius);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._CameraFOV, cameraFOV);

// Bind Input Textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterHSingleSphereKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterHSingleSphereKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterHSingleSphereKernel, HDShaderIDs._DenoiseInputTexture, noisySignal);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterHSingleSphereKernel, HDShaderIDs._DistanceTexture, distanceSignal);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterHSingleSphereKernel, HDShaderIDs._ShadowFilterMapping, m_ShadowFilterMapping);

// Bind output textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterHSingleSphereKernel, HDShaderIDs._DenoiseOutputTextureRW, intermediateBuffer0);
Expand All @@ -144,12 +145,12 @@ public void DenoiseBufferSphere(CommandBuffer cmd, HDCamera hdCamera,
cmd.SetComputeIntParam(m_ShadowDenoiser, HDShaderIDs._DenoiserFilterRadius, kernelSize);
cmd.SetComputeVectorParam(m_ShadowDenoiser, HDShaderIDs._SphereLightPosition, lightPosition);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._SphereLightRadius, lightRadius);
cmd.SetComputeFloatParam(m_ShadowDenoiser, HDShaderIDs._CameraFOV, cameraFOV);

cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterVSingleSphereKernel, HDShaderIDs._DenoiseInputTexture, intermediateBuffer0);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterVSingleSphereKernel, HDShaderIDs._DistanceTexture, distanceSignal);
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterVSingleSphereKernel, HDShaderIDs._DepthTexture, m_SharedRTManager.GetDepthStencilBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterVSingleSphereKernel, HDShaderIDs._NormalBufferTexture, m_SharedRTManager.GetNormalBuffer());
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterVSingleSphereKernel, HDShaderIDs._ShadowFilterMapping, m_ShadowFilterMapping);

// Bind output textures
cmd.SetComputeTextureParam(m_ShadowDenoiser, m_BilateralFilterVSingleSphereKernel, HDShaderIDs._DenoiseOutputTextureRW, outputSignal);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,20 +24,16 @@
// Tile size of this compute
#define SHADOW_DENOISER_TILE_SIZE 8

// Texture used to adjust the filter size to avoid overblurring
TEXTURE3D(_ShadowFilterMapping);

// This value is the maximal distance value that the filter supports
#define RAY_TRACING_LIGHT_MAX_DISTANCE 50.0f
#define RAY_TRACING_OCCLUDER_MAX_DISTANCE 10.0f

// Ray tracing input textures
TEXTURE2D_X(_DenoiseInputTexture);
TEXTURE2D_X(_DistanceTexture);

// Generic denoiser inputs
int _DenoiserFilterRadius;

// Camera FOV
float _CameraFOV;

#if DIRECTIONAL_LIGHT
// Inputs for directional lights
float _DirectionalLightAngle;
Expand Down Expand Up @@ -66,34 +62,38 @@ void BILATERAL_FILTER(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupT
// Read the center pixel
const BilateralData center = TapBilateralData(centerCoord);

// Compute the normalized view vector
float distanceValue = clamp(length(_WorldSpaceCameraPos - center.position) / RAY_TRACING_LIGHT_MAX_DISTANCE, 0.0, 1.0);
// Get the world space position of the center pixel to filter
const float3 positionWS = GetAbsolutePositionWS(center.position);

#if DIRECTIONAL_LIGHT
// Evaluate the normalized solid angle of the light
float lightSolidAngle = _DirectionalLightAngle / (PI * 0.5f);
// For the directional light, the solid angle can be used directly
float lightSolidAngle = _DirectionalLightAngle;
#else
// Compute the light vector
float lightPointDistance = clamp(length(_SphereLightPosition - GetAbsolutePositionWS(center.position)) / RAY_TRACING_LIGHT_MAX_DISTANCE, 0.0, 1.0);

float lightPointDistance = length(_SphereLightPosition - positionWS);
// Evaluate the normalized solid angle of the light
float lightSolidAngle = atan(_SphereLightRadius / lightPointDistance) / PI;
float lightSolidAngle = atan(_SphereLightRadius / lightPointDistance);
#endif
// Compute the distances we need for our filtering
const float distanceCameraToPlane = length(positionWS - _WorldSpaceCameraPos);
const float distancePlaneToObject = LOAD_TEXTURE2D_X(_DistanceTexture, centerCoord).x;

// Grab the normalized/clamped distance between the point and its occluder
float averageSurfaceDistance = clamp(LOAD_TEXTURE2D_X(_DistanceTexture, centerCoord).x / RAY_TRACING_OCCLUDER_MAX_DISTANCE, 0.0, 1.0);
// Compute the cone footprint on the image reflection plane for this configuration
const float brdfConeRadius = tan(lightSolidAngle * 0.5) * distancePlaneToObject * 2.0f;

// Compute the 3d uv value that we shall be using
float3 mappingUV = float3(averageSurfaceDistance, lightSolidAngle, distanceValue);
// We need to compute the view cone radius
const float viewConeRadius = brdfConeRadius * distanceCameraToPlane / (distancePlaneToObject + distanceCameraToPlane);

// Fetch the shadow scaling value
float2 radiusScale = SAMPLE_TEXTURE3D_LOD(_ShadowFilterMapping, s_trilinear_clamp_sampler, mappingUV, 0.0f).xy;
// Compute the view cone's half angle. This matches the FOV angle required to see exactly half of the cone (the tangent could be precomputed in the table)
const float viewConeHalfAngle = FastATanPos(viewConeRadius / distanceCameraToPlane);
// Given the camera's fov and pixel resolution convert the viewConeHalfAngle to a number of pixels
const float pixelDistance = viewConeHalfAngle / _CameraFOV * _ScreenSize.x;

// Evaluate the radius that should be used for the filter
#if FINAL_PASS
const float radius = _DenoiserFilterRadius * radiusScale.x;
const float radius = clamp(pixelDistance, 1, _DenoiserFilterRadius);
#else
const float radius = _DenoiserFilterRadius * radiusScale.y;
const float radius = clamp(pixelDistance, 1, _DenoiserFilterRadius);
#endif

// Compute the sigma value for our filter
Expand Down