Skip to content

Hdrp/path traced dof #164

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 18 commits into from
Apr 21, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions com.unity.render-pipelines.high-definition/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Added information for fabric materials in fabric scene
- Added a DisplayInfo attribute to specify a name override and a display order for Volume Component fields (used only in default inspector for now).
- Added Min distance to contact shadows.
- Added support for Depth of Field in path tracing (by sampling the lens aperture).

### Fixed
- Fix when rescale probe all direction below zero (1219246)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -517,9 +517,15 @@ void PoolSource(ref RTHandle src, RTHandle dst)
}
}

// If path tracing is enabled, then DoF is computed in the path tracer by sampling the lens aperture (when using the physical camera mode)
bool isDoFPathTraced = (camera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) &&
camera.volumeStack.GetComponent<PathTracing>().enable.value &&
camera.camera.cameraType != CameraType.Preview &&
m_DepthOfField.focusMode == DepthOfFieldMode.UsePhysicalCamera);

// Depth of Field is done right after TAA as it's easier to just re-project the CoC
// map rather than having to deal with all the implications of doing it before TAA
if (m_DepthOfField.IsActive() && !isSceneView && m_DepthOfFieldFS)
if (m_DepthOfField.IsActive() && !isSceneView && m_DepthOfFieldFS && !isDoFPathTraced)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfField)))
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -458,6 +458,10 @@ static class HDShaderIDs
public static readonly int _RaytracingPixelSpreadAngle = Shader.PropertyToID("_RaytracingPixelSpreadAngle");
public static readonly string _RaytracingAccelerationStructureName = "_RaytracingAccelerationStructure";

// Path tracing variables
public static readonly int _PathTracedDoFConstants = Shader.PropertyToID("_PathTracedDoFConstants");
public static readonly int _InvViewportScaleBias = Shader.PropertyToID("_InvViewportScaleBias");

// Light Cluster
public static readonly int _MinClusterPos = Shader.PropertyToID("_MinClusterPos");
public static readonly int _MaxClusterPos = Shader.PropertyToID("_MaxClusterPos");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ public sealed class PathTracing : VolumeComponent
[Tooltip("Defines the maximum intensity value computed for a path segment.")]
public ClampedFloatParameter maximumIntensity = new ClampedFloatParameter(10f, 0f, 100f);
}

public partial class HDRenderPipeline
{
PathTracing m_PathTracingSettings = null;
Expand Down Expand Up @@ -95,6 +96,17 @@ internal void ResetPathTracing()
m_SubFrameManager.Reset();
}

private Vector4 ComputeDoFConstants(HDCamera hdCamera, PathTracing settings)
{
    // Packs the lens parameters consumed by the path tracer for depth of field:
    // x = aperture radius (meters), y = focus distance, zw = unused.
    DepthOfField dofSettings = hdCamera.volumeStack.GetComponent<DepthOfField>();

    // DoF is only path traced when driven by the physical camera mode, and never for scene view cameras.
    bool enableDof = dofSettings.focusMode.value == DepthOfFieldMode.UsePhysicalCamera
        && hdCamera.camera.cameraType != CameraType.SceneView;

    float apertureRadius = 0.0f;
    if (enableDof && hdCamera.physicalParameters != null && hdCamera.physicalParameters.aperture > 0)
    {
        // focalLength is in mm, so we convert to meters (* 0.001). We also want the
        // aperture radius rather than the diameter (focalLength / f-number), hence the 0.5 factor.
        apertureRadius = 0.5f * 0.001f * hdCamera.camera.focalLength / hdCamera.physicalParameters.aperture;
    }

    return new Vector4(apertureRadius, dofSettings.focusDistance.value, 0.0f, 0.0f);
}

#if UNITY_EDITOR

private void OnSceneEdit()
Expand Down Expand Up @@ -275,6 +287,8 @@ void RenderPathTracing(HDCamera hdCamera, CommandBuffer cmd, RTHandle outputText
// Additional data for path tracing
cmd.SetRayTracingTextureParam(pathTracingShader, HDShaderIDs._RadianceTexture, m_RadianceTexture);
cmd.SetRayTracingMatrixParam(pathTracingShader, HDShaderIDs._PixelCoordToViewDirWS, hdCamera.mainViewConstants.pixelCoordToViewDirWS);
cmd.SetRayTracingVectorParam(pathTracingShader, HDShaderIDs._PathTracedDoFConstants, ComputeDoFConstants(hdCamera, m_PathTracingSettings));
cmd.SetRayTracingVectorParam(pathTracingShader, HDShaderIDs._InvViewportScaleBias, HDUtils.ComputeInverseViewportScaleBias(hdCamera));

// Run the computation
cmd.DispatchRays(pathTracingShader, "RayGen", (uint)hdCamera.actualWidth, (uint)hdCamera.actualHeight, 1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Sampling/Sampling.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/Builtin/BuiltinData.hlsl"

Expand All @@ -22,6 +23,10 @@ float4x4 _PixelCoordToViewDirWS;
int _RaytracingCameraSkyEnabled;
float3 _RaytracingCameraClearColor;

// DoF related parameters
float4 _PathTracedDoFConstants; // x: aperture radius, y: focus distance, zw: unused
float4 _InvViewportScaleBias;

// Output(s)
RWTexture2D<float4> _RadianceTexture;

Expand Down Expand Up @@ -66,12 +71,44 @@ void RayGen()
jitteredPixelCoord.x += GetSample(currentPixelCoord, _RaytracingSampleIndex, 40);
jitteredPixelCoord.y += GetSample(currentPixelCoord, _RaytracingSampleIndex, 41);

// Compute the ray direction, from those coordinates
float3 directionWS = -normalize(mul(jitteredPixelCoord, (float3x3)_PixelCoordToViewDirWS));
float3 directionWS;
float3 cameraPosWS;

float apertureRadius = _PathTracedDoFConstants.x;
if (apertureRadius == 0.0)
{
// Compute the ray direction from those coordinates (fast path for zero aperture)
directionWS = -normalize(mul(jitteredPixelCoord, (float3x3)_PixelCoordToViewDirWS));
cameraPosWS = _WorldSpaceCameraPos;
}
else
{
// Compute the ray origin and direction for a lens with non-zero aperture

// Apply the inverse viewport transform to go from viewport coordinates to NDC
jitteredPixelCoord.xy = jitteredPixelCoord.xy * _InvViewportScaleBias.xy + _InvViewportScaleBias.zw;

// Sample the lens aperture using the next available dimensions (we use 40 for path tracing, 2 for sub-pixel jittering, 64 for SSS -> 106, 107)
float r1 = GetSample(currentPixelCoord, _RaytracingSampleIndex, 106);
float r2 = GetSample(currentPixelCoord, _RaytracingSampleIndex, 107);
float2 uv = apertureRadius * SampleDiskUniform(r1, r2);

// Compute the new ray origin ( _ViewMatrix[0] = right, _ViewMatrix[1] = up, _ViewMatrix[2] = forward )
float focusDistance = _PathTracedDoFConstants.y;
float3 focusPoint = _WorldSpaceCameraPos - _ViewMatrix[2] * focusDistance;
cameraPosWS = _WorldSpaceCameraPos + _ViewMatrix[0] * uv.x + _ViewMatrix[1] * uv.y;

// Create the new view matrix
float3 newForward = normalize(focusPoint - cameraPosWS);
float3 newRight = cross(newForward, _ViewMatrix[1]);
float3x3 newViewMatrix = GetLocalFrame(newForward, newRight);

directionWS = normalize(mul(jitteredPixelCoord, newViewMatrix));
}

// Create the ray descriptor for this pixel
RayDesc rayDescriptor;
rayDescriptor.Origin = _WorldSpaceCameraPos;
rayDescriptor.Origin = cameraPosWS;
rayDescriptor.Direction = directionWS;
rayDescriptor.TMin = _RaytracingCameraNearPlane;
rayDescriptor.TMax = FLT_INF;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,23 @@ internal static Matrix4x4 ComputePixelCoordToWorldSpaceViewDirectionMatrix(float
return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}

// Scale and bias to transform unnormalized viewport/pixel coordinates to normalized device coordinates
// Scale and bias to transform unnormalized viewport/pixel coordinates to normalized device coordinates
internal static Vector4 ComputeInverseViewportScaleBias(HDCamera hdCamera)
{
    float verticalFoV = hdCamera.camera.GetGateFittedFieldOfView() * Mathf.Deg2Rad;
    Vector2 lensShift = hdCamera.camera.GetGateFittedLensShift();
    float tanHalfVertFoV = Mathf.Tan(0.5f * verticalFoV);

    // Fall back to the screen-size ratio when the camera reports a negative aspect.
    float aspectRatio = hdCamera.camera.aspect;
    if (aspectRatio < 0)
        aspectRatio = hdCamera.screenSize.x * hdCamera.screenSize.w;

    // See the comment in ComputePixelCoordToWorldSpaceViewDirectionMatrix for the derivation
    float scaleX = -2.0f * hdCamera.screenSize.z * tanHalfVertFoV * aspectRatio;
    float scaleY = -2.0f * hdCamera.screenSize.w * tanHalfVertFoV;
    float biasX = (1.0f - 2.0f * lensShift.x) * tanHalfVertFoV * aspectRatio;
    float biasY = (1.0f - 2.0f * lensShift.y) * tanHalfVertFoV;

    return new Vector4(scaleX, scaleY, biasX, biasY);
}

internal static float ComputZPlaneTexelSpacing(float planeDepth, float verticalFoV, float resolutionY)
{
float tanHalfVertFoV = Mathf.Tan(0.5f * verticalFoV);
Expand Down