Sampling the Depth Texture with Screen Coordinates in Unity URP


Preface

In the previous article, we got the depth texture working in URP.

However, because it was sampled with the model's UVs, the resulting depth effect was wrong:

(Screenshot: incorrect depth result when sampling with the model's UVs)

So in this article, we will sample the depth texture with screen coordinates instead.


I. Unity uses the ComputeScreenPos function to get screen coordinates

1. What does this function do?

(Image: the ComputeScreenPos function definition)
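For reference, here is a sketch of the function as it is defined in URP's ShaderLibrary (ShaderVariablesFunctions.hlsl in the versions I have checked; the exact code may differ slightly between URP versions):

float4 ComputeScreenPos(float4 positionCS)
{
    // Halve the clip-space position: 0.5*x, 0.5*y, and also 0.5*w
    float4 o = positionCS * 0.5f;
    // Remap xy from [-0.5w, 0.5w] to [0, w]; _ProjectionParams.x flips y for flipped render targets
    o.xy = float2(o.x, o.y * _ProjectionParams.x) + o.w;
    // z and w are passed through; the divide by w is deferred to the fragment shader
    o.zw = positionCS.zw;
    return o;
}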

The homogeneous clip-space coordinates undergo the perspective divide:

  • $x_{ndc} = \frac{x}{w}$

Then, going through the following steps and simplifying, we get what the function actually computes:

  • $x_{screen} = (x_{ndc} \cdot 0.5 + 0.5) \cdot width$
  • $x_{screen} = (\frac{x}{w} \cdot 0.5 + 0.5) \cdot width$
  • $\frac{x_{screen}}{width} = \frac{x}{w} \cdot 0.5 + 0.5$
  • $\frac{x_{screen}}{width} \cdot w = x \cdot 0.5 + w \cdot 0.5$
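Tying this back to the code: the last line says the value stored for x should be $x \cdot 0.5 + w \cdot 0.5$, which matches what the function stores in o.x (and likewise for o.y). The remaining divide by $w$ is deferred to the fragment shader so that the interpolated values stay perspective-correct.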

2. What this result of the function means

$\frac{x_{screen}}{width} \cdot w$

  • When $x_{screen} = 0$:
    $\frac{x_{screen}}{width} \cdot w = 0$
  • When $x_{screen} = width$:
    $\frac{x_{screen}}{width} \cdot w = w$

  • So the final output o has the range:

  • $([0,w],\ [0,w],\ z,\ w)$

  • After the divide by $w$: $(\frac{[0,w]}{w},\ \frac{[0,w]}{w},\ \frac{z}{w},\ 1)$

  • $([0,1],\ [0,1],\ \frac{z}{w},\ 1)$
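As a quick worked example: a fragment at the center of the screen ends up with screenPos.xy = (0.5w, 0.5w), so screenPos.xy / screenPos.w = (0.5, 0.5), i.e. the middle of the depth texture.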


II. Using it in a shader (method 1)

1. In the Varyings struct

float4 screenPos : TEXCOORD1;

2. In the vertex shader

o.screenPos = ComputeScreenPos(o.positionCS);

3. In the fragment shader

float2 uv = i.screenPos.xy / i.screenPos.w;

float4 cameraDepthTex = SAMPLE_TEXTURE2D(_CameraDepthTexture,sampler_CameraDepthTexture,uv);
float depthTex = Linear01Depth(cameraDepthTex.r, _ZBufferParams);
return depthTex;
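As an aside (not part of the original shader), recent URP versions also ship a small helper include, Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl, which declares _CameraDepthTexture and a SampleSceneDepth function for you. A minimal sketch of the same fragment-shader body using it:

// Assumes DeclareDepthTexture.hlsl is included instead of declaring _CameraDepthTexture manually
float2 uv = i.screenPos.xy / i.screenPos.w;
// SampleSceneDepth returns the raw (non-linear) depth, i.e. the texture's r channel
float rawDepth = SampleSceneDepth(uv);
float depthTex = Linear01Depth(rawDepth, _ZBufferParams);
return depthTex;

Either way, _CameraDepthTexture is only generated when Depth Texture is enabled on the URP Asset (or overridden on the camera); otherwise there is nothing valid to sample.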


III. Using it in a shader (method 2)

  • Principle: by the time positionCS (SV_POSITION) reaches the fragment shader, it has been converted to pixel coordinates, so $\frac{\text{positionCS.xy}}{\text{screen width/height}}$ gives screen UV coordinates in the [0,1] range.

1. In the fragment shader

float2 uv = i.positionCS.xy / _ScreenParams.xy;

float4 cameraDepthTex = SAMPLE_TEXTURE2D(_CameraDepthTexture,sampler_CameraDepthTexture,uv);
float depthTex = Linear01Depth(cameraDepthTex.r, _ZBufferParams);
return depthTex;
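Note that URP also provides a helper that does essentially this division (and accounts for flipped render targets and render scale): GetNormalizedScreenSpaceUV in ShaderVariablesFunctions.hlsl. A minimal sketch, assuming a URP version that has it:

float2 uv = GetNormalizedScreenSpaceUV(i.positionCS);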


IV. Final result

(Screenshot: the final effect, with the depth texture sampled correctly using screen coordinates)

Shader "MyShader/URP/P4_1"
{
    Properties 
    {
        _Color("Color",Color) = (0,0,0,0)
        _MainTex("MainTex",2D) = "white"{}
    }
    SubShader
    {
        Tags
        {
            // Tell the engine this shader is only for the URP render pipeline
            "RenderPipeline"="UniversalPipeline"
            // Render type
            "RenderType"="Transparent"
            // Render queue
            "Queue"="Transparent"
        }
        //Blend One One
        ZWrite Off
        Pass
        {
            Name "Unlit"
          
            HLSLPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // Pragmas
            #pragma target 2.0
            
            // Includes
            #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Input.hlsl"

            CBUFFER_START(UnityPerMaterial)
            half4 _Color;
            CBUFFER_END

            
            // Texture declarations. When compiling for GLES2.0 this is equivalent to sampler2D _MainTex; otherwise to Texture2D _MainTex;
            TEXTURE2D(_MainTex);SAMPLER(SamplerState_linear_mirrorU_ClampV); float4 _MainTex_ST;
            TEXTURE2D(_CameraDepthTexture);SAMPLER(sampler_CameraDepthTexture);
            
            //struct appdata
            // Vertex shader input
            struct Attributes
            {
                float3 positionOS : POSITION;
                float2 uv : TEXCOORD0;
            };
            //struct v2f
            // Fragment shader input
            struct Varyings
            {
                float4 positionCS : SV_POSITION;
                float2 uv : TEXCOORD0;
                float4 screenPos : TEXCOORD1;
            };
            //v2f vert(Attributes v)
            // Vertex shader
            Varyings vert(Attributes v)
            {
                Varyings o = (Varyings)0;
                float3 positionWS = TransformObjectToWorld(v.positionOS);
                o.positionCS = TransformWorldToHClip(positionWS);
                o.uv = TRANSFORM_TEX(v.uv,_MainTex);
                o.screenPos = ComputeScreenPos(o.positionCS);
                return o;
            }
            //fixed4 frag(v2f i) : SV_TARGET
            // Fragment shader
            half4 frag(Varyings i) : SV_TARGET
            {
                half4 c;
                
                float4 mainTex = SAMPLE_TEXTURE2D(_MainTex,SamplerState_linear_mirrorU_ClampV,i.uv);
                //c = _Color *  mainTex;
                // Depth texture (method 2: SV_POSITION is already in pixel coordinates)
                //float2 uv = i.screenPos.xy / i.screenPos.w;   // method 1: perspective divide of screenPos
                float2 uv = i.positionCS.xy / _ScreenParams.xy;
                float4 cameraDepthTex = SAMPLE_TEXTURE2D(_CameraDepthTexture, sampler_CameraDepthTexture, uv);
                float depthTex = Linear01Depth(cameraDepthTex.r, _ZBufferParams);
                return depthTex;
            }
            ENDHLSL
        }
    }

    FallBack "Hidden/Shader Graph/FallbackError"
}

Reprinted from blog.csdn.net/qq_51603875/article/details/135467118