【Unity】RenderFeature Application (Simple Scene Scan Effect)

RenderFeature is a modular rendering component used in graphics and game engines to handle one specific piece of the rendering work, such as shadows, lighting, or particle effects. In Unity's URP it corresponds to ScriptableRendererFeature, which injects custom render passes into the renderer.

Clicking the ground spawns a ring of light that keeps expanding outward, which can be used as a simple screen effect.

Result

I. Implementation

1. Requirements and Principle

The example is built on Unity's URP render pipeline and implements the effect as a RenderFeature post-process.

The basic principle in one sentence: reconstruct each pixel's world-space position from the camera depth texture, then measure the distance from that position to the clicked point and use it to draw the ring.
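
As a quick illustration of that distance test, here is a minimal C# sketch of the ring mask the shader later evaluates per pixel (the ScanMath class, the Smoothstep helper, and the RingFactor name are illustrative, not part of the project):

using UnityEngine;

public static class ScanMath
{
    // HLSL-style smoothstep: 0 below edge0, 1 above edge1, smooth ramp in between
    static float Smoothstep(float edge0, float edge1, float x)
    {
        float t = Mathf.Clamp01((x - edge0) / (edge1 - edge0));
        return t * t * (3f - 2f * t);
    }

    // Returns how strongly a world-space point belongs to the expanding ring (0..1).
    // center = clicked point (_CentorPoint), radius = current inner radius (_Strength),
    // interval = ring width (_Interval)
    public static float RingFactor(Vector3 worldPos, Vector3 center, float radius, float interval)
    {
        float dist = Vector3.Distance(worldPos, center);
        float ramp = Smoothstep(radius, radius + interval, dist);   // 0 -> 1 across the ring band
        float cut  = dist >= radius + interval ? 1f : 0f;           // 1 beyond the outer edge
        return ramp - cut;                                          // non-zero only inside the ring band
    }
}

The shader blends the scene color towards _Color by this factor, and the control script increases the radius over time, which is what makes the ring expand.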

2. Implementation Steps

1) Create a UniversalRenderPipelineAsset

Before writing the RenderFeature, create a UniversalRenderPipelineAsset:

Create → Rendering → URP Asset (with Universal Renderer)

This creates both a UniversalRenderPipelineAsset and a UniversalRendererData asset.

The active UniversalRenderPipelineAsset can be set from code as below (it can also be assigned manually under Project Settings → Graphics and Project Settings → Quality):

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Illustrative wrapper component: attach to any GameObject in the scene.
public class PipelineAssetSetter : MonoBehaviour
{
    // the UniversalRenderPipelineAsset to activate
    public UniversalRenderPipelineAsset UniversalRenderPipelineAsset;
    void Start()
    {
        // assign the asset in both the Graphics and the Quality settings
        GraphicsSettings.renderPipelineAsset = UniversalRenderPipelineAsset;
        QualitySettings.renderPipeline = UniversalRenderPipelineAsset;
    }
}

2) Write the RenderFeature

Create a RenderFeature script; for a more detailed introduction see 【Unity】RenderFeature笔记.

Create → Rendering → RenderFeature

Below is the generic structure of a shader post-processing feature; what changes from effect to effect is the set of parameters and the shader itself.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

public class ScanRenderPassFeature : ScriptableRendererFeature
{
    class CustomRenderPass : ScriptableRenderPass
    {
        public Material _Material;
        public Vector4 _Pos;       // clicked point (world space)
        public Color _Color;       // ring color
        public float _Interval;    // ring width
        public float _Strength;    // current radius of the ring

        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            CommandBuffer cmd = CommandBufferPool.Get("ScanRender");
            // push the current scan parameters to the material
            _Material.SetVector("_CentorPoint", _Pos);
            _Material.SetColor("_Color", _Color);
            _Material.SetFloat("_Interval", _Interval);
            _Material.SetFloat("_Strength", _Strength);
            // blit the camera color buffer through the scan material back to the camera target
            cmd.Blit(colorAttachment, RenderTargetHandle.CameraTarget.Identifier(), _Material);
            // execute the CommandBuffer
            context.ExecuteCommandBuffer(cmd);
            // release the CommandBuffer
            CommandBufferPool.Release(cmd);
        }
    }

    CustomRenderPass m_ScriptablePass;
    public Shader ScanShader;
    public Vector4 Pos;
    public Color Color;
    public float Interval;    // ring width
    public float Strength;    // current radius
    /// <inheritdoc/>
    public override void Create()
    {
        m_ScriptablePass = new CustomRenderPass();
        m_ScriptablePass._Material = new Material(ScanShader);
        m_ScriptablePass._Pos = Pos;
        m_ScriptablePass._Color = Color;
        m_ScriptablePass._Interval = Interval;
        m_ScriptablePass._Strength = Strength;
        // Configures where the render pass should be injected.
        m_ScriptablePass.renderPassEvent = RenderPassEvent.AfterRendering;
    }
    public void SetParam()
    {
        m_ScriptablePass._Pos = Pos;
        m_ScriptablePass._Strength = Strength;
    }
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        renderer.EnqueuePass(m_ScriptablePass);
    }
}
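
One practical note: once the feature is added to the Universal Renderer Data asset, its pass is enqueued every frame, even while no scan is running. If that matters, the whole feature can be switched off with ScriptableRendererFeature.SetActive; a minimal sketch (the ScanToggle component is illustrative, and the lookup mirrors the control script in step 4):

using System.Linq;
using UnityEngine;
using UnityEngine.Rendering.Universal;

// Illustrative helper: enables or disables the scan feature without removing it from the renderer.
public class ScanToggle : MonoBehaviour
{
    public UniversalRendererData renderData;

    public void SetScanEnabled(bool active)
    {
        var feature = renderData.rendererFeatures
            .OfType<ScanRenderPassFeature>()
            .FirstOrDefault();
        if (feature != null)
            feature.SetActive(active); // controls whether the feature's passes are enqueued
    }
}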

3) Write the shader

The shader has to do two things: reconstruct each pixel's world-space position from the depth texture, and then draw the expanding ring based on that position.

Reconstructing the world-space position from depth can follow the official example:

Reconstruct the world space positions of pixels from the depth texture | URP | 7.7.1 (unity3d.com)

Core functions:

ComputeWorldSpacePosition: reconstructs a world-space position from a screen UV, a depth value, and the inverse view-projection matrix (UNITY_MATRIX_I_VP).

SampleSceneDepth: samples the camera depth texture at the given UV (declared in DeclareDepthTexture.hlsl); it is the basis for depth-related features such as reading scene depth or measuring distances between objects.

The shader below adapts the official approach:

Shader "Unlit/ScanShaderURP"
{
    Properties
    {
        _CentorPoint("CentrePoint", Vector) = (0, 0, 0, 0)
        _Color("color", Color) = (1, 1, 1, 1) // ring color
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            HLSLPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl"

            struct Attributes
            {
                float4 positionOS : POSITION;
            };

            struct Varyings
            {
                float4 positionHCS : SV_POSITION;
            };

            float4x4 _FrustumCornersRay;
            float _Interval;   // ring width
            float _Strength;   // current radius
            sampler2D _CameraColorTexture;
            float4 _CentorPoint;
            float4 _Color;

            Varyings vert(Attributes IN)
            {
                Varyings OUT;
                OUT.positionHCS = TransformObjectToHClip(IN.positionOS.xyz);
                return OUT;
            }

            half4 frag(Varyings IN) : SV_Target
            {
                // UV transform: equivalent to (u, v) -> (u, 1 - v), flipping the image vertically for the blit
                float2x2 muls = float2x2(-1, 0, 0, 1);
                float2 centerUV = float2(1, 0);
                float2 UV = 1 - (mul((IN.positionHCS.xy / _ScaledScreenParams.xy), muls) + centerUV);

                // sample the depth texture
#if UNITY_REVERSED_Z
                real depth = SampleSceneDepth(UV);
#else
                real depth = lerp(UNITY_NEAR_CLIP_VALUE, 1, SampleSceneDepth(UV));
#endif
                // reconstruct the world-space position
                float3 worldPos = ComputeWorldSpacePosition(UV, depth, UNITY_MATRIX_I_VP);
                half4 col2 = tex2D(_CameraColorTexture, UV);
                float lerpValue = 0;
                // mask out pixels beyond the clip range
                if (depth < _ProjectionParams.z - 1)
                {
                    float Mul = distance(_CentorPoint.xyz, worldPos.xyz);
                    // change controls how far the ring has travelled
                    float change = _Strength;
                    // Mul is always >= 0
                    // the first smoothstep is 0 below change and 1 above _Interval + change
                    // the second smoothstep has equal edges, so it acts as a step at _Interval + change
                    // subtracting them gives a 0-to-1 ramp between change and _Interval + change, and 0 elsewhere
                    float lerp1 = smoothstep(0 + change, _Interval + change, Mul);
                    float lerp2 = smoothstep(_Interval + change, _Interval + change, Mul);
                    float dis = lerp1 - lerp2;
                    lerpValue = dis;
                }
                half4 myCol = lerp(col2, _Color, lerpValue);
                return myCol;
            }
            ENDHLSL
        }
    }
}
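
To check that the clicked point and the growing radius line up with what the shader draws, a small Scene-view helper can be handy. This ScanDebugGizmo component is a hypothetical debugging aid, not part of the original project; feed it the same values the control script sends to the feature:

using UnityEngine;

// Hypothetical debug helper: draws the inner and outer edges of the scan ring in the Scene view.
public class ScanDebugGizmo : MonoBehaviour
{
    public Vector3 center;       // same value as _CentorPoint
    public float strength;       // same value as _Strength (inner radius)
    public float interval = 1f;  // same value as _Interval (ring width)

    private void OnDrawGizmos()
    {
        Gizmos.color = Color.cyan;
        Gizmos.DrawWireSphere(center, strength);            // inner edge
        Gizmos.color = Color.yellow;
        Gizmos.DrawWireSphere(center, strength + interval); // outer edge
    }
}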

4) Control script


using System.Linq;
using UnityEngine;
using UnityEngine.Rendering.Universal;

public class ScanControl : MonoBehaviour
{
    public UniversalRendererData renderData;
    ScanRenderPassFeature custom;

    private void Start()
    {
        // find the scan feature on the renderer data asset
        custom = renderData.rendererFeatures.OfType<ScanRenderPassFeature>().FirstOrDefault();
    }

    private void Update()
    {
        if (Input.GetMouseButtonDown(1))
        {
            // raycast from the mouse to find the clicked point in the scene
            Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
            RaycastHit hit;
            if (Physics.Raycast(ray, out hit))
            {
                Vector3 vector = hit.point;
                Vector4 vector4 = new Vector4(vector.x, vector.y, vector.z, 1);
                custom.Pos = vector4;  // new scan centre
                custom.Strength = 0;   // restart the ring at radius 0
            }
        }
    }

    private void LateUpdate()
    {
        if (custom == null)
            return;
        // expand the ring every frame and push the values to the render pass
        custom.Strength += Time.deltaTime * 10;
        custom.SetParam();
    }
}
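
Note that this control script keeps increasing Strength forever, so the ring keeps expanding long after it has left the visible scene, and because Pos and Strength are serialized fields on the renderer-feature asset, the last values are written into the asset in the editor. A possible bounded variant (the BoundedScanControl class, the maxRadius field, and the scanning flag are illustrative additions, mirroring the enableWave flag used in the built-in version below):

using System.Linq;
using UnityEngine;
using UnityEngine.Rendering.Universal;

// Illustrative variant of ScanControl that stops expanding once the ring reaches maxRadius.
public class BoundedScanControl : MonoBehaviour
{
    public UniversalRendererData renderData;
    public float maxRadius = 50f;          // stop once the ring is this large
    ScanRenderPassFeature custom;
    bool scanning;

    private void Start()
    {
        custom = renderData.rendererFeatures.OfType<ScanRenderPassFeature>().FirstOrDefault();
    }

    private void Update()
    {
        if (custom != null && Input.GetMouseButtonDown(1) &&
            Physics.Raycast(Camera.main.ScreenPointToRay(Input.mousePosition), out RaycastHit hit))
        {
            custom.Pos = new Vector4(hit.point.x, hit.point.y, hit.point.z, 1);
            custom.Strength = 0;
            scanning = true;               // restart the scan from the new click point
        }
    }

    private void LateUpdate()
    {
        if (custom == null || !scanning) return;
        custom.Strength += Time.deltaTime * 10f;
        if (custom.Strength > maxRadius) scanning = false; // ring has left the play area
        custom.SetParam();
    }
}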

3. Implementation in the Built-in Render Pipeline

Shader:

Shader "Unlit/ScanShaderBuiltIn"
{
    Properties
    {
        _MainTex("Base (RGB)", 2D) = "white" {} // main texture
        _CentorPoint("CentrePoint", Vector) = (0, 0, 0, 0)
        _Color("color", Color) = (1, 1, 1, 1) // ring color, typically declared as fixed4
        _InverseZ("InverseZ", Float) = -1
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                float4 pos : SV_POSITION;
                float3 viewVec : TEXCOORD1;
            };

            sampler2D _MainTex;            // main texture (the source image passed by OnRenderImage)
            float _Interval;               // ring width
            float _Strength;               // current radius
            float _InverseZ;
            sampler2D _CameraColorTexture;
            sampler2D _CameraDepthTexture;
            float4 _CentorPoint;
            fixed4 _Color;

            v2f vert (appdata v)
            {
                v2f o;
                o.uv = v.uv;
                o.pos = UnityObjectToClipPos(v.vertex); // MVP transform
                float4 screenPos = ComputeScreenPos(o.pos); // screen position in homogeneous space
                // float4 ndcPos = (screenPos / screenPos.w) * 2 - 1; // screen position -> NDC
                // float4 ndcPos = o.pos / o.pos.w; // manual perspective divide
                float3 ndcPos = float3(o.uv.xy * 2.0 - 1.0, 1); // map the UV straight to NDC
                float far = _ProjectionParams.z; // z of the projection params = far plane distance
                float3 clipVec = float3(ndcPos.x, ndcPos.y, ndcPos.z * _InverseZ) * far; // frustum corner in clip space
                o.viewVec = mul(unity_CameraInvProjection, clipVec.xyzz).xyz; // frustum ray in view space
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                fixed4 col2 = tex2D(_MainTex, i.uv);
                // fixed4 col2 = tex2D(_CameraColorTexture, i.uv);
                float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.uv); // sample the depth texture
                depth = Linear01Depth(depth); // convert to linear 0-1 depth
                float3 viewPos = i.viewVec * depth; // view-space position (after interpolation)
                float3 worldPos = mul(unity_CameraToWorld, float4(viewPos, 1)).xyz; // view space -> world space
                float factor = 0;
                // mask out pixels beyond the clip range
                if (depth < _ProjectionParams.z - 1)
                {
                    float Mul = distance(_CentorPoint.xyz, worldPos.xyz);
                    float change = _Strength;
                    // Mul is always >= 0
                    // the first smoothstep is 0 below change and 1 above _Interval + change
                    // the second smoothstep has equal edges, so it acts as a step at _Interval + change
                    // subtracting them gives a 0-to-1 ramp between change and _Interval + change, and 0 elsewhere
                    float lerp1 = smoothstep(0 + change, _Interval + change, Mul);
                    float lerp2 = smoothstep(_Interval + change, _Interval + change, Mul);
                    float dis = lerp1 - lerp2;
                    factor = dis;
                }
                fixed4 myCol = lerp(col2, _Color, factor);
                return myCol;
            }
            ENDCG
        }
    }
}

Control script:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Attach this component to the camera: OnRenderImage is only called on scripts attached to a Camera.
public class ScanManager : MonoBehaviour
{
    [SerializeField]
    private bool enableWave = false; // whether the scan effect is running
    [SerializeField]
    private Shader scanShader;
    private Material material = null; // post-processing material
    Vector4 _Pos = Vector4.zero;
    public Color _Color;
    float _Interval = 1; // ring width
    float _Strength = 0; // current radius

    void Start()
    {
        material = new Material(scanShader);
        // make sure the camera renders a depth texture so _CameraDepthTexture is available
        GetComponent<Camera>().depthTextureMode |= DepthTextureMode.Depth;
    }

    void Update()
    {
        if (Input.GetMouseButtonDown(1))
        {
            Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
            RaycastHit hit;
            if (Physics.Raycast(ray, out hit))
            {
                Vector3 vector = hit.point;
                Vector4 vector4 = new Vector4(vector.x, vector.y, vector.z, 1);
                Debug.Log(vector4);
                _Pos = vector4;      // new scan centre
                _Strength = 0;       // restart the ring at radius 0
                enableWave = true;
            }
        }
    }

    private void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        if (enableWave)
        {
            // expand the ring, push the parameters to the material, and blit through it
            _Strength += Time.deltaTime * 10;
            material.SetVector("_CentorPoint", _Pos);
            material.SetColor("_Color", _Color);
            material.SetFloat("_Interval", _Interval);
            material.SetFloat("_Strength", _Strength);
            Graphics.Blit(src, dest, material);
        }
        else
        {
            Graphics.Blit(src, dest);
        }
    }
}


Reposted from blog.csdn.net/dxs1990/article/details/134081101