一,前置知识
RenderPipeline
默认管线RenderPipeline
- 一些流程是由代码控制开关的,比如Depth Texture这一步,如果设置了depthTextureMode,则会把所有LightMode为ShadowCaster的Pass执行一遍并存储到Depth Texture中。
- 一些流程是由Shader 中的Tags { “LightMode” “RenderType” “Queue”}标注控制的。渲染时unity根据这些标签,把Pass放在图中对应顺序运行,因此才可以实现透明物体的“先渲染不透明,再渲染透明”的操作。
Scriptable Render Pipeline可编程渲染管线
二,URP渲染流程(代码向)
Render函数
com.unity.render-pipelines.universal@7.7.1/Runtime/UniversalRenderPipeline.cs
// URP per-frame entry point: Unity calls this once per frame with all active cameras.
// Quoted from UniversalRenderPipeline.cs (URP 7.7.1); "……" marks elided source lines.
protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
{
BeginFrameRendering(renderContext, cameras);
// Per-frame global state: linear light intensity and the SRP batcher toggle come from settings.
GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
SetupPerFrameShaderConstants();
……
// Sort cameras so stacked/overlay cameras render in a well-defined order.
SortCameras(cameras);
for (int i = 0; i < cameras.Length; ++i)
{
var camera = cameras[i];
……
if (IsGameCamera(camera))
{
// Game cameras may belong to a camera stack (Base + Overlay cameras).
RenderCameraStack(renderContext, camera);
}
else
{
// Non-game cameras (scene view, preview, etc.) render standalone.
BeginCameraRendering(renderContext, camera);
……
UpdateVolumeFramework(camera, null);
RenderSingleCamera(renderContext, camera);
EndCameraRendering(renderContext, camera);
}
}
EndFrameRendering(renderContext, cameras);
}
BeginCameraRendering(context, currCamera);
UpdateVolumeFramework(currCamera, currCameraData);
InitializeCameraData(baseCamera, baseCameraAdditionalData, out var baseCameraData);
RenderSingleCamera(context, overlayCameraData, lastCamera, anyPostProcessingEnabled);
EndCameraRendering(context, currCamera);
RenderSingleCamera函数
其中最关键的是RenderSingleCamera函数:
// Renders one camera end-to-end: cull → build RenderingData → renderer Setup/Execute → submit.
// Quoted from UniversalRenderPipeline.cs (URP 7.7.1); "……" marks elided source lines.
static void RenderSingleCamera(ScriptableRenderContext context, CameraData cameraData, bool anyPostProcessingEnabled)
{
Camera camera = cameraData.camera;
var renderer = cameraData.renderer;
if (renderer == null)
{
Debug.LogWarning(string.Format("Trying to render {0} with an invalid renderer. Camera rendering will be skipped.", camera.name));
return;
}
// Abort if culling parameters cannot be obtained (e.g. invalid camera state).
if (!camera.TryGetCullingParameters(IsStereoEnabled(camera), out var cullingParameters))
return;
ScriptableRenderer.current = renderer;
bool isSceneViewCamera = cameraData.isSceneViewCamera;
// Per-camera profiler scope; a named sampler is only allocated at Profiling debug level.
ProfilingSampler sampler = (asset.debugLevel >= PipelineDebugLevel.Profiling) ? new ProfilingSampler(camera.name): _CameraProfilingSampler;
CommandBuffer cmd = CommandBufferPool.Get(sampler.name);
using (new ProfilingScope(cmd, sampler))
{
// Reset renderer state, then let the renderer adjust culling (e.g. shadow distance).
renderer.Clear(cameraData.renderType);
renderer.SetupCullingParameters(ref cullingParameters, ref cameraData);
context.ExecuteCommandBuffer(cmd);
cmd.Clear();
……
// Cull the scene and gather everything the renderer needs into RenderingData.
var cullResults = context.Cull(ref cullingParameters);
InitializeRenderingData(asset, ref cameraData, ref cullResults, anyPostProcessingEnabled, out var renderingData);
……
// Setup enqueues this frame's passes; Execute runs them.
renderer.Setup(context, ref renderingData);
renderer.Execute(context, ref renderingData);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
// Submit dispatches all queued rendering commands to the graphics API.
context.Submit();
ScriptableRenderer.current = null;
}
其中的InitializeRenderingData函数
// Aggregates culling results plus camera, light and shadow settings into one RenderingData struct.
// Quoted from UniversalRenderPipeline.cs (URP 7.7.1); "……" marks elided source lines.
static void InitializeRenderingData(UniversalRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults,
bool anyPostProcessingEnabled, out RenderingData renderingData)
{
……
renderingData.cullResults = cullResults;
renderingData.cameraData = cameraData;
// Light data: per-object light limits etc. Shadow data: resolution, cascades;
// per-vertex additional-light shading disables additional-light shadows.
InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
……
}
- InitializeLightData设置了最大灯光数maxPerObjectLights,GLES 2最多四盏,其余最多八盏,如下图。
- InitializeShadowData看到:
- 默认光照在pipelineasset设置,特殊光照在光源的UniversalAdditionalLightData组件设置
- 屏幕空间阴影需要设备GLES 2以上才支持
- 阴影质量由shadowmap分辨率和cascade数共同决定
其中的ForwardRenderer类
ForwardRenderer继承于ScriptableRenderer,它维护了一个ScriptableRenderPass的列表,在每帧往列表里新增pass,然后执行pass渲染画面,每帧结束再清空列表。它的渲染资源被序列化为ScriptableRendererData。
ScriptableRenderer里的核心函数Setup和Execute每帧都会执行,其中Setup会把要执行的pass加入列表,Execute将列表里的pass按渲染顺序分类提取并执行。
ForwardRenderer下Setup函数(重点)
主要是将需要的pass加入渲染队列中。
// ForwardRenderer.Setup: decides which passes run this frame and enqueues them in order.
// Quoted from ForwardRenderer.cs (URP 7.7.1); "……" marks elided source lines.
public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
{
……
// Special case: camera renders only into an off-screen depth texture —
// enqueue just the minimal opaque/skybox/transparent passes and return early.
bool isOffscreenDepthTexture = cameraData.targetTexture != null && cameraData.targetTexture.format == RenderTextureFormat.Depth;
if (isOffscreenDepthTexture)
{
……
for (int i = 0; i < rendererFeatures.Count; ++i)
{
if(rendererFeatures[i].isActive)
rendererFeatures[i].AddRenderPasses(this, ref renderingData);
}
EnqueuePass(m_RenderOpaqueForwardPass);
EnqueuePass(m_DrawSkyboxPass);
……
EnqueuePass(m_RenderTransparentForwardPass);
return;
}
……
// Choose the active color/depth attachments: a Base camera may render straight to the
// camera target; an Overlay camera always uses the intermediate textures.
if (cameraData.renderType == CameraRenderType.Base)
{
m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : RenderTargetHandle.CameraTarget;
m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : RenderTargetHandle.CameraTarget;
……
}
else
{
m_ActiveCameraColorAttachment = m_CameraColorAttachment;
m_ActiveCameraDepthAttachment = m_CameraDepthAttachment;
}
ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), m_ActiveCameraDepthAttachment.Identifier());
// Let user-defined Renderer Features inject their custom passes.
for (int i = 0; i < rendererFeatures.Count; ++i)
{
if(rendererFeatures[i].isActive)
rendererFeatures[i].AddRenderPasses(this, ref renderingData);
}
// Shadow maps first: main light, then additional lights.
if (mainLightShadows)
EnqueuePass(m_MainLightShadowCasterPass);
if (additionalLightShadows)
EnqueuePass(m_AdditionalLightsShadowCasterPass);
// Optional depth prepass that fills _CameraDepthTexture before opaques.
if (requiresDepthPrepass)
{
m_DepthPrepass.Setup(cameraTargetDescriptor, m_DepthTexture);
EnqueuePass(m_DepthPrepass);
}
// Color-grading LUT is baked before the main rendering so post-processing can sample it.
if (generateColorGradingLUT)
{
m_ColorGradingLutPass.Setup(m_ColorGradingLut);
EnqueuePass(m_ColorGradingLutPass);
}
EnqueuePass(m_RenderOpaqueForwardPass);
……
// Skybox draws after opaques; Overlay cameras never draw a skybox.
if (camera.clearFlags == CameraClearFlags.Skybox && (RenderSettings.skybox != null || cameraSkybox?.material != null) && !isOverlayCamera)
EnqueuePass(m_DrawSkyboxPass);
// If no depth prepass ran, copy depth out of the attachment after opaques instead.
if (!requiresDepthPrepass && renderingData.cameraData.requiresDepthTexture && createDepthTexture)
{
m_CopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
EnqueuePass(m_CopyDepthPass);
}
// Opaque color copy (_CameraOpaqueTexture), e.g. for refraction-style effects.
if (renderingData.cameraData.requiresOpaqueTexture)
{
Downsampling downsamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;
m_CopyColorPass.Setup(m_ActiveCameraColorAttachment.Identifier(), m_OpaqueColor, downsamplingMethod);
EnqueuePass(m_CopyColorPass);
}
……
if (transparentsNeedSettingsPass)
EnqueuePass(m_TransparentSettingsPass);
EnqueuePass(m_RenderTransparentForwardPass);
EnqueuePass(m_OnRenderObjectCallbackPass);
……
// Only the last camera in a stack resolves post-processing and blits to the final target.
if (lastCameraInTheStack)
{
if (applyPostProcessing)
{
m_PostProcessPass.Setup(……);
EnqueuePass(m_PostProcessPass);
}
// Screenshot/recording capture hook, if any capture actions are registered.
if (renderingData.cameraData.captureActions != null)
{
m_CapturePass.Setup(m_ActiveCameraColorAttachment);
EnqueuePass(m_CapturePass);
}
……
// A separate "final" post pass (e.g. FXAA) runs after regular post-processing.
if (applyFinalPostProcessing)
{
m_FinalPostProcessPass.SetupFinalPass(sourceForFinalPass);
EnqueuePass(m_FinalPostProcessPass);
}
if (!cameraTargetResolved)
{
m_FinalBlitPass.Setup(cameraTargetDescriptor, sourceForFinalPass);
EnqueuePass(m_FinalBlitPass);
}
}
else if (applyPostProcessing)
{
// Intermediate camera in a stack: post-process in place, no final blit.
m_PostProcessPass.Setup(……);
EnqueuePass(m_PostProcessPass);
}
……
}
可以看到可用的pass有MainLightShadowCasterPass、AdditionalLightsShadowCasterPass,DepthPrePass,ScreenSpaceShadowResolvePass,ColorGradingLutPass,RenderOpaqueForwardPass,DrawSkyboxPass,CopyDepthPass,CopyColorPass,TransparentForwardPass,RenderObjectCallbackPass,PostProcessPass,CapturePass,FinalPostProcessPass,FinalBlitPass等等。
- 注意这里比默认管线多了CopyColor和CopyDepth两个步骤(水面折射时抓取color用,但无法多重折射)
ForwardRenderer下Execute函数(重点)
用于执行各个队列里的pass
// ScriptableRenderer.Execute: runs the enqueued passes, grouped into ordered blocks.
// Quoted from ScriptableRenderer.cs (URP 7.7.1); "……" marks elided source lines.
public void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
……
// Stable sort: passes with equal RenderPassEvent keep their enqueue order.
SortStable(m_ActiveRenderPassQueue);
……
// Partition the sorted queue into BeforeRendering / Opaque / Transparent / AfterRendering blocks.
FillBlockRanges(blockEventLimits, blockRanges);
……
SetupLights(context, ref renderingData);
ExecuteBlock(RenderPassBlock.BeforeRendering, blockRanges, context, ref renderingData);
……
ExecuteBlock(RenderPassBlock.MainRenderingOpaque, blockRanges, context, ref renderingData, eyeIndex);
ExecuteBlock(RenderPassBlock.MainRenderingTransparent, blockRanges, context, ref renderingData, eyeIndex);
// Gizmos drawn before image effects, so post-processing applies to them too.
DrawGizmos(context, camera, GizmoSubset.PreImageEffects);
ExecuteBlock(RenderPassBlock.AfterRendering, blockRanges, context, ref renderingData, eyeIndex);
InternalFinishRendering(context, cameraData.resolveFinalTarget);
……
}
RenderPassEvent字段大小:
// Ordering keys for ScriptableRenderPass injection points; a larger value runs later in
// the frame. Gaps of 50 leave room to slot custom events between the built-in stages.
public enum RenderPassEvent
{
BeforeRendering = 0,
BeforeRenderingShadows = 50,
AfterRenderingShadows = 100,
BeforeRenderingPrepasses = 150,
AfterRenderingPrePasses = 200,
BeforeRenderingOpaques = 250,
AfterRenderingOpaques = 300,
BeforeRenderingSkybox = 350,
AfterRenderingSkybox = 400,
BeforeRenderingTransparents = 450,
AfterRenderingTransparents = 500,
BeforeRenderingPostProcessing = 550,
AfterRenderingPostProcessing = 600,
AfterRendering = 1000,
}
三,后处理
- URP后处理由4部分组成,分别是渲染器(Forward Renderer)— 后处理(Volume) — Pass模块 —Shader:
3.1 RenderFeature后处理
- RenderFeature是用来拓展Pass的,依附于ForwardRenderer,可以在渲染的某个时机插入一次渲染命令(例如渲染不透明后描边、渲染半透明后滤镜等),因此一般的全屏渲染后处理可以使用RenderFeature处理。
- 「注意:在URP里原MonoBehaviour里的OnRenderImage函数被取消了,需要使用ScriptableRenderPass 来完成类似功能」
RenderFeature在URPTest_Renderer.asset文件的面板下可以看到,写好类后通过“Add Renderer Feature”新增: 按照下图步骤创建RenderFeature类文件:
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
// Minimal ScriptableRendererFeature skeleton: a feature owns one or more
// ScriptableRenderPass instances and enqueues them into the renderer each frame.
public class XXXTest : ScriptableRendererFeature
{
    // Settings exposed on the Renderer Feature inspector panel.
    [System.Serializable]
    public class XXXSettings
    {
        // When in the frame the pass runs (see the RenderPassEvent ordering above).
        public RenderPassEvent renderPassEvent = RenderPassEvent.AfterRenderingTransparents;
        // Material used by the pass to process the image.
        public Material material = null;
    }

    public class CustomRenderPass : ScriptableRenderPass
    {
        // Called before Execute: configure render targets / allocate temporary RTs here.
        public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor) { }
        // Per-frame rendering work: build and submit a CommandBuffer here.
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData) { }
        // Called after rendering: release temporary render textures here.
        // FIX: the original declared "FrameCleanup ( CommandBuffer){}" with no
        // parameter name, which does not compile.
        public override void FrameCleanup(CommandBuffer cmd) { }
    }

    public XXXSettings settings = new XXXSettings();
    CustomRenderPass scriptablePass;

    // Called when the feature is created or its settings change: construct the pass here.
    public override void Create() { }
    // Called every frame per camera: enqueue the pass into the renderer here.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) { }
}
- 其中Create()进行初始化操作,可以把settings里的参数从面板上赋予给CustomRenderPass:
// Create() initializes the feature: build the pass and copy the inspector
// settings onto it so the pass sees the values configured on the asset.
public override void Create()
{
scriptablePass = new CustomRenderPass();
scriptablePass.material = settings.material;
scriptablePass.renderPassEvent = settings.renderPassEvent;
// NOTE(review): "Scale" is not declared in the XXXSettings shown earlier — this line
// presumably comes from a variant with an extra Scale field; confirm before copying.
scriptablePass.Scale = settings.Scale;
}
- AddRenderPasses()将CustomRenderPass加入队列,也可以在这里把相机输出给到CustomRenderPass(需要增加Setup函数)。
// Called once per camera per frame: hand the camera color target to the pass
// (via a user-added Setup method), then enqueue the pass for this frame.
public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
{
scriptablePass.Setup(renderer.cameraColorTarget);
renderer.EnqueuePass(scriptablePass);
}
- Renderer Pass里核心是Execute函数,基本相当于内置管线的OnRenderImage函数
// Core of the pass — roughly the URP equivalent of the built-in pipeline's OnRenderImage.
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
// Identify the camera color buffer by its global texture name.
RenderTargetIdentifier cameraColorTexture = new RenderTargetIdentifier("_CameraColorTexture");
material.SetFloat("_Distance", _Distance);
material.SetFloat("_Width", _Width);
CommandBuffer cmd = CommandBufferPool.Get();
// Ping-pong blit: process into a temp target with the material, then copy back.
// NOTE(review): tmpTex, passint, _Distance and _Width are not declared in this excerpt —
// they come from the surrounding class; confirm their definitions before reusing.
cmd.Blit(cameraColorTexture, tmpTex, material,passint);
cmd.Blit(tmpTex, cameraColorTexture);
context.ExecuteCommandBuffer(cmd);
cmd.Clear();
CommandBufferPool.Release(cmd);
}
3.2 Volume后处理
3.2.1urp自带后处理
- URP自带了很多后处理集成在Volume里,使用的时候要在GameObject里创建Volume组件,其中Global Volume代表后处理效果应用所有摄像机;Box Volume是在一个盒子区域内才应用;Sphere Volume是在一个球形区域才应用;Convex Mesh Volume是使用自定义的网格区域。
- Volume组件上有几个参数如下表:
| 名称 | 作用 |
| --- | --- |
| Mode | Global:无边界地影响每一个摄像机; Local:指定边界,只影响边界内部的摄像机 |
| Weight | Volume在场景中的影响值 |
| Priority | 当场景中有多个Volume时,URP通过此值决定使用哪一个Volume,优先使用priority更高的 |
| Profile | Profile文件存储URP处理Volume的数据 |
- 需要创建一个Volume Profile来设置后处理效果,并且需要在相机里勾选Post Processing开关才能看到效果
- URP自带的后处理效果有:辉光(Bloom)、通道混合(Channel Mixer)、色差(Chromatic Aberration)、色彩调整(Color Adjustments)、曲线(Color Curves)、景深(Depth Of Field)、胶片颗粒(Film Grain)、镜头变形(Lens Distortion)、暗部gamma亮部(Lift Gamma Gain)、运动模糊(Motion Blur)、帕尼尼投影(Panini Projection)、阴影中间调高光(Shadows Midtones Highlights)、色调分离(Split Toning)、色调映射(Tonemapping)、暗角(Vignette)、白平衡(White Balance)
3.2.2拓展Volume后处理
- 在Volume里拓展后处理,除了上边用到的RenderFeature Class和RenderPass Class外,还需要VolumeComponent Class。一共2个脚本1个shader文件。
- VolumeComponent Class:在com.unity.render-pipelines.universal@7.7.1/Runtime/Overrides里可以找到所有为Volume配置文件添加的效果的属性脚本,参考里边的写法创建拓展后处理的效果参数。
using System;
// Example VolumeComponent: declares the effect's parameters so they appear in the
// Volume Profile UI. The pass reads these values back via VolumeManager at render time.
namespace UnityEngine.Rendering.Universal
{
// VolumeComponentMenu sets where the effect appears in the Volume "Add Override" menu.
[Serializable, VolumeComponentMenu("My Post-processing/Test")]
public sealed class Test : VolumeComponent, IPostProcessComponent
{
[Tooltip("Strength of the bloom filter.")]
// MinFloatParameter(default, min): value is clamped to >= 0.
public MinFloatParameter intensity = new MinFloatParameter(0f, 0f);
// ClampedFloatParameter(default, min, max): slider in [0, 1].
public ClampedFloatParameter scatter = new ClampedFloatParameter(0.7f, 0f, 1f);
// ColorParameter(color, hdr, showAlpha, showEyeDropper).
public ColorParameter tint = new ColorParameter(Color.white, false, false, true);
public BoolParameter highQualityFiltering = new BoolParameter(false);
public TextureParameter dirtTexture = new TextureParameter(null);
// The effect is skipped entirely while intensity is zero.
public bool IsActive() => intensity.value > 0f;
public bool IsTileCompatible() => false;
}
}
using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
// Renderer Feature that drives a Volume-based post effect: creates the pass from a
// shader and enqueues it each frame with the camera color target.
public class XXXTest : ScriptableRendererFeature
{
    [System.Serializable]
    public class XXXSettings
    {
        // When the pass runs; BeforeRenderingPostProcessing suits Volume-driven effects.
        public RenderPassEvent renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing;
        // Shader the pass builds its material from.
        public Shader shader;
    }

    public XXXSettings settings = new XXXSettings();
    CustomRenderPass scriptablePass;

    public override void Create()
    {
        this.name = "TestPass";
        // FIX: use the event configured in settings instead of hard-coding
        // BeforeRenderingPostProcessing (the original ignored settings.renderPassEvent,
        // so changing it in the inspector had no effect).
        scriptablePass = new CustomRenderPass(settings.renderPassEvent, settings.shader);
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // Hand the camera color target to the pass, then enqueue it for this frame.
        scriptablePass.Setup(renderer.cameraColorTarget);
        renderer.EnqueuePass(scriptablePass);
    }
}
// The render pass itself: reads parameters from the active Volume stack and applies
// the effect to the camera color target with a fullscreen blit.
public class CustomRenderPass : ScriptableRenderPass
{
    static readonly string k_RenderTag = "Test Effects";
    static readonly int MainTexId = Shader.PropertyToID("_MainTex");
    static readonly int TempTargetId = Shader.PropertyToID("_TempTargetColorTint");

    Test test;                                  // Volume component fetched each frame
    Material material;                          // material built from the supplied shader
    RenderTargetIdentifier cameraColorTexture;  // camera color target, set via Setup()

    // FIX: the original declared "public ColorTintPass(...)", which does not compile —
    // a constructor must match the class name (CustomRenderPass).
    public CustomRenderPass(RenderPassEvent evt, Shader testshader)
    {
        renderPassEvent = evt;
        // FIX: the original wrote "if (shader = null)" — an assignment, not a
        // comparison — which both fails to detect a missing shader and doesn't compile.
        if (testshader == null)
        {
            Debug.LogError("没有指定Shader");
            return;
        }
        material = CoreUtils.CreateEngineMaterial(testshader);
    }

    // Receives the camera color target from the feature's AddRenderPasses.
    public void Setup(in RenderTargetIdentifier currentTarget)
    {
        this.cameraColorTexture = currentTarget;
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (material == null)
        {
            Debug.LogError("材质初始化失败");
            return;
        }
        // Skip cameras that have post-processing disabled.
        if (!renderingData.cameraData.postProcessEnabled) { return; }
        // Pull our parameters from the active Volume stack.
        var stack = VolumeManager.instance.stack;
        test = stack.GetComponent<Test>();
        if (test == null)
        {
            Debug.LogError(" Volume组件获取失败 ");
            return;
        }
        var cmd = CommandBufferPool.Get(k_RenderTag);
        Render(cmd, ref renderingData);
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    // Records the actual blit commands into cmd.
    void Render(CommandBuffer cmd, ref RenderingData renderingData)
    {
        ref var cameraData = ref renderingData.cameraData;
        var source = cameraColorTexture;
        int destination = TempTargetId;
        // FIX: the original referenced an undeclared "colorTintMaterial";
        // the field is named "material". Also removed the unused "camera" local.
        material.SetColor("_Color", test.tint.value);
        cmd.SetGlobalTexture(MainTexId, source);
        cmd.GetTemporaryRT(destination, cameraData.camera.scaledPixelWidth, cameraData.camera.scaledPixelHeight, 0, FilterMode.Trilinear, RenderTextureFormat.Default);
        // Round-trip blit: copy to temp, then process back into the camera target.
        cmd.Blit(source, destination);
        cmd.Blit(destination, source, material, 0);
    }
}
参考资料
1,URP主要源码解析 2,URP学习之三 3,URP/LWRP学习入门 4,URP屏幕后处理 5,URP | 后处理-自定义后处理
|