Scriptable Render Pipeline Tutorial
Note: SRP can handle rendering for a Canvas set to Screen Space - Overlay.
Installing the package
First, open the Package Manager and install the Core RP Library package (com.unity.render-pipelines.core).
Creating the render pipeline
Writing the render pipeline logic script
Create a new script named MPipeLine. This script implements the pipeline's rendering logic.
csharp
using UnityEngine;
using UnityEngine.Rendering;

public class MPipeLine : RenderPipeline
{
    public MPipeLine() { }

    protected override void Render(ScriptableRenderContext context, Camera[] cameras)
    {
        // Create a command buffer that clears the render target to white
        var cmd = new CommandBuffer();
        cmd.ClearRenderTarget(true, true, Color.white);
        context.ExecuteCommandBuffer(cmd);
        cmd.Release();
        // Submit the queued commands for execution
        context.Submit();
    }
}
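With this in place, Unity calls Render once per frame, passing every camera that needs rendering; for now the pipeline simply clears the screen to white.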
Connecting the render pipeline to the editor
Create a script named MPipelineAsset. It establishes the link to the Unity editor, allowing the project to be bound to the custom render pipeline.
csharp
using UnityEngine;
using UnityEngine.Rendering;

[CreateAssetMenu(menuName = "Rendering/MPipelineAsset")]
public class MPipelineAsset : RenderPipelineAsset
{
    protected override RenderPipeline CreatePipeline()
    {
        // Return an instance of the rendering logic script
        return new MPipeLine();
    }
}
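The asset is also the natural place to keep pipeline settings. Below is a minimal sketch of passing a setting through to the pipeline; the clearColor field and the MPipeLine(Color) constructor are assumptions for illustration, not part of the tutorial code above.
csharp
using UnityEngine;
using UnityEngine.Rendering;

[CreateAssetMenu(menuName = "Rendering/MPipelineAssetWithSettings")]
public class MPipelineAssetWithSettings : RenderPipelineAsset
{
    // Edited on the asset in the Inspector
    public Color clearColor = Color.white;

    protected override RenderPipeline CreatePipeline()
    {
        // Hypothetical constructor that forwards the setting to the pipeline
        return new MPipeLine(clearColor);
    }
}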
Creating and configuring the pipeline asset
First, create the asset file: right-click in the Project window and choose Create > Rendering > MPipelineAsset (the menu path defined by the CreateAssetMenu attribute).
Then open Edit > Project Settings > Graphics and set the Scriptable Render Pipeline Settings field to the pipeline asset we just created.
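The asset can also be assigned from code rather than through the Graphics settings window; a minimal sketch, assuming the asset is stored in a Resources folder (the path is an assumption):
csharp
using UnityEngine;
using UnityEngine.Rendering;

public static class PipelineSwitcher
{
    public static void UseMPipeline()
    {
        // Load the pipeline asset and make it the active render pipeline
        var asset = Resources.Load<MPipelineAsset>("MPipelineAsset");
        GraphicsSettings.renderPipelineAsset = asset;
    }
}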
Editing the render pipeline logic
There are two ways to write the rendering logic. One is to create command buffers directly:
csharp
var cmd = new CommandBuffer() { name = "clear" };
cmd.ClearRenderTarget(true, true, Color.white);
context.ExecuteCommandBuffer(cmd);
cmd.Release();
context.Submit();
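Giving the buffer a name makes it show up under that label in the Frame Debugger, which helps when inspecting what the pipeline draws.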
The other is to call the rendering API; all of the logic must be wrapped between BeginFrameRendering and EndFrameRendering:
csharp
RenderPipeline.BeginFrameRendering(context, cameras);
// The camera to render
Camera camera = cameras[0];
// Render opaque objects, then transparent objects
RenderCamera(context, camera, "shader1", SortingCriteria.CommonOpaque, RenderQueueRange.opaque);
RenderCamera(context, camera, "shader2", SortingCriteria.CommonTransparent, RenderQueueRange.transparent);
// End frame rendering
RenderPipeline.EndFrameRendering(context, cameras);
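BeginFrameRendering and BeginCameraRendering also raise the corresponding RenderPipelineManager events, so scripts outside the pipeline can hook into these points; a minimal sketch:
csharp
using UnityEngine;
using UnityEngine.Rendering;

public class FrameLogger : MonoBehaviour
{
    void OnEnable()  { RenderPipelineManager.beginCameraRendering += OnBeginCamera; }
    void OnDisable() { RenderPipelineManager.beginCameraRendering -= OnBeginCamera; }

    void OnBeginCamera(ScriptableRenderContext context, Camera camera)
    {
        // Runs once per camera, right before the pipeline renders it
        Debug.Log($"About to render {camera.name}");
    }
}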
Here we take the second approach and build the rendering logic for opaque objects, transparent objects, and the skybox:
csharp
protected override void Render(ScriptableRenderContext context, Camera[] cameras)
{
    // Begin frame rendering
    RenderPipeline.BeginFrameRendering(context, cameras);
    // The camera to render (only the first one, for simplicity)
    Camera camera = cameras[0];
    // Render opaque objects; "shader1" is a custom shader, listed below
    RenderCamera(context, camera, "shader1", SortingCriteria.CommonOpaque, RenderQueueRange.opaque);
    // Render transparent objects; "shader2" is a custom shader, listed below
    RenderCamera(context, camera, "shader2", SortingCriteria.CommonTransparent, RenderQueueRange.transparent);
    // End frame rendering
    RenderPipeline.EndFrameRendering(context, cameras);
}

private void RenderCamera(ScriptableRenderContext context, Camera camera, string TagId, SortingCriteria criteria, RenderQueueRange queue)
{
    // Begin rendering the camera
    RenderPipeline.BeginCameraRendering(context, camera);
    // Frustum culling: get the camera's culling parameters first, then adjust them
    // (fields set before the TryGetCullingParameters call would be overwritten)
    if (!camera.TryGetCullingParameters(out ScriptableCullingParameters cullingParameters))
        return;
    cullingParameters.cullingOptions |= CullingOptions.OcclusionCull;
    // Cull every layer except the Default layer
    cullingParameters.cullingMask = 1 << 0;
    var cullingResults = context.Cull(ref cullingParameters);
    // Update the built-in shader variables for the current camera
    context.SetupCameraProperties(camera);
    // DrawingSettings describes how visible objects are sorted
    // and which shader pass is used to draw them
    ShaderTagId shaderTagId = new ShaderTagId(TagId);
    var sortingSettings = new SortingSettings(camera)
    {
        criteria = criteria
    };
    DrawingSettings drawingSettings = new DrawingSettings(shaderTagId, sortingSettings);
    // FilteringSettings describes how visible objects are filtered during rendering
    FilteringSettings filteringSettings = new FilteringSettings(queue);
    // Draw the geometry
    context.DrawRenderers(cullingResults, ref drawingSettings, ref filteringSettings);
    // Draw the skybox (only during the opaque pass)
    if (camera.clearFlags == CameraClearFlags.Skybox && RenderSettings.skybox != null && queue != RenderQueueRange.transparent)
    {
        context.DrawSkybox(camera);
    }
    // Submit the rendering commands
    context.Submit();
    // End rendering the camera
    RenderPipeline.EndCameraRendering(context, camera);
}
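Note that the Render method above only renders cameras[0]. A minimal variation that handles every camera Unity passes in (including the Scene view camera):
csharp
protected override void Render(ScriptableRenderContext context, Camera[] cameras)
{
    RenderPipeline.BeginFrameRendering(context, cameras);
    foreach (var camera in cameras)
    {
        // Opaque pass, then transparent pass, for each camera
        RenderCamera(context, camera, "shader1", SortingCriteria.CommonOpaque, RenderQueueRange.opaque);
        RenderCamera(context, camera, "shader2", SortingCriteria.CommonTransparent, RenderQueueRange.transparent);
    }
    RenderPipeline.EndFrameRendering(context, cameras);
}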
Creating the shaders
Create two shader assets, shader1.shader and shader2.shader, and fill them with the code below.
shader1:
hlsl
Shader "Custom/mShader1"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
Blend One OneMinusSrcAlpha
Pass
{
Tags { "LightMode"="shader1" }
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
float4 _MainTex_ST;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
// sample the texture
fixed4 col = tex2D(_MainTex, i.uv);
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
return col;
}
ENDCG
}
}
}
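The Tags { "LightMode"="shader1" } line is what ties this pass to the pipeline: DrawRenderers only draws passes whose LightMode matches the ShaderTagId ("shader1" or "shader2") passed into RenderCamera.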
shader2:
hlsl
Shader "Custom/mShader2"
{
Properties
{
_MainTex ("Texture", 2D) = "#A84242" {}
_MainColor("color", Color) = (1,1,1,1)
}
SubShader
{
Tags{
"Queue" = "Transparent"
"RenderType"="Transparent"
"PreviewType"="Plane"
}
Cull Off
//Lighting On
//ZWrite On
Blend One OneMinusSrcAlpha
Pass
{
Tags { "LightMode"="shader2" }
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
float4 _MainTex_ST;
fixed4 _MainColor;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
// sample the texture
fixed4 col = tex2D(_MainTex, i.uv) * _MainColor;
// apply fog
//UNITY_APPLY_FOG(i.fogCoord, col);
col.a = _MainColor.a;
return col;
}
ENDCG
}
}
}
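Because this pass is drawn in the transparent queue with Blend One OneMinusSrcAlpha, lowering the alpha of _MainColor lets whatever was drawn behind the object show through.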
Testing the result
Create two materials, mat1 and mat2. Assign shader1 to mat1 and shader2 to mat2, and lower the alpha of mat2's color to 100.
Create two cubes, cube1 and cube2, in the scene. At this point nothing is visible, because the default material has no pass whose LightMode matches "shader1" or "shader2".
Drag cube2 so that it partially overlaps cube1, then assign mat1 to cube1 and mat2 to cube2. The custom render pipeline now takes effect: the semi-transparent cube2 is blended over the opaque cube1.
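For reference, the same test scene can be built from a script; a minimal sketch, assuming mat1 and mat2 are stored in a Resources folder (the paths are assumptions):
csharp
using UnityEngine;

public class TestSceneSetup : MonoBehaviour
{
    void Start()
    {
        // Opaque cube using mat1 (shader1)
        var cube1 = GameObject.CreatePrimitive(PrimitiveType.Cube);
        cube1.GetComponent<Renderer>().material = Resources.Load<Material>("mat1");

        // Semi-transparent cube using mat2 (shader2), offset so it
        // partially covers cube1 from the camera's point of view
        var cube2 = GameObject.CreatePrimitive(PrimitiveType.Cube);
        cube2.transform.position = new Vector3(0.5f, 0.5f, -1f);
        cube2.GetComponent<Renderer>().material = Resources.Load<Material>("mat2");
    }
}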