No Description
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

NewPostProcessRendererFeature.cs.txt 19KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337
  1. using UnityEngine;
  2. using UnityEngine.Rendering;
  3. using UnityEngine.Rendering.RenderGraphModule;
  4. using UnityEngine.Rendering.Universal;
  5. // This is the script template for creating a ScriptableRendererFeature meant for a post-processing effect
  6. //
  7. // To see how this feature is made to work with on a custom VolumeComponent observe the "AddRenderPasses" and "ExecuteMainPass" methods
  8. //
  9. // For a general guide on how to create custom ScriptableRendererFeatures see the following URP documentation page:
  10. // https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@latest/index.html?subfolder=/manual/renderer-features/create-custom-renderer-feature.html
// A ScriptableRendererFeature template for a full-screen post-processing effect.
// NOTE(review): '#FEATURE_TYPE#' is a script-template placeholder that Unity replaces
// with the chosen type name when a script is generated from this template.
public sealed class #FEATURE_TYPE# : ScriptableRendererFeature
{
    #region FEATURE_FIELDS

    // * The material used to render the post-processing effect
    // * The 'SerializeField' attribute makes sure that the private material reference we assign in the "Create" method
    //   while in the editor will be serialised and the referenced material will be included in the player build
    // * To not clutter the renderer feature UI we're keeping this field hidden, but if you'd like to be able to change
    //   the material in the editor UI you could just make this field public and remove the current attributes
    [SerializeField]
    [HideInInspector]
    private Material m_Material;

    // The user defined ScriptableRenderPass that is responsible for the actual rendering of the effect.
    // Only created in Create() when m_Material is available, so it may be null.
    private CustomPostRenderPass m_FullScreenPass;

    #endregion
    #region FEATURE_METHODS

    // Called by URP to initialize the feature's resources; here we resolve the material and create the render pass.
    public override void Create()
    {
#if UNITY_EDITOR
        // * This assigns a material asset reference while in the editor and the "[SerializeField]" attribute on the
        //   private 'm_Material' field will make sure that the referenced material will be included in player builds
        // * Alternatively, you could create a material from the shader at runtime e.g.:
        //   'm_Material = new Material(m_Shader);'
        //   In this case for the shader referenced by 'm_Shader' to be included in builds you will have to either:
        //   1) Assign 'm_Shader = Shader.Find("Shader Graphs/FullscreenInvertColors")' behind UNITY_EDITOR only and make sure 'm_Shader' is a "[SerializeField]"
        //   2) Or add "Shader Graphs/FullscreenInvertColors" to the "Always Included Shaders List" under "ProjectSettings" -> "Graphics" -> "Shader Settings"
        //      and call 'm_Shader = Shader.Find("Shader Graphs/FullscreenInvertColors")' outside of the UNITY_EDITOR section
        if (m_Material == null)
            m_Material = UnityEditor.AssetDatabase.LoadAssetAtPath<Material>("Packages/com.unity.render-pipelines.universal/Runtime/Materials/FullscreenInvertColors.mat");
#endif

        // Only create the pass when a material is available; m_FullScreenPass stays null otherwise
        // and AddRenderPasses will skip enqueueing.
        if (m_Material)
            m_FullScreenPass = new CustomPostRenderPass(name, m_Material);
    }
    // Here you can inject one or multiple render passes in the renderer.
    // This method is called when setting up the renderer once per-camera.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // Skip rendering if m_Material or the pass instance are null for whatever reason
        if (m_Material == null || m_FullScreenPass == null)
            return;

        // This check makes sure to not render the effect to reflection probes or preview cameras as post-processing is typically not desired there
        if (renderingData.cameraData.cameraType == CameraType.Preview || renderingData.cameraData.cameraType == CameraType.Reflection)
            return;

        // You can control the rendering of your feature using custom post-processing VolumeComponents
        //
        // E.g. when controlling rendering with a VolumeComponent you will typically want to skip rendering as an optimization when the component
        // has settings which would make it imperceptible (e.g. the implementation of IsActive() might return false when some "intensity" value is 0).
        //
        // N.B. if your volume component type is actually defined in C# it is unlikely that VolumeManager would return a "null" instance of it as
        // GlobalSettings should always contain an instance of all VolumeComponents in the project even if they're not overriden in the scene
        #VOLUME_TYPE# myVolume = VolumeManager.instance.stack?.GetComponent<#VOLUME_TYPE#>();
        if (myVolume == null || !myVolume.IsActive())
            return;

        // Here you specify at which part of the frame the effect will execute
        //
        // When creating post-processing effects you will almost always want to use one of the following injection points:
        // BeforeRenderingTransparents - in cases you want your effect to be visible behind transparent objects
        // BeforeRenderingPostProcessing - in cases where your effect is supposed to run before the URP post-processing stack
        // AfterRenderingPostProcessing - in cases where your effect is supposed to run after the URP post-processing stack, but before FXAA, upscaling or color grading
        m_FullScreenPass.renderPassEvent = RenderPassEvent.AfterRenderingPostProcessing;

        // You can specify if your effect needs scene depth, normals, motion vectors or a downscaled opaque color as input
        //
        // You specify them as a mask e.g. ScriptableRenderPassInput.Normals | ScriptableRenderPassInput.Motion and URP
        // will either reuse these if they've been generated earlier in the frame or will add passes to generate them.
        //
        // The inputs will get bound as global shader texture properties and can be sampled in the shader using the following:
        // * Depth - use "SampleSceneDepth" after including "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl"
        // * Normal - use "SampleSceneNormals" after including "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareNormalsTexture.hlsl"
        // * Opaque Scene Color - use "SampleSceneColor" after including "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareOpaqueTexture.hlsl".
        //   Note that "OpaqueSceneColor" is a texture containing a possibly downscaled copy of the framebuffer from before rendering transparent objects which
        //   should not be your first choice when wanting to do a post-processing effect, for that this template will copy the active texture for sampling which is more expensive
        // * Motion Vectors - you currently need to declare and sample the texture as follows:
        //   TEXTURE2D_X(_MotionVectorTexture);
        //   ...
        //   LOAD_TEXTURE2D_X_LOD(_MotionVectorTexture, pixelCoords, 0).xy
        //
        // N.B. when using the FullScreenPass Shader Graph target you should simply use the "URP Sample Buffer" node which will handle the above for you
        m_FullScreenPass.ConfigureInput(ScriptableRenderPassInput.None);

        renderer.EnqueuePass(m_FullScreenPass);
    }
  90. protected override void Dispose(bool disposing)
  91. {
  92. // We dispose the pass we created to free the resources it might be holding onto
  93. m_FullScreenPass.Dispose();
  94. }
  95. #endregion
    // The user defined ScriptableRenderPass performing the actual rendering: an optional
    // copy-color pass followed by the main full-screen material pass. Supports both the
    // render graph and the non-render graph execution paths.
    private class CustomPostRenderPass : ScriptableRenderPass
    {
        #region PASS_FIELDS

        // The material used to render the post-processing effect
        private Material m_Material;

        // The handle to the temporary color copy texture (only used in the non-render graph path)
        private RTHandle m_CopiedColor;

        // The property block used to set additional properties for the material
        private static MaterialPropertyBlock s_SharedPropertyBlock = new MaterialPropertyBlock();

        // This constant is meant to showcase how to create a copy color pass that is needed for most post-processing effects
        private static readonly bool kCopyActiveColor = true;

        // This constant is meant to showcase how you can add depth-stencil support to your main pass
        private static readonly bool kBindDepthStencilAttachment = false;

        // Creating some shader properties in advance as this is slightly more efficient than referencing them by string
        private static readonly int kBlitTexturePropertyId = Shader.PropertyToID("_BlitTexture");
        private static readonly int kBlitScaleBiasPropertyId = Shader.PropertyToID("_BlitScaleBias");

        #endregion

        // passName: used for the profiling sampler shown in profiling tools
        // material: the full-screen material this pass will draw with
        public CustomPostRenderPass(string passName, Material material)
        {
            profilingSampler = new ProfilingSampler(passName);
            m_Material = material;

            // * The 'requiresIntermediateTexture' field needs to be set to 'true' when a ScriptableRenderPass intends to sample
            //   the active color buffer
            // * This will make sure that URP will not apply the optimization of rendering the entire frame to the write-only backbuffer,
            //   but will instead render to intermediate textures that can be sampled, which is typically needed for post-processing
            requiresIntermediateTexture = kCopyActiveColor;
        }
        #region PASS_SHARED_RENDERING_CODE

        // This method contains the shared rendering logic for doing the temporary color copy pass (used by both the non-render graph and render graph paths)
        private static void ExecuteCopyColorPass(RasterCommandBuffer cmd, RTHandle sourceTexture)
        {
            // Full-texture blit: scale (1,1), bias (0,0), mip 0, no bilinear flag
            Blitter.BlitTexture(cmd, sourceTexture, new Vector4(1, 1, 0, 0), 0.0f, false);
        }

        // This method contains the shared rendering logic for doing the main post-processing pass (used by both the non-render graph and render graph paths)
        // sourceTexture may be null when no color copy was made; the material is then expected not to sample _BlitTexture.
        private static void ExecuteMainPass(RasterCommandBuffer cmd, RTHandle sourceTexture, Material material)
        {
            // Reused static block: clear it so properties from a previous camera/frame don't leak through
            s_SharedPropertyBlock.Clear();
            if (sourceTexture != null)
                s_SharedPropertyBlock.SetTexture(kBlitTexturePropertyId, sourceTexture);

            // This uniform needs to be set for user materials with shaders relying on core Blit.hlsl to work as expected
            s_SharedPropertyBlock.SetVector(kBlitScaleBiasPropertyId, new Vector4(1, 1, 0, 0));

            // USING A CUSTOM VOLUME COMPONENT:
            //
            // To control the rendering of your effect using a custom VolumeComponent you can set the material's properties
            // based on the blended values of your VolumeComponent by querying them with the core VolumeManager API e.g.:
            #VOLUME_TYPE# myVolume = VolumeManager.instance.stack?.GetComponent<#VOLUME_TYPE#>();
            if (myVolume != null)
                s_SharedPropertyBlock.SetFloat("_Intensity", myVolume.intensity.value);

            // Draws a full-screen triangle (3 procedural vertices, no vertex buffer)
            cmd.DrawProcedural(Matrix4x4.identity, material, 0, MeshTopology.Triangles, 3, 1, s_SharedPropertyBlock);
        }

        // This method is used to get the descriptor used for creating the temporary color copy texture that will enable the main pass to sample the screen color
        private static RenderTextureDescriptor GetCopyPassTextureDescriptor(RenderTextureDescriptor desc)
        {
            // Unless 'desc.bindMS = true' for an MSAA texture a resolve pass will be inserted before it is bound for sampling.
            // Since our main pass shader does not expect to sample an MSAA target we will leave 'bindMS = false'.
            // If the camera target has MSAA enabled an MSAA resolve will still happen before our copy-color pass but
            // with this change we will avoid an unnecessary MSAA resolve before our main pass.
            desc.msaaSamples = 1;

            // This avoids copying the depth buffer tied to the current descriptor as the main pass in this example does not use it
            desc.depthBufferBits = (int)DepthBits.None;

            return desc;
        }

        #endregion
        #region PASS_NON_RENDER_GRAPH_PATH

        // This method is called before executing the render pass (non-render graph path only).
        // It can be used to configure render targets and their clear state. Also to create temporary render target textures.
        // When empty this render pass will render to the active camera render target.
        // You should never call CommandBuffer.SetRenderTarget. Instead call <c>ConfigureTarget</c> and <c>ConfigureClear</c>.
        // The render pipeline will ensure target setup and clearing happens in a performant manner.
        public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
        {
            // This ScriptableRenderPass manages its own RenderTarget.
            // ResetTarget here so that ScriptableRenderer's active attachment can be invalidated when processing this ScriptableRenderPass.
            ResetTarget();

            // This allocates our intermediate texture for the non-RG path and makes sure it's reallocated if some settings on the camera target change (e.g. resolution)
            if (kCopyActiveColor)
                RenderingUtils.ReAllocateHandleIfNeeded(ref m_CopiedColor, GetCopyPassTextureDescriptor(renderingData.cameraData.cameraTargetDescriptor), name: "_CustomPostPassCopyColor");
        }

        // Here you can implement the rendering logic (non-render graph path only).
        // Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
        // https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
        // You don't have to call ScriptableRenderContext.submit, the render pipeline will call it at specific points in the pipeline.
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            ref var cameraData = ref renderingData.cameraData;
            var cmd = CommandBufferPool.Get();

            using (new ProfilingScope(cmd, profilingSampler))
            {
                // The shared Execute* helpers take a RasterCommandBuffer, so wrap the pooled CommandBuffer
                RasterCommandBuffer rasterCmd = CommandBufferHelpers.GetRasterCommandBuffer(cmd);

                // First copy the active color into the temporary texture so the main pass can sample it
                if (kCopyActiveColor)
                {
                    CoreUtils.SetRenderTarget(cmd, m_CopiedColor);
                    ExecuteCopyColorPass(rasterCmd, cameraData.renderer.cameraColorTargetHandle);
                }

                // Then render the main pass back into the camera color target, optionally binding depth-stencil
                if (kBindDepthStencilAttachment)
                    CoreUtils.SetRenderTarget(cmd, cameraData.renderer.cameraColorTargetHandle, cameraData.renderer.cameraDepthTargetHandle);
                else
                    CoreUtils.SetRenderTarget(cmd, cameraData.renderer.cameraColorTargetHandle);

                ExecuteMainPass(rasterCmd, kCopyActiveColor ? m_CopiedColor : null, m_Material);
            }

            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();
            CommandBufferPool.Release(cmd);
        }

        // Cleanup any allocated resources that were created during the execution of this render pass (non-render graph path only)
        public override void OnCameraCleanup(CommandBuffer cmd)
        {
            // Intentionally empty: m_CopiedColor is reused across frames and released in Dispose()
        }

        // Releases the temporary color copy texture; called from the feature's Dispose
        public void Dispose()
        {
            m_CopiedColor?.Release();
        }

        #endregion
        #region PASS_RENDER_GRAPH_PATH

        // The custom copy color pass data that will be passed at render graph execution to the lambda we set with "SetRenderFunc" during render graph setup
        private class CopyPassData
        {
            public TextureHandle inputTexture;
        }

        // The custom main pass data that will be passed at render graph execution to the lambda we set with "SetRenderFunc" during render graph setup
        private class MainPassData
        {
            public Material material;
            public TextureHandle inputTexture;
        }

        // Render graph wrapper: unpacks pass data and forwards to the shared copy-color logic
        private static void ExecuteCopyColorPass(CopyPassData data, RasterGraphContext context)
        {
            ExecuteCopyColorPass(context.cmd, data.inputTexture);
        }

        // Render graph wrapper: unpacks pass data and forwards to the shared main pass logic.
        // An invalid handle (no color copy was made) is passed on as null.
        private static void ExecuteMainPass(MainPassData data, RasterGraphContext context)
        {
            ExecuteMainPass(context.cmd, data.inputTexture.IsValid() ? data.inputTexture : null, data.material);
        }

        // Here you can implement the rendering logic for the render graph path
        public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
        {
            UniversalResourceData resourcesData = frameData.Get<UniversalResourceData>();
            UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
            UniversalRenderer renderer = (UniversalRenderer) cameraData.renderer;

            var colorCopyDescriptor = GetCopyPassTextureDescriptor(cameraData.cameraTargetDescriptor);
            TextureHandle copiedColor = TextureHandle.nullHandle;

            // Below is an example of a typical post-processing effect which first copies the active color and uses it as input in the second pass.
            // Feel free to modify/rename/add additional or remove the existing passes based on the needs of your custom post-processing effect

            // Intermediate color copy pass
            // * This pass makes a temporary copy of the active color target for sampling
            // * This is needed as GPU graphics pipelines don't allow to sample the texture bound as the active color target
            // * This copy can be avoided if you won't need to sample the color target or will only need to render/blend on top of it
            if (kCopyActiveColor)
            {
                copiedColor = UniversalRenderer.CreateRenderGraphTexture(renderGraph, colorCopyDescriptor, "_CustomPostPassColorCopy", false);

                using (var builder = renderGraph.AddRasterRenderPass<CopyPassData>("CustomPostPass_CopyColor", out var passData, profilingSampler))
                {
                    passData.inputTexture = resourcesData.activeColorTexture;
                    builder.UseTexture(resourcesData.activeColorTexture, AccessFlags.Read);
                    builder.SetRenderAttachment(copiedColor, 0, AccessFlags.Write);
                    builder.SetRenderFunc((CopyPassData data, RasterGraphContext context) => ExecuteCopyColorPass(data, context));
                }
            }

            // Main pass
            // * This pass samples the previously created screen color texture and applies the example post-processing effect to it
            using (var builder = renderGraph.AddRasterRenderPass<MainPassData>("CustomPostPass", out var passData, profilingSampler))
            {
                passData.material = m_Material;

                // You must declare the intent to sample the previously generated color copy texture explicitly for render graph to know
                if (kCopyActiveColor)
                {
                    passData.inputTexture = copiedColor;
                    builder.UseTexture(copiedColor, AccessFlags.Read);
                }

                builder.SetRenderAttachment(resourcesData.activeColorTexture, 0, AccessFlags.Write);

                // This branch is currently not taken, but if your main pass needed the depth and/or stencil buffer to be bound this is how you would do it
                if (kBindDepthStencilAttachment)
                    builder.SetRenderAttachmentDepth(resourcesData.activeDepthTexture, AccessFlags.Write);

                builder.SetRenderFunc((MainPassData data, RasterGraphContext context) => ExecuteMainPass(data, context));
            }
        }

        #endregion
    }
}