using System;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Experimental.Rendering;

namespace UnityEngine.Rendering.Universal.Internal
{
    /// <summary>
    /// Copy the given color target to the current camera target
    ///
    /// You can use this pass to copy the result of rendering to
    /// the camera target. The pass takes the screen viewport into
    /// consideration.
    /// </summary>
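    /// <example>
    /// A minimal, hypothetical sketch of wiring this pass up from a custom renderer.
    /// The local names used here (blitMaterial, blitHDRMaterial, cameraTargetDescriptor,
    /// cameraColorTargetHandle, renderer) are assumptions for illustration only; URP's own
    /// renderers supply the blit materials from their renderer resources.
    /// <code>
    /// var finalBlitPass = new FinalBlitPass(RenderPassEvent.AfterRendering + 1, blitMaterial, blitHDRMaterial);
    ///
    /// // Each frame: point the pass at the color target to copy, then enqueue it.
    /// finalBlitPass.Setup(cameraTargetDescriptor, cameraColorTargetHandle);
    /// renderer.EnqueuePass(finalBlitPass);
    /// </code>
    /// </example>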
    public class FinalBlitPass : ScriptableRenderPass
    {
        RTHandle m_Source;
        private PassData m_PassData;

        static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");

        // Use specialized URP fragment shader passes for debug draw support and color space conversion/encoding support.
        // See CoreBlit.shader and BlitHDROverlay.shader
        static class BlitPassNames
        {
            public const string NearestSampler = "NearestDebugDraw";
            public const string BilinearSampler = "BilinearDebugDraw";
        }

        enum BlitType
        {
            Core = 0, // Core blit
            HDR = 1,  // Blit with HDR encoding and overlay UI compositing
            Count = 2
        }

        struct BlitMaterialData
        {
            public Material material;
            public int nearestSamplerPass;
            public int bilinearSamplerPass;
        }

        BlitMaterialData[] m_BlitMaterialData;

        /// <summary>
        /// Creates a new <c>FinalBlitPass</c> instance.
        /// </summary>
        /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
        /// <param name="blitMaterial">The <c>Material</c> to use for executing the final blit.</param>
        /// <param name="blitHDRMaterial">The <c>Material</c> to use for executing the final blit when HDR output is active.</param>
        /// <seealso cref="RenderPassEvent"/>
        public FinalBlitPass(RenderPassEvent evt, Material blitMaterial, Material blitHDRMaterial)
        {
            base.profilingSampler = new ProfilingSampler("Blit Final To BackBuffer");
            base.useNativeRenderPass = false;
            m_PassData = new PassData();
            renderPassEvent = evt;

            // Find sampler passes by name
            const int blitTypeCount = (int)BlitType.Count;
            m_BlitMaterialData = new BlitMaterialData[blitTypeCount];
            for (int i = 0; i < blitTypeCount; ++i)
            {
                m_BlitMaterialData[i].material = i == (int)BlitType.Core ? blitMaterial : blitHDRMaterial;
                m_BlitMaterialData[i].nearestSamplerPass = m_BlitMaterialData[i].material?.FindPass(BlitPassNames.NearestSampler) ?? -1;
                m_BlitMaterialData[i].bilinearSamplerPass = m_BlitMaterialData[i].material?.FindPass(BlitPassNames.BilinearSampler) ?? -1;
            }
        }

        /// <summary>
        /// Cleans up resources used by the pass.
        /// </summary>
        public void Dispose()
        {
        }

        /// <summary>
        /// Configure the pass
        /// </summary>
        /// <param name="baseDescriptor">The camera target's <c>RenderTextureDescriptor</c>.</param>
        /// <param name="colorHandle">The color target to copy to the camera target.</param>
        [Obsolete("Use RTHandles for colorHandle", true)]
        public void Setup(RenderTextureDescriptor baseDescriptor, RenderTargetHandle colorHandle)
        {
            throw new NotSupportedException("Setup with RenderTargetHandle has been deprecated. Use it with RTHandles instead.");
        }

        /// <summary>
        /// Configure the pass
        /// </summary>
        /// <param name="baseDescriptor">The camera target's <c>RenderTextureDescriptor</c>.</param>
        /// <param name="colorHandle">The color target to copy to the camera target.</param>
        public void Setup(RenderTextureDescriptor baseDescriptor, RTHandle colorHandle)
        {
            m_Source = colorHandle;
        }

        static void SetupHDROutput(ColorGamut hdrDisplayColorGamut, Material material, HDROutputUtils.Operation hdrOperation, Vector4 hdrOutputParameters)
        {
            material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, hdrOutputParameters);
            HDROutputUtils.ConfigureHDROutput(material, hdrDisplayColorGamut, hdrOperation);
        }

        /// <inheritdoc/>
        [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
        public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
        {
            UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();

            DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
            bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
            if (resolveToDebugScreen)
            {
                // Disable obsolete warning for internal usage
                #pragma warning disable CS0618
                ConfigureTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
                #pragma warning restore CS0618
            }
        }

        /// <inheritdoc/>
        [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            ContextContainer frameData = renderingData.frameData;
            UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();

            bool outputsToHDR = renderingData.cameraData.isHDROutputActive;
            bool outputsAlpha = false;
            InitPassData(cameraData, ref m_PassData, outputsToHDR ? BlitType.HDR : BlitType.Core, outputsAlpha);

            if (m_PassData.blitMaterialData.material == null)
            {
                Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_PassData.blitMaterialData, GetType().Name);
                return;
            }

            var cameraTarget = RenderingUtils.GetCameraTargetIdentifier(ref renderingData);
            DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
            bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);

            // Get RTHandle alias to use RTHandle apis
            RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
            var cameraTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;

            var cmd = renderingData.commandBuffer;
            if (m_Source == cameraData.renderer.GetCameraColorFrontBuffer(cmd))
            {
                m_Source = renderingData.cameraData.renderer.cameraColorTargetHandle;
            }

            using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.FinalBlit)))
            {
                m_PassData.blitMaterialData.material.enabledKeywords = null;

                debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, cameraData, !resolveToDebugScreen);

                cmd.SetKeyword(ShaderGlobalKeywords.LinearToSRGBConversion, cameraData.requireSrgbConversion);

                if (outputsToHDR)
                {
                    VolumeStack stack = VolumeManager.instance.stack;
                    Tonemapping tonemapping = stack.GetComponent<Tonemapping>();

                    Vector4 hdrOutputLuminanceParams;
                    UniversalRenderPipeline.GetHDROutputLuminanceParameters(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, tonemapping, out hdrOutputLuminanceParams);

                    HDROutputUtils.Operation hdrOperation = HDROutputUtils.Operation.None;
                    // If the HDRDebugView is on, we don't want the encoding
                    if (debugHandler == null || !debugHandler.HDRDebugViewIsActive(cameraData.resolveFinalTarget))
                        hdrOperation |= HDROutputUtils.Operation.ColorEncoding;
                    // Color conversion may have happened in the Uber post process through color grading, so we don't want to reapply it
                    if (!cameraData.postProcessEnabled)
                        hdrOperation |= HDROutputUtils.Operation.ColorConversion;

                    SetupHDROutput(cameraData.hdrDisplayColorGamut, m_PassData.blitMaterialData.material, hdrOperation, hdrOutputLuminanceParams);
                }

                if (resolveToDebugScreen)
                {
                    // Blit to the debugger texture instead of the camera target
                    int shaderPassIndex = m_Source.rt?.filterMode == FilterMode.Bilinear ? m_PassData.blitMaterialData.bilinearSamplerPass : m_PassData.blitMaterialData.nearestSamplerPass;
                    Vector2 viewportScale = m_Source.useScaling ? new Vector2(m_Source.rtHandleProperties.rtHandleScale.x, m_Source.rtHandleProperties.rtHandleScale.y) : Vector2.one;
                    Blitter.BlitTexture(cmd, m_Source, viewportScale, m_PassData.blitMaterialData.material, shaderPassIndex);

                    cameraData.renderer.ConfigureCameraTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
                }
                // TODO RENDERGRAPH: See https://jira.unity3d.com/projects/URP/issues/URP-1737
                // This branch of the if statement must be removed for render graph and the new command list with a novel way of using Blitter with fill mode
                else if (GL.wireframe && cameraData.isSceneViewCamera)
                {
                    // This set render target is necessary so we change the LOAD state to DontCare.
                    cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget,
                        RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,     // color
                        RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare); // depth
                    cmd.Blit(m_Source.nameID, cameraTargetHandle.nameID);
                }
                else
                {
                    // TODO: Final blit pass should always blit to backbuffer. The first time we do we don't need to Load contents to tile.
                    // We need to keep track in the pipeline of the first render pass to each render target to properly set load/store actions.
                    // Meanwhile we set the action to Load so that the split-screen case works.
                    var loadAction = RenderBufferLoadAction.DontCare;
                    if (!cameraData.isSceneViewCamera && !cameraData.isDefaultViewport)
                        loadAction = RenderBufferLoadAction.Load;
#if ENABLE_VR && ENABLE_XR_MODULE
                    if (cameraData.xr.enabled)
                        loadAction = RenderBufferLoadAction.Load;
#endif
                    CoreUtils.SetRenderTarget(renderingData.commandBuffer, cameraTargetHandle, loadAction, RenderBufferStoreAction.Store, ClearFlag.None, Color.clear);
                    ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(renderingData.commandBuffer), m_PassData, m_Source, cameraTargetHandle, cameraData);
                    cameraData.renderer.ConfigureCameraTarget(cameraTargetHandle, cameraTargetHandle);
                }
            }
        }

        private static void ExecutePass(RasterCommandBuffer cmd, PassData data, RTHandle source, RTHandle destination, UniversalCameraData cameraData)
        {
            bool isRenderToBackBufferTarget = !cameraData.isSceneViewCamera;
#if ENABLE_VR && ENABLE_XR_MODULE
            if (cameraData.xr.enabled)
                isRenderToBackBufferTarget = new RenderTargetIdentifier(destination.nameID, 0, CubemapFace.Unknown, -1) == new RenderTargetIdentifier(cameraData.xr.renderTarget, 0, CubemapFace.Unknown, -1);
#endif
            Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(source, destination, cameraData);
            if (isRenderToBackBufferTarget)
                cmd.SetViewport(cameraData.pixelRect);

            // turn off any global wireframe & "scene view wireframe shader hijack" settings for doing blits:
            // we never want them to show up as wireframe
            cmd.SetWireframe(false);

            CoreUtils.SetKeyword(data.blitMaterialData.material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, data.enableAlphaOutput);

            int shaderPassIndex = source.rt?.filterMode == FilterMode.Bilinear ? data.blitMaterialData.bilinearSamplerPass : data.blitMaterialData.nearestSamplerPass;
            Blitter.BlitTexture(cmd, source, scaleBias, data.blitMaterialData.material, shaderPassIndex);
        }

        private class PassData
        {
            internal TextureHandle source;
            internal TextureHandle destination;
            internal TextureHandle depthTexture;
            internal int sourceID;
            internal Vector4 hdrOutputLuminanceParams;
            internal bool requireSrgbConversion;
            internal bool enableAlphaOutput;
            internal BlitMaterialData blitMaterialData;
            internal UniversalCameraData cameraData;
        }

        /// <summary>
        /// Initialize the shared pass data.
        /// </summary>
        /// <param name="cameraData">The camera data for the camera being rendered.</param>
        /// <param name="passData">The pass data to initialize.</param>
        /// <param name="blitType">Whether to use the core blit material or the HDR blit material.</param>
        /// <param name="enableAlphaOutput">Whether alpha output is enabled for the blit.</param>
        private void InitPassData(UniversalCameraData cameraData, ref PassData passData, BlitType blitType, bool enableAlphaOutput)
        {
            passData.cameraData = cameraData;
            passData.requireSrgbConversion = cameraData.requireSrgbConversion;
            passData.enableAlphaOutput = enableAlphaOutput;
            passData.blitMaterialData = m_BlitMaterialData[(int)blitType];
        }

        internal void Render(RenderGraph renderGraph, ContextContainer frameData, UniversalCameraData cameraData, in TextureHandle src, in TextureHandle dest, TextureHandle overlayUITexture)
        {
            using (var builder = renderGraph.AddRasterRenderPass<PassData>(profilingSampler.name, out var passData, profilingSampler))
            {
                UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
                UniversalRenderer renderer = cameraData.renderer as UniversalRenderer;
                if (cameraData.requiresDepthTexture && renderer != null)
                {
                    if (renderer.renderingModeActual != RenderingMode.Deferred)
                    {
                        builder.UseGlobalTexture(s_CameraDepthTextureID);
                        passData.depthTexture = resourceData.activeDepthTexture;
                    }
                    else if (renderer.deferredLights.GbufferDepthIndex != -1)
                    {
                        builder.UseTexture(resourceData.gBuffer[renderer.deferredLights.GbufferDepthIndex]);
                        passData.depthTexture = resourceData.gBuffer[renderer.deferredLights.GbufferDepthIndex];
                    }
                }

                bool outputsToHDR = cameraData.isHDROutputActive;
                bool outputsAlpha = cameraData.isAlphaOutputEnabled;
                InitPassData(cameraData, ref passData, outputsToHDR ? BlitType.HDR : BlitType.Core, outputsAlpha);

                passData.sourceID = ShaderPropertyId.sourceTex;
                passData.source = src;
                builder.UseTexture(src, AccessFlags.Read);
                passData.destination = dest;
                builder.SetRenderAttachment(dest, 0, AccessFlags.Write);

#if ENABLE_VR && ENABLE_XR_MODULE
                // This is a screen-space pass, make sure foveated rendering is disabled for non-uniform renders
                bool passSupportsFoveation = !XRSystem.foveatedRenderingCaps.HasFlag(FoveatedRenderingCaps.NonUniformRaster);
                builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && passSupportsFoveation);
#endif

                if (outputsToHDR && overlayUITexture.IsValid())
                {
                    VolumeStack stack = VolumeManager.instance.stack;
                    Tonemapping tonemapping = stack.GetComponent<Tonemapping>();
                    UniversalRenderPipeline.GetHDROutputLuminanceParameters(passData.cameraData.hdrDisplayInformation, passData.cameraData.hdrDisplayColorGamut, tonemapping, out passData.hdrOutputLuminanceParams);

                    builder.UseTexture(overlayUITexture, AccessFlags.Read);
                }
                else
                {
                    passData.hdrOutputLuminanceParams = new Vector4(-1.0f, -1.0f, -1.0f, -1.0f);
                }

                builder.AllowGlobalStateModification(true);

                builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
                {
                    data.blitMaterialData.material.enabledKeywords = null;

                    context.cmd.SetKeyword(ShaderGlobalKeywords.LinearToSRGBConversion, data.requireSrgbConversion);
                    data.blitMaterialData.material.SetTexture(data.sourceID, data.source);

                    if (data.depthTexture.IsValid())
                        data.blitMaterialData.material.SetTexture(s_CameraDepthTextureID, data.depthTexture);

                    DebugHandler debugHandler = GetActiveDebugHandler(data.cameraData);
                    bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(data.cameraData.resolveFinalTarget);

                    // TODO RENDERGRAPH: this should ideally be shared in ExecutePass to avoid code duplication
                    if (data.hdrOutputLuminanceParams.w >= 0)
                    {
                        HDROutputUtils.Operation hdrOperation = HDROutputUtils.Operation.None;
                        // If the HDRDebugView is on, we don't want the encoding
                        if (debugHandler == null || !debugHandler.HDRDebugViewIsActive(data.cameraData.resolveFinalTarget))
                            hdrOperation |= HDROutputUtils.Operation.ColorEncoding;
                        // Color conversion may have happened in the Uber post process through color grading, so we don't want to reapply it
                        if (!data.cameraData.postProcessEnabled)
                            hdrOperation |= HDROutputUtils.Operation.ColorConversion;

                        SetupHDROutput(data.cameraData.hdrDisplayColorGamut, data.blitMaterialData.material, hdrOperation, data.hdrOutputLuminanceParams);
                    }

                    if (resolveToDebugScreen)
                    {
                        RTHandle sourceTex = data.source;
                        Vector2 viewportScale = sourceTex.useScaling ? new Vector2(sourceTex.rtHandleProperties.rtHandleScale.x, sourceTex.rtHandleProperties.rtHandleScale.y) : Vector2.one;
                        int shaderPassIndex = sourceTex.rt?.filterMode == FilterMode.Bilinear ? data.blitMaterialData.bilinearSamplerPass : data.blitMaterialData.nearestSamplerPass;
                        Blitter.BlitTexture(context.cmd, sourceTex, viewportScale, data.blitMaterialData.material, shaderPassIndex);
                    }
                    else
                        ExecutePass(context.cmd, data, data.source, data.destination, data.cameraData);
                });
            }
        }
    }
}