Няма описание
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

PostProcessPassRenderGraph.cs 110KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097
  1. using UnityEngine.Experimental.Rendering;
  2. using UnityEngine.Rendering.RenderGraphModule;
  3. using System;
  4. using UnityEngine.Rendering.Universal.Internal;
  5. namespace UnityEngine.Rendering.Universal
  6. {
  7. internal partial class PostProcessPass : ScriptableRenderPass
  8. {
  9. static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture");
  10. static readonly int s_CameraOpaqueTextureID = Shader.PropertyToID("_CameraOpaqueTexture");
  11. private class UpdateCameraResolutionPassData
  12. {
  13. internal Vector2Int newCameraTargetSize;
  14. }
  15. // Updates render target descriptors and shader constants to reflect a new render size
  16. // This should be called immediately after the resolution changes mid-frame (typically after an upscaling operation).
  17. void UpdateCameraResolution(RenderGraph renderGraph, UniversalCameraData cameraData, Vector2Int newCameraTargetSize)
  18. {
  19. // Update the local descriptor and the camera data descriptor to reflect post-upscaled sizes
  20. m_Descriptor.width = newCameraTargetSize.x;
  21. m_Descriptor.height = newCameraTargetSize.y;
  22. cameraData.cameraTargetDescriptor.width = newCameraTargetSize.x;
  23. cameraData.cameraTargetDescriptor.height = newCameraTargetSize.y;
  24. // Update the shader constants to reflect the new camera resolution
  25. using (var builder = renderGraph.AddUnsafePass<UpdateCameraResolutionPassData>("Update Camera Resolution", out var passData))
  26. {
  27. passData.newCameraTargetSize = newCameraTargetSize;
  28. // This pass only modifies shader constants so we need to set some special flags to ensure it isn't culled or optimized away
  29. builder.AllowGlobalStateModification(true);
  30. builder.AllowPassCulling(false);
  31. builder.SetRenderFunc(static (UpdateCameraResolutionPassData data, UnsafeGraphContext ctx) =>
  32. {
  33. ctx.cmd.SetGlobalVector(
  34. ShaderPropertyId.screenSize,
  35. new Vector4(
  36. data.newCameraTargetSize.x,
  37. data.newCameraTargetSize.y,
  38. 1.0f / data.newCameraTargetSize.x,
  39. 1.0f / data.newCameraTargetSize.y
  40. )
  41. );
  42. });
  43. }
  44. }
  45. #region StopNaNs
  46. private class StopNaNsPassData
  47. {
  48. internal TextureHandle stopNaNTarget;
  49. internal TextureHandle sourceTexture;
  50. internal Material stopNaN;
  51. }
  52. public void RenderStopNaN(RenderGraph renderGraph, RenderTextureDescriptor cameraTargetDescriptor, in TextureHandle activeCameraColor, out TextureHandle stopNaNTarget)
  53. {
  54. var desc = PostProcessPass.GetCompatibleDescriptor(cameraTargetDescriptor,
  55. cameraTargetDescriptor.width,
  56. cameraTargetDescriptor.height,
  57. cameraTargetDescriptor.graphicsFormat,
  58. DepthBits.None);
  59. stopNaNTarget = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "_StopNaNsTarget", true, FilterMode.Bilinear);
  60. using (var builder = renderGraph.AddRasterRenderPass<StopNaNsPassData>("Stop NaNs", out var passData,
  61. ProfilingSampler.Get(URPProfileId.RG_StopNaNs)))
  62. {
  63. passData.stopNaNTarget = stopNaNTarget;
  64. builder.SetRenderAttachment(stopNaNTarget, 0, AccessFlags.ReadWrite);
  65. passData.sourceTexture = activeCameraColor;
  66. builder.UseTexture(activeCameraColor, AccessFlags.Read);
  67. passData.stopNaN = m_Materials.stopNaN;
  68. builder.SetRenderFunc(static (StopNaNsPassData data, RasterGraphContext context) =>
  69. {
  70. var cmd = context.cmd;
  71. RTHandle sourceTextureHdl = data.sourceTexture;
  72. Vector2 viewportScale = sourceTextureHdl.useScaling? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  73. Blitter.BlitTexture(cmd, sourceTextureHdl, viewportScale, data.stopNaN, 0);
  74. });
  75. }
  76. }
  77. #endregion
  78. #region SMAA
  79. private class SMAASetupPassData
  80. {
  81. internal Vector4 metrics;
  82. internal Texture2D areaTexture;
  83. internal Texture2D searchTexture;
  84. internal float stencilRef;
  85. internal float stencilMask;
  86. internal AntialiasingQuality antialiasingQuality;
  87. internal Material material;
  88. }
  89. private class SMAAPassData
  90. {
  91. internal TextureHandle destinationTexture;
  92. internal TextureHandle sourceTexture;
  93. internal TextureHandle depthStencilTexture;
  94. internal TextureHandle blendTexture;
  95. internal Material material;
  96. }
  97. public void RenderSMAA(RenderGraph renderGraph, UniversalResourceData resourceData, AntialiasingQuality antialiasingQuality, in TextureHandle source, out TextureHandle SMAATarget)
  98. {
  99. var desc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  100. m_Descriptor.width,
  101. m_Descriptor.height,
  102. m_Descriptor.graphicsFormat,
  103. DepthBits.None);
  104. SMAATarget = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "_SMAATarget", true, FilterMode.Bilinear);
  105. var edgeTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  106. m_Descriptor.width,
  107. m_Descriptor.height,
  108. m_SMAAEdgeFormat,
  109. DepthBits.None);
  110. var edgeTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, edgeTextureDesc, "_EdgeStencilTexture", true, FilterMode.Bilinear);
  111. var edgeTextureStencilDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  112. m_Descriptor.width,
  113. m_Descriptor.height,
  114. GraphicsFormat.None,
  115. DepthBits.Depth24);
  116. var edgeTextureStencil = UniversalRenderer.CreateRenderGraphTexture(renderGraph, edgeTextureStencilDesc, "_EdgeTexture", true, FilterMode.Bilinear);
  117. var blendTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  118. m_Descriptor.width,
  119. m_Descriptor.height,
  120. GraphicsFormat.R8G8B8A8_UNorm,
  121. DepthBits.None);
  122. var blendTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, blendTextureDesc, "_BlendTexture", true, FilterMode.Point);
  123. // Anti-aliasing
  124. var material = m_Materials.subpixelMorphologicalAntialiasing;
  125. using (var builder = renderGraph.AddRasterRenderPass<SMAASetupPassData>("SMAA Material Setup", out var passData, ProfilingSampler.Get(URPProfileId.RG_SMAAMaterialSetup)))
  126. {
  127. const int kStencilBit = 64;
  128. // TODO RENDERGRAPH: handle dynamic scaling
  129. passData.metrics = new Vector4(1f / m_Descriptor.width, 1f / m_Descriptor.height, m_Descriptor.width, m_Descriptor.height);
  130. passData.areaTexture = m_Data.textures.smaaAreaTex;
  131. passData.searchTexture = m_Data.textures.smaaSearchTex;
  132. passData.stencilRef = (float)kStencilBit;
  133. passData.stencilMask = (float)kStencilBit;
  134. passData.antialiasingQuality = antialiasingQuality;
  135. passData.material = material;
  136. builder.AllowPassCulling(false);
  137. builder.SetRenderFunc(static (SMAASetupPassData data, RasterGraphContext context) =>
  138. {
  139. // Globals
  140. data.material.SetVector(ShaderConstants._Metrics, data.metrics);
  141. data.material.SetTexture(ShaderConstants._AreaTexture, data.areaTexture);
  142. data.material.SetTexture(ShaderConstants._SearchTexture, data.searchTexture);
  143. data.material.SetFloat(ShaderConstants._StencilRef, data.stencilRef);
  144. data.material.SetFloat(ShaderConstants._StencilMask, data.stencilMask);
  145. // Quality presets
  146. data.material.shaderKeywords = null;
  147. switch (data.antialiasingQuality)
  148. {
  149. case AntialiasingQuality.Low:
  150. data.material.EnableKeyword(ShaderKeywordStrings.SmaaLow);
  151. break;
  152. case AntialiasingQuality.Medium:
  153. data.material.EnableKeyword(ShaderKeywordStrings.SmaaMedium);
  154. break;
  155. case AntialiasingQuality.High:
  156. data.material.EnableKeyword(ShaderKeywordStrings.SmaaHigh);
  157. break;
  158. }
  159. });
  160. }
  161. using (var builder = renderGraph.AddRasterRenderPass<SMAAPassData>("SMAA Edge Detection", out var passData, ProfilingSampler.Get(URPProfileId.RG_SMAAEdgeDetection)))
  162. {
  163. passData.destinationTexture = edgeTexture;
  164. builder.SetRenderAttachment(edgeTexture, 0, AccessFlags.Write);
  165. passData.depthStencilTexture = edgeTextureStencil;
  166. builder.SetRenderAttachmentDepth(edgeTextureStencil, AccessFlags.Write);
  167. passData.sourceTexture = source;
  168. builder.UseTexture(source, AccessFlags.Read);
  169. builder.UseTexture(resourceData.cameraDepth ,AccessFlags.Read);
  170. passData.material = material;
  171. builder.SetRenderFunc(static (SMAAPassData data, RasterGraphContext context) =>
  172. {
  173. var SMAAMaterial = data.material;
  174. var cmd = context.cmd;
  175. RTHandle sourceTextureHdl = data.sourceTexture;
  176. // Pass 1: Edge detection
  177. Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  178. Blitter.BlitTexture(cmd, sourceTextureHdl, viewportScale, SMAAMaterial, 0);
  179. });
  180. }
  181. using (var builder = renderGraph.AddRasterRenderPass<SMAAPassData>("SMAA Blend weights", out var passData, ProfilingSampler.Get(URPProfileId.RG_SMAABlendWeight)))
  182. {
  183. passData.destinationTexture = blendTexture;
  184. builder.SetRenderAttachment(blendTexture, 0, AccessFlags.Write);
  185. passData.depthStencilTexture = edgeTextureStencil;
  186. builder.SetRenderAttachmentDepth(edgeTextureStencil, AccessFlags.Read);
  187. passData.sourceTexture = edgeTexture;
  188. builder.UseTexture(edgeTexture, AccessFlags.Read);
  189. passData.material = material;
  190. builder.SetRenderFunc(static (SMAAPassData data, RasterGraphContext context) =>
  191. {
  192. var SMAAMaterial = data.material;
  193. var cmd = context.cmd;
  194. RTHandle sourceTextureHdl = data.sourceTexture;
  195. // Pass 2: Blend weights
  196. Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  197. Blitter.BlitTexture(cmd, sourceTextureHdl, viewportScale, SMAAMaterial, 1);
  198. });
  199. }
  200. using (var builder = renderGraph.AddRasterRenderPass<SMAAPassData>("SMAA Neighborhood blending", out var passData, ProfilingSampler.Get(URPProfileId.RG_SMAANeighborhoodBlend)))
  201. {
  202. builder.AllowGlobalStateModification(true);
  203. passData.destinationTexture = SMAATarget;
  204. builder.SetRenderAttachment(SMAATarget, 0, AccessFlags.Write);
  205. passData.sourceTexture = source;
  206. builder.UseTexture(source, AccessFlags.Read);
  207. passData.blendTexture = blendTexture;
  208. builder.UseTexture(blendTexture, AccessFlags.Read);
  209. passData.material = material;
  210. builder.SetRenderFunc(static (SMAAPassData data, RasterGraphContext context) =>
  211. {
  212. var SMAAMaterial = data.material;
  213. var cmd = context.cmd;
  214. RTHandle sourceTextureHdl = data.sourceTexture;
  215. // Pass 3: Neighborhood blending
  216. SMAAMaterial.SetTexture(ShaderConstants._BlendTexture, data.blendTexture);
  217. Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  218. Blitter.BlitTexture(cmd, sourceTextureHdl, viewportScale, SMAAMaterial, 2);
  219. });
  220. }
  221. }
  222. #endregion
  223. #region Bloom
  224. private class UberSetupBloomPassData
  225. {
  226. internal Vector4 bloomParams;
  227. internal Vector4 dirtScaleOffset;
  228. internal float dirtIntensity;
  229. internal Texture dirtTexture;
  230. internal bool highQualityFilteringValue;
  231. internal bool useRGBM;
  232. internal TextureHandle bloomTexture;
  233. internal Material uberMaterial;
  234. }
  235. public void UberPostSetupBloomPass(RenderGraph rendergraph, in TextureHandle bloomTexture, Material uberMaterial)
  236. {
  237. using (var builder = rendergraph.AddRasterRenderPass<UberSetupBloomPassData>("Setup Bloom Post Processing", out var passData, ProfilingSampler.Get(URPProfileId.RG_UberPostSetupBloomPass)))
  238. {
  239. // Setup bloom on uber
  240. var tint = m_Bloom.tint.value.linear;
  241. var luma = ColorUtils.Luminance(tint);
  242. tint = luma > 0f ? tint * (1f / luma) : Color.white;
  243. var bloomParams = new Vector4(m_Bloom.intensity.value, tint.r, tint.g, tint.b);
  244. // Setup lens dirtiness on uber
  245. // Keep the aspect ratio correct & center the dirt texture, we don't want it to be
  246. // stretched or squashed
  247. var dirtTexture = m_Bloom.dirtTexture.value == null ? Texture2D.blackTexture : m_Bloom.dirtTexture.value;
  248. float dirtRatio = dirtTexture.width / (float)dirtTexture.height;
  249. float screenRatio = m_Descriptor.width / (float)m_Descriptor.height;
  250. var dirtScaleOffset = new Vector4(1f, 1f, 0f, 0f);
  251. float dirtIntensity = m_Bloom.dirtIntensity.value;
  252. if (dirtRatio > screenRatio)
  253. {
  254. dirtScaleOffset.x = screenRatio / dirtRatio;
  255. dirtScaleOffset.z = (1f - dirtScaleOffset.x) * 0.5f;
  256. }
  257. else if (screenRatio > dirtRatio)
  258. {
  259. dirtScaleOffset.y = dirtRatio / screenRatio;
  260. dirtScaleOffset.w = (1f - dirtScaleOffset.y) * 0.5f;
  261. }
  262. passData.bloomParams = bloomParams;
  263. passData.dirtScaleOffset = dirtScaleOffset;
  264. passData.dirtIntensity = dirtIntensity;
  265. passData.dirtTexture = dirtTexture;
  266. passData.highQualityFilteringValue = m_Bloom.highQualityFiltering.value;
  267. passData.useRGBM = m_DefaultColorFormatUseRGBM;
  268. passData.bloomTexture = bloomTexture;
  269. builder.UseTexture(bloomTexture, AccessFlags.Read);
  270. passData.uberMaterial = uberMaterial;
  271. // TODO RENDERGRAPH: properly setup dependencies between passes
  272. builder.AllowPassCulling(false);
  273. builder.SetRenderFunc(static (UberSetupBloomPassData data, RasterGraphContext context) =>
  274. {
  275. var uberMaterial = data.uberMaterial;
  276. uberMaterial.SetVector(ShaderConstants._Bloom_Params, data.bloomParams);
  277. uberMaterial.SetFloat(ShaderConstants._Bloom_RGBM, data.useRGBM ? 1f : 0f);
  278. uberMaterial.SetVector(ShaderConstants._LensDirt_Params, data.dirtScaleOffset);
  279. uberMaterial.SetFloat(ShaderConstants._LensDirt_Intensity, data.dirtIntensity);
  280. uberMaterial.SetTexture(ShaderConstants._LensDirt_Texture, data.dirtTexture);
  281. // Keyword setup - a bit convoluted as we're trying to save some variants in Uber...
  282. if (data.highQualityFilteringValue)
  283. uberMaterial.EnableKeyword(data.dirtIntensity > 0f ? ShaderKeywordStrings.BloomHQDirt : ShaderKeywordStrings.BloomHQ);
  284. else
  285. uberMaterial.EnableKeyword(data.dirtIntensity > 0f ? ShaderKeywordStrings.BloomLQDirt : ShaderKeywordStrings.BloomLQ);
  286. uberMaterial.SetTexture(ShaderConstants._Bloom_Texture, data.bloomTexture);
  287. });
  288. }
  289. }
  290. private class BloomPassData
  291. {
  292. internal int mipCount;
  293. internal Material material;
  294. internal Material[] upsampleMaterials;
  295. internal TextureHandle sourceTexture;
  296. internal TextureHandle[] bloomMipUp;
  297. internal TextureHandle[] bloomMipDown;
  298. }
  299. internal struct BloomMaterialParams
  300. {
  301. internal Vector4 parameters;
  302. internal bool highQualityFiltering;
  303. internal bool useRGBM;
  304. internal bool enableAlphaOutput;
  305. internal bool Equals(ref BloomMaterialParams other)
  306. {
  307. return parameters == other.parameters &&
  308. highQualityFiltering == other.highQualityFiltering &&
  309. useRGBM == other.useRGBM &&
  310. enableAlphaOutput == other.enableAlphaOutput;
  311. }
  312. }
  313. public void RenderBloomTexture(RenderGraph renderGraph, in TextureHandle source, out TextureHandle destination, bool enableAlphaOutput)
  314. {
  315. // Start at half-res
  316. int downres = 1;
  317. switch (m_Bloom.downscale.value)
  318. {
  319. case BloomDownscaleMode.Half:
  320. downres = 1;
  321. break;
  322. case BloomDownscaleMode.Quarter:
  323. downres = 2;
  324. break;
  325. default:
  326. throw new ArgumentOutOfRangeException();
  327. }
  328. int tw = m_Descriptor.width >> downres;
  329. int th = m_Descriptor.height >> downres;
  330. // Determine the iteration count
  331. int maxSize = Mathf.Max(tw, th);
  332. int iterations = Mathf.FloorToInt(Mathf.Log(maxSize, 2f) - 1);
  333. int mipCount = Mathf.Clamp(iterations, 1, m_Bloom.maxIterations.value);
  334. // Setup
  335. using(new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_BloomSetup)))
  336. {
  337. // Pre-filtering parameters
  338. float clamp = m_Bloom.clamp.value;
  339. float threshold = Mathf.GammaToLinearSpace(m_Bloom.threshold.value);
  340. float thresholdKnee = threshold * 0.5f; // Hardcoded soft knee
  341. // Material setup
  342. float scatter = Mathf.Lerp(0.05f, 0.95f, m_Bloom.scatter.value);
  343. BloomMaterialParams bloomParams = new BloomMaterialParams();
  344. bloomParams.parameters = new Vector4(scatter, clamp, threshold, thresholdKnee);
  345. bloomParams.highQualityFiltering = m_Bloom.highQualityFiltering.value;
  346. bloomParams.useRGBM = m_DefaultColorFormatUseRGBM;
  347. bloomParams.enableAlphaOutput = enableAlphaOutput;
  348. // Setting keywords can be somewhat expensive on low-end platforms.
  349. // Previous params are cached to avoid setting the same keywords every frame.
  350. var material = m_Materials.bloom;
  351. bool bloomParamsDirty = !m_BloomParamsPrev.Equals(ref bloomParams);
  352. bool isParamsPropertySet = material.HasProperty(ShaderConstants._Params);
  353. if (bloomParamsDirty || !isParamsPropertySet)
  354. {
  355. material.SetVector(ShaderConstants._Params, bloomParams.parameters);
  356. CoreUtils.SetKeyword(material, ShaderKeywordStrings.BloomHQ, bloomParams.highQualityFiltering);
  357. CoreUtils.SetKeyword(material, ShaderKeywordStrings.UseRGBM, bloomParams.useRGBM);
  358. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, bloomParams.enableAlphaOutput);
  359. // These materials are duplicate just to allow different bloom blits to use different textures.
  360. for (uint i = 0; i < k_MaxPyramidSize; ++i)
  361. {
  362. var materialPyramid = m_Materials.bloomUpsample[i];
  363. materialPyramid.SetVector(ShaderConstants._Params, bloomParams.parameters);
  364. CoreUtils.SetKeyword(materialPyramid, ShaderKeywordStrings.BloomHQ, bloomParams.highQualityFiltering);
  365. CoreUtils.SetKeyword(materialPyramid, ShaderKeywordStrings.UseRGBM, bloomParams.useRGBM);
  366. CoreUtils.SetKeyword(materialPyramid, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, bloomParams.enableAlphaOutput);
  367. }
  368. m_BloomParamsPrev = bloomParams;
  369. }
  370. // Create bloom mip pyramid textures
  371. {
  372. var desc = GetCompatibleDescriptor(tw, th, m_DefaultColorFormat);
  373. _BloomMipDown[0] = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, m_BloomMipDown[0].name, false, FilterMode.Bilinear);
  374. _BloomMipUp[0] = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, m_BloomMipUp[0].name, false, FilterMode.Bilinear);
  375. for (int i = 1; i < mipCount; i++)
  376. {
  377. tw = Mathf.Max(1, tw >> 1);
  378. th = Mathf.Max(1, th >> 1);
  379. ref TextureHandle mipDown = ref _BloomMipDown[i];
  380. ref TextureHandle mipUp = ref _BloomMipUp[i];
  381. desc.width = tw;
  382. desc.height = th;
  383. // NOTE: Reuse RTHandle names for TextureHandles
  384. mipDown = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, m_BloomMipDown[i].name, false, FilterMode.Bilinear);
  385. mipUp = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, m_BloomMipUp[i].name, false, FilterMode.Bilinear);
  386. }
  387. }
  388. }
  389. using (var builder = renderGraph.AddUnsafePass<BloomPassData>("Blit Bloom Mipmaps", out var passData, ProfilingSampler.Get(URPProfileId.Bloom)))
  390. {
  391. passData.mipCount = mipCount;
  392. passData.material = m_Materials.bloom;
  393. passData.upsampleMaterials = m_Materials.bloomUpsample;
  394. passData.sourceTexture = source;
  395. passData.bloomMipDown = _BloomMipDown;
  396. passData.bloomMipUp = _BloomMipUp;
  397. // TODO RENDERGRAPH: properly setup dependencies between passes
  398. builder.AllowPassCulling(false);
  399. builder.UseTexture(source, AccessFlags.Read);
  400. for (int i = 0; i < mipCount; i++)
  401. {
  402. builder.UseTexture(_BloomMipDown[i], AccessFlags.ReadWrite);
  403. builder.UseTexture(_BloomMipUp[i], AccessFlags.ReadWrite);
  404. }
  405. builder.SetRenderFunc(static (BloomPassData data, UnsafeGraphContext context) =>
  406. {
  407. // TODO: can't call BlitTexture with unsafe command buffer
  408. var cmd = CommandBufferHelpers.GetNativeCommandBuffer(context.cmd);
  409. var material = data.material;
  410. int mipCount = data.mipCount;
  411. var loadAction = RenderBufferLoadAction.DontCare; // Blit - always write all pixels
  412. var storeAction = RenderBufferStoreAction.Store; // Blit - always read by then next Blit
  413. // Prefilter
  414. using(new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.RG_BloomPrefilter)))
  415. {
  416. Blitter.BlitCameraTexture(cmd, data.sourceTexture, data.bloomMipDown[0], loadAction, storeAction, material, 0);
  417. }
  418. // Downsample - gaussian pyramid
  419. // Classic two pass gaussian blur - use mipUp as a temporary target
  420. // First pass does 2x downsampling + 9-tap gaussian
  421. // Second pass does 9-tap gaussian using a 5-tap filter + bilinear filtering
  422. using(new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.RG_BloomDownsample)))
  423. {
  424. TextureHandle lastDown = data.bloomMipDown[0];
  425. for (int i = 1; i < mipCount; i++)
  426. {
  427. TextureHandle mipDown = data.bloomMipDown[i];
  428. TextureHandle mipUp = data.bloomMipUp[i];
  429. Blitter.BlitCameraTexture(cmd, lastDown, mipUp, loadAction, storeAction, material, 1);
  430. Blitter.BlitCameraTexture(cmd, mipUp, mipDown, loadAction, storeAction, material, 2);
  431. lastDown = mipDown;
  432. }
  433. }
  434. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.RG_BloomUpsample)))
  435. {
  436. // Upsample (bilinear by default, HQ filtering does bicubic instead
  437. for (int i = mipCount - 2; i >= 0; i--)
  438. {
  439. TextureHandle lowMip = (i == mipCount - 2) ? data.bloomMipDown[i + 1] : data.bloomMipUp[i + 1];
  440. TextureHandle highMip = data.bloomMipDown[i];
  441. TextureHandle dst = data.bloomMipUp[i];
  442. // We need a separate material for each upsample pass because setting the low texture mip source
  443. // gets overriden by the time the render func is executed.
  444. // Material is a reference, so all the blits would share the same material state in the cmdbuf.
  445. // NOTE: another option would be to use cmd.SetGlobalTexture().
  446. var upMaterial = data.upsampleMaterials[i];
  447. upMaterial.SetTexture(ShaderConstants._SourceTexLowMip, lowMip);
  448. Blitter.BlitCameraTexture(cmd, highMip, dst, loadAction, storeAction, upMaterial, 3);
  449. }
  450. }
  451. });
  452. destination = passData.bloomMipUp[0];
  453. }
  454. }
  455. #endregion
  456. #region DoF
  457. public void RenderDoF(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, in TextureHandle source, out TextureHandle destination)
  458. {
  459. var dofMaterial = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian ? m_Materials.gaussianDepthOfField : m_Materials.bokehDepthOfField;
  460. var desc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  461. m_Descriptor.width,
  462. m_Descriptor.height,
  463. m_Descriptor.graphicsFormat,
  464. DepthBits.None);
  465. destination = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "_DoFTarget", true, FilterMode.Bilinear);
  466. CoreUtils.SetKeyword(dofMaterial, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, cameraData.isAlphaOutputEnabled);
  467. if (m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian)
  468. {
  469. RenderDoFGaussian(renderGraph, resourceData, cameraData, source, destination, ref dofMaterial);
  470. }
  471. else if (m_DepthOfField.mode.value == DepthOfFieldMode.Bokeh)
  472. {
  473. RenderDoFBokeh(renderGraph, resourceData, cameraData, source, destination, ref dofMaterial);
  474. }
  475. }
// Payload handed from RenderDoFGaussian's setup phase to its static render function.
private class DoFGaussianPassData
{
    // Setup
    internal int downsample;                  // Resolution divisor for the half-res blur targets (set to 2 by the caller).
    internal RenderingData renderingData;     // NOTE(review): not read by the render func in this file — possibly vestigial; confirm before removing.
    internal Vector3 cocParams;               // x: gaussian far start, y: far end, z: max blur radius (clamped by the caller).
    internal bool highQualitySamplingValue;   // Drives the HighQualitySampling shader keyword.
    // Inputs
    internal TextureHandle sourceTexture;     // Camera color input.
    internal TextureHandle depthTexture;      // Camera depth, sampled by the CoC pass.
    internal Material material;               // Gaussian DoF material (prefilter / blur / composite passes).
    internal Material materialCoC;            // Dedicated material for the CoC computation pass.
    // Pass textures
    internal TextureHandle halfCoCTexture;
    internal TextureHandle fullCoCTexture;
    internal TextureHandle pingTexture;
    internal TextureHandle pongTexture;
    internal RenderTargetIdentifier[] multipleRenderTargets = new RenderTargetIdentifier[2]; // Reused MRT array to avoid per-frame allocations.
    // Output textures
    internal TextureHandle destination;
};
  497. public void RenderDoFGaussian(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, in TextureHandle source, TextureHandle destination, ref Material dofMaterial)
  498. {
  499. var material = dofMaterial;
  500. int downSample = 2;
  501. int wh = m_Descriptor.width / downSample;
  502. int hh = m_Descriptor.height / downSample;
  503. // Pass Textures
  504. var fullCoCTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, m_Descriptor.width, m_Descriptor.height, m_GaussianCoCFormat);
  505. var fullCoCTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, fullCoCTextureDesc, "_FullCoCTexture", true, FilterMode.Bilinear);
  506. var halfCoCTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, wh, hh, m_GaussianCoCFormat);
  507. var halfCoCTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, halfCoCTextureDesc, "_HalfCoCTexture", true, FilterMode.Bilinear);
  508. var pingTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, wh, hh, m_DefaultColorFormat);
  509. var pingTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, pingTextureDesc, "_PingTexture", true, FilterMode.Bilinear);
  510. var pongTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, wh, hh, m_DefaultColorFormat);
  511. var pongTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, pongTextureDesc, "_PongTexture", true, FilterMode.Bilinear);
  512. using (var builder = renderGraph.AddUnsafePass<DoFGaussianPassData>("Depth of Field - Gaussian", out var passData))
  513. {
  514. // Setup
  515. float farStart = m_DepthOfField.gaussianStart.value;
  516. float farEnd = Mathf.Max(farStart, m_DepthOfField.gaussianEnd.value);
  517. // Assumes a radius of 1 is 1 at 1080p
  518. // Past a certain radius our gaussian kernel will look very bad so we'll clamp it for
  519. // very high resolutions (4K+).
  520. float maxRadius = m_DepthOfField.gaussianMaxRadius.value * (wh / 1080f);
  521. maxRadius = Mathf.Min(maxRadius, 2f);
  522. passData.downsample = downSample;
  523. passData.cocParams = new Vector3(farStart, farEnd, maxRadius);
  524. passData.highQualitySamplingValue = m_DepthOfField.highQualitySampling.value;
  525. passData.material = material;
  526. passData.materialCoC = m_Materials.gaussianDepthOfFieldCoC;
  527. // Inputs
  528. passData.sourceTexture = source;
  529. builder.UseTexture(source, AccessFlags.Read);
  530. passData.depthTexture = resourceData.cameraDepthTexture;
  531. builder.UseTexture(resourceData.cameraDepthTexture, AccessFlags.Read);
  532. // Pass Textures
  533. passData.fullCoCTexture = fullCoCTexture;
  534. builder.UseTexture(fullCoCTexture, AccessFlags.ReadWrite);
  535. passData.halfCoCTexture = halfCoCTexture;
  536. builder.UseTexture(halfCoCTexture, AccessFlags.ReadWrite);
  537. passData.pingTexture = pingTexture;
  538. builder.UseTexture(pingTexture, AccessFlags.ReadWrite);
  539. passData.pongTexture = pongTexture;
  540. builder.UseTexture(pongTexture, AccessFlags.ReadWrite);
  541. // Outputs
  542. passData.destination = destination;
  543. builder.UseTexture(destination, AccessFlags.Write);
  544. builder.SetRenderFunc(static (DoFGaussianPassData data, UnsafeGraphContext context) =>
  545. {
  546. var dofMat = data.material;
  547. var dofMaterialCoC = data.materialCoC;
  548. var cmd = CommandBufferHelpers.GetNativeCommandBuffer(context.cmd);
  549. RTHandle sourceTextureHdl = data.sourceTexture;
  550. RTHandle dstHdl = data.destination;
  551. // Setup
  552. using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_SetupDoF)))
  553. {
  554. dofMat.SetVector(ShaderConstants._CoCParams, data.cocParams);
  555. CoreUtils.SetKeyword(dofMat, ShaderKeywordStrings.HighQualitySampling,
  556. data.highQualitySamplingValue);
  557. dofMaterialCoC.SetVector(ShaderConstants._CoCParams, data.cocParams);
  558. CoreUtils.SetKeyword(dofMaterialCoC, ShaderKeywordStrings.HighQualitySampling,
  559. data.highQualitySamplingValue);
  560. PostProcessUtils.SetSourceSize(cmd, data.sourceTexture);
  561. dofMat.SetVector(ShaderConstants._DownSampleScaleFactor,
  562. new Vector4(1.0f / data.downsample, 1.0f / data.downsample, data.downsample,
  563. data.downsample));
  564. }
  565. // Compute CoC
  566. using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFComputeCOC)))
  567. {
  568. dofMat.SetTexture(s_CameraDepthTextureID, data.depthTexture);
  569. Blitter.BlitCameraTexture(cmd, data.sourceTexture, data.fullCoCTexture, data.materialCoC, k_GaussianDoFPassComputeCoc);
  570. }
  571. // Downscale & prefilter color + CoC
  572. using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFDownscalePrefilter)))
  573. {
  574. dofMat.SetTexture(ShaderConstants._FullCoCTexture, data.fullCoCTexture);
  575. // Handle packed shader output
  576. data.multipleRenderTargets[0] = data.halfCoCTexture;
  577. data.multipleRenderTargets[1] = data.pingTexture;
  578. CoreUtils.SetRenderTarget(cmd, data.multipleRenderTargets, data.halfCoCTexture);
  579. Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  580. Blitter.BlitTexture(cmd, data.sourceTexture, viewportScale, dofMat, k_GaussianDoFPassDownscalePrefilter);
  581. }
  582. // Blur H
  583. using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFBlurH)))
  584. {
  585. dofMat.SetTexture(ShaderConstants._HalfCoCTexture, data.halfCoCTexture);
  586. Blitter.BlitCameraTexture(cmd, data.pingTexture, data.pongTexture, dofMat, k_GaussianDoFPassBlurH);
  587. }
  588. // Blur V
  589. using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFBlurV)))
  590. {
  591. Blitter.BlitCameraTexture(cmd, data.pongTexture, data.pingTexture, dofMat, k_GaussianDoFPassBlurV);
  592. }
  593. // Composite
  594. using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFComposite)))
  595. {
  596. dofMat.SetTexture(ShaderConstants._ColorTexture, data.pingTexture);
  597. dofMat.SetTexture(ShaderConstants._FullCoCTexture, data.fullCoCTexture);
  598. Blitter.BlitCameraTexture(cmd, sourceTextureHdl, dstHdl, dofMat, k_GaussianDoFPassComposite);
  599. }
  600. });
  601. }
  602. }
// Payload handed from RenderDoFBokeh's setup phase to its static render function.
private class DoFBokehPassData
{
    // Setup
    internal Vector4[] bokehKernel;           // Precomputed sample kernel (see PrepareBokehKernel).
    internal int downSample;                  // Resolution divisor for the half-res targets (set to 2 by the caller).
    internal float uvMargin;                  // UV clamp margin derived from target height and downsample.
    internal Vector4 cocParams;               // x: focus distance, y: max CoC, z: max radius (px), w: 1/aspect.
    internal bool useFastSRGBLinearConversion; // Drives the UseFastSRGBLinearConversion shader keyword.
    // Inputs
    internal TextureHandle sourceTexture;     // Camera color input.
    internal TextureHandle depthTexture;      // Camera depth, sampled by the CoC pass.
    internal Material material;               // Bokeh DoF material used by all blits in the render func.
    internal Material materialCoC;            // CoC material; only its keyword is toggled in this version.
    // Pass textures
    internal TextureHandle halfCoCTexture;    // NOTE(review): never assigned/used by RenderDoFBokeh — confirm before removing.
    internal TextureHandle fullCoCTexture;
    internal TextureHandle pingTexture;
    internal TextureHandle pongTexture;
    // Output texture
    internal TextureHandle destination;
};
/// <summary>
/// Records the bokeh depth of field pass chain: full-res CoC computation, half-res
/// downscale/prefilter, bokeh gather blur, post filter, and final composite.
/// </summary>
public void RenderDoFBokeh(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, in TextureHandle source, in TextureHandle destination, ref Material dofMaterial)
{
    int downSample = 2;
    var material = dofMaterial;
    int wh = m_Descriptor.width / downSample;
    int hh = m_Descriptor.height / downSample;

    // Pass Textures
    var fullCoCTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, m_Descriptor.width, m_Descriptor.height, GraphicsFormat.R8_UNorm);
    var fullCoCTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, fullCoCTextureDesc, "_FullCoCTexture", true, FilterMode.Bilinear);
    var pingTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, wh, hh, GraphicsFormat.R16G16B16A16_SFloat);
    var pingTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, pingTextureDesc, "_PingTexture", true, FilterMode.Bilinear);
    var pongTextureDesc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor, wh, hh, GraphicsFormat.R16G16B16A16_SFloat);
    var pongTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, pongTextureDesc, "_PongTexture", true, FilterMode.Bilinear);

    using (var builder = renderGraph.AddUnsafePass<DoFBokehPassData>("Depth of Field - Bokeh", out var passData))
    {
        // Setup
        // "A Lens and Aperture Camera Model for Synthetic Image Generation" [Potmesil81]
        float F = m_DepthOfField.focalLength.value / 1000f;  // Focal length scaled by 1/1000 (value authored in mm).
        float A = m_DepthOfField.focalLength.value / m_DepthOfField.aperture.value; // Aperture diameter.
        float P = m_DepthOfField.focusDistance.value;
        float maxCoC = (A * F) / (P - F);
        float maxRadius = GetMaxBokehRadiusInPixels(m_Descriptor.height);
        float rcpAspect = 1f / (wh / (float)hh);

        // Prepare the bokeh kernel constant buffer.
        // Only rebuilt when the DoF settings, radius, or aspect have changed since last frame.
        int hash = m_DepthOfField.GetHashCode();
        if (hash != m_BokehHash || maxRadius != m_BokehMaxRadius || rcpAspect != m_BokehRCPAspect)
        {
            m_BokehHash = hash;
            m_BokehMaxRadius = maxRadius;
            m_BokehRCPAspect = rcpAspect;
            PrepareBokehKernel(maxRadius, rcpAspect);
        }
        float uvMargin = (1.0f / m_Descriptor.height) * downSample;

        passData.bokehKernel = m_BokehKernel;
        passData.downSample = downSample;
        passData.uvMargin = uvMargin;
        passData.cocParams = new Vector4(P, maxCoC, maxRadius, rcpAspect);
        passData.useFastSRGBLinearConversion = m_UseFastSRGBLinearConversion;
        // Inputs
        passData.sourceTexture = source;
        builder.UseTexture(source, AccessFlags.Read);
        passData.depthTexture = resourceData.cameraDepthTexture;
        builder.UseTexture(resourceData.cameraDepthTexture, AccessFlags.Read);
        passData.material = material;
        // NOTE(review): materialCoC only receives a keyword below; every blit uses 'material' —
        // confirm whether the CoC blit was meant to use the dedicated CoC material.
        passData.materialCoC = m_Materials.bokehDepthOfFieldCoC;
        // Pass Textures
        passData.fullCoCTexture = fullCoCTexture;
        builder.UseTexture(fullCoCTexture, AccessFlags.ReadWrite);
        passData.pingTexture = pingTexture;
        builder.UseTexture(pingTexture, AccessFlags.ReadWrite);
        passData.pongTexture = pongTexture;
        builder.UseTexture(pongTexture, AccessFlags.ReadWrite);
        // Outputs
        passData.destination = destination;
        builder.UseTexture(destination, AccessFlags.Write);

        // TODO RENDERGRAPH: properly setup dependencies between passes
        builder.SetRenderFunc(static (DoFBokehPassData data, UnsafeGraphContext context) =>
        {
            var dofMat = data.material;
            var dofMaterialCoC = data.materialCoC;
            var cmd = CommandBufferHelpers.GetNativeCommandBuffer(context.cmd);
            RTHandle sourceTextureHdl = data.sourceTexture;
            RTHandle dst = data.destination;

            // Setup: push all constants before the blit chain runs.
            using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_SetupDoF)))
            {
                CoreUtils.SetKeyword(dofMat, ShaderKeywordStrings.UseFastSRGBLinearConversion,
                    data.useFastSRGBLinearConversion);
                CoreUtils.SetKeyword(dofMaterialCoC, ShaderKeywordStrings.UseFastSRGBLinearConversion,
                    data.useFastSRGBLinearConversion);
                dofMat.SetVector(ShaderConstants._CoCParams, data.cocParams);
                dofMat.SetVectorArray(ShaderConstants._BokehKernel, data.bokehKernel);
                dofMat.SetVector(ShaderConstants._DownSampleScaleFactor,
                    new Vector4(1.0f / data.downSample, 1.0f / data.downSample, data.downSample,
                        data.downSample));
                dofMat.SetVector(ShaderConstants._BokehConstants,
                    new Vector4(data.uvMargin, data.uvMargin * 2.0f));
                PostProcessUtils.SetSourceSize(cmd, data.sourceTexture);
            }
            // Compute CoC
            using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFComputeCOC)))
            {
                dofMat.SetTexture(s_CameraDepthTextureID, data.depthTexture);
                Blitter.BlitCameraTexture(cmd, sourceTextureHdl, data.fullCoCTexture, dofMat, k_BokehDoFPassComputeCoc);
            }
            // Downscale and Prefilter Color + CoC
            using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFDownscalePrefilter)))
            {
                dofMat.SetTexture(ShaderConstants._FullCoCTexture, data.fullCoCTexture);
                Blitter.BlitCameraTexture(cmd, sourceTextureHdl, data.pingTexture, dofMat, k_BokehDoFPassDownscalePrefilter);
            }
            // Blur (bokeh gather, ping -> pong)
            using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFBlurBokeh)))
            {
                Blitter.BlitCameraTexture(cmd, data.pingTexture, data.pongTexture, dofMat, k_BokehDoFPassBlur);
            }
            // Post Filtering (pong -> ping)
            using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFPostFilter)))
            {
                Blitter.BlitCameraTexture(cmd, data.pongTexture, data.pingTexture, dofMat, k_BokehDoFPassPostFilter);
            }
            // Composite blurred result with the sharp source.
            using (new ProfilingScope(ProfilingSampler.Get(URPProfileId.RG_DOFComposite)))
            {
                dofMat.SetTexture(ShaderConstants._DofTexture, data.pingTexture);
                Blitter.BlitCameraTexture(cmd, sourceTextureHdl, dst, dofMat, k_BokehDoFPassComposite);
            }
        });
    }
}
  734. #endregion
  735. #region Panini
// Payload handed from RenderPaniniProjection's setup phase to its static render function.
private class PaniniProjectionPassData
{
    internal TextureHandle destinationTexture;     // Full-res Panini output target.
    internal TextureHandle sourceTexture;          // Camera color input.
    internal RenderTextureDescriptor sourceTextureDesc;
    internal Material material;                    // Panini projection material.
    internal Vector4 paniniParams;                 // x,y: view extents, z: distance (D), w: crop scale (S).
    internal bool isPaniniGeneric;                 // Selects the generic vs unit-distance shader keyword.
}
  745. public void RenderPaniniProjection(RenderGraph renderGraph, Camera camera, in TextureHandle source, out TextureHandle destination)
  746. {
  747. var desc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  748. m_Descriptor.width,
  749. m_Descriptor.height,
  750. m_Descriptor.graphicsFormat,
  751. DepthBits.None);
  752. destination = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "_PaniniProjectionTarget", true, FilterMode.Bilinear);
  753. float distance = m_PaniniProjection.distance.value;
  754. var viewExtents = CalcViewExtents(camera);
  755. var cropExtents = CalcCropExtents(camera, distance);
  756. float scaleX = cropExtents.x / viewExtents.x;
  757. float scaleY = cropExtents.y / viewExtents.y;
  758. float scaleF = Mathf.Min(scaleX, scaleY);
  759. float paniniD = distance;
  760. float paniniS = Mathf.Lerp(1f, Mathf.Clamp01(scaleF), m_PaniniProjection.cropToFit.value);
  761. using (var builder = renderGraph.AddRasterRenderPass<PaniniProjectionPassData>("Panini Projection", out var passData, ProfilingSampler.Get(URPProfileId.PaniniProjection)))
  762. {
  763. builder.AllowGlobalStateModification(true);
  764. passData.destinationTexture = destination;
  765. builder.SetRenderAttachment(destination, 0, AccessFlags.Write);
  766. passData.sourceTexture = source;
  767. builder.UseTexture(source, AccessFlags.Read);
  768. passData.material = m_Materials.paniniProjection;
  769. passData.paniniParams = new Vector4(viewExtents.x, viewExtents.y, paniniD, paniniS);
  770. passData.isPaniniGeneric = 1f - Mathf.Abs(paniniD) > float.Epsilon;
  771. passData.sourceTextureDesc = m_Descriptor;
  772. builder.SetRenderFunc(static (PaniniProjectionPassData data, RasterGraphContext context) =>
  773. {
  774. var cmd = context.cmd;
  775. RTHandle sourceTextureHdl = data.sourceTexture;
  776. cmd.SetGlobalVector(ShaderConstants._Params, data.paniniParams);
  777. data.material.EnableKeyword(data.isPaniniGeneric ? ShaderKeywordStrings.PaniniGeneric : ShaderKeywordStrings.PaniniUnitDistance);
  778. Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  779. Blitter.BlitTexture(cmd, sourceTextureHdl, viewportScale, data.material, 0);
  780. });
  781. return;
  782. }
  783. }
  784. #endregion
  785. #region TemporalAA
// Name of the render graph texture that receives the TAA-resolved color.
private const string _TemporalAATargetName = "_TemporalAATarget";
  787. private void RenderTemporalAA(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, ref TextureHandle source, out TextureHandle destination)
  788. {
  789. var desc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  790. m_Descriptor.width,
  791. m_Descriptor.height,
  792. m_Descriptor.graphicsFormat,
  793. DepthBits.None);
  794. destination = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, _TemporalAATargetName, false, FilterMode.Bilinear);
  795. TextureHandle cameraDepth = resourceData.cameraDepth;
  796. TextureHandle motionVectors = resourceData.motionVectorColor;
  797. Debug.Assert(motionVectors.IsValid(), "MotionVectors are invalid. TAA requires a motion vector texture.");
  798. TemporalAA.Render(renderGraph, m_Materials.temporalAntialiasing, cameraData, ref source, ref cameraDepth, ref motionVectors, ref destination);
  799. }
  800. #endregion
  801. #region STP
// Name of the render graph texture that receives the STP-upscaled color.
private const string _UpscaledColorTargetName = "_UpscaledColorTarget";
  803. private void RenderSTP(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, ref TextureHandle source, out TextureHandle destination)
  804. {
  805. TextureHandle cameraDepth = resourceData.cameraDepth;
  806. TextureHandle motionVectors = resourceData.motionVectorColor;
  807. Debug.Assert(motionVectors.IsValid(), "MotionVectors are invalid. STP requires a motion vector texture.");
  808. var desc = GetCompatibleDescriptor(cameraData.cameraTargetDescriptor,
  809. cameraData.pixelWidth,
  810. cameraData.pixelHeight,
  811. cameraData.cameraTargetDescriptor.graphicsFormat,
  812. DepthBits.None);
  813. // STP uses compute shaders so all render textures must enable random writes
  814. desc.enableRandomWrite = true;
  815. // Avoid enabling sRGB because STP works with compute shaders which can't output sRGB automatically.
  816. desc.sRGB = false;
  817. destination = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, _UpscaledColorTargetName, false, FilterMode.Bilinear);
  818. int frameIndex = Time.frameCount;
  819. var noiseTexture = m_Data.textures.blueNoise16LTex[frameIndex & (m_Data.textures.blueNoise16LTex.Length - 1)];
  820. StpUtils.Execute(renderGraph, resourceData, cameraData, source, cameraDepth, motionVectors, destination, noiseTexture);
  821. // Update the camera resolution to reflect the upscaled size
  822. UpdateCameraResolution(renderGraph, cameraData, new Vector2Int(desc.width, desc.height));
  823. }
  824. #endregion
  825. #region MotionBlur
// Payload handed from RenderMotionBlur's setup phase to its static render function.
private class MotionBlurPassData
{
    internal TextureHandle destinationTexture;  // Full-res motion blur output target.
    internal TextureHandle sourceTexture;       // Camera color input.
    internal TextureHandle motionVectors;       // Valid only in CameraAndObjects mode; nullHandle otherwise.
    internal Material material;                 // Camera motion blur material.
    internal int passIndex;                     // Quality level, offset by +3 for the object-motion variants.
    internal Camera camera;
    internal XRPass xr;
    internal float intensity;
    internal float clamp;
    internal bool enableAlphaOutput;            // Drives the _ENABLE_ALPHA_OUTPUT shader keyword.
}
  839. public void RenderMotionBlur(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, in TextureHandle source, out TextureHandle destination)
  840. {
  841. var material = m_Materials.cameraMotionBlur;
  842. var desc = PostProcessPass.GetCompatibleDescriptor(m_Descriptor,
  843. m_Descriptor.width,
  844. m_Descriptor.height,
  845. m_Descriptor.graphicsFormat,
  846. DepthBits.None);
  847. destination = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "_MotionBlurTarget", true, FilterMode.Bilinear);
  848. TextureHandle motionVectorColor = resourceData.motionVectorColor;
  849. TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture;
  850. var mode = m_MotionBlur.mode.value;
  851. int passIndex = (int)m_MotionBlur.quality.value;
  852. passIndex += (mode == MotionBlurMode.CameraAndObjects) ? 3 : 0;
  853. using (var builder = renderGraph.AddRasterRenderPass<MotionBlurPassData>("Motion Blur", out var passData, ProfilingSampler.Get(URPProfileId.RG_MotionBlur)))
  854. {
  855. builder.AllowGlobalStateModification(true);
  856. passData.destinationTexture = destination;
  857. builder.SetRenderAttachment(destination, 0, AccessFlags.Write);
  858. passData.sourceTexture = source;
  859. builder.UseTexture(source, AccessFlags.Read);
  860. if (mode == MotionBlurMode.CameraAndObjects)
  861. {
  862. Debug.Assert(motionVectorColor.IsValid(), "Motion vectors are invalid. Per-object motion blur requires a motion vector texture.");
  863. passData.motionVectors = motionVectorColor;
  864. builder.UseTexture(motionVectorColor, AccessFlags.Read);
  865. }
  866. else
  867. {
  868. passData.motionVectors = TextureHandle.nullHandle;
  869. }
  870. builder.UseTexture(cameraDepthTexture, AccessFlags.Read);
  871. passData.material = material;
  872. passData.passIndex = passIndex;
  873. passData.camera = cameraData.camera;
  874. passData.xr = cameraData.xr;
  875. passData.enableAlphaOutput = cameraData.isAlphaOutputEnabled;
  876. passData.intensity = m_MotionBlur.intensity.value;
  877. passData.clamp = m_MotionBlur.clamp.value;
  878. builder.SetRenderFunc(static (MotionBlurPassData data, RasterGraphContext context) =>
  879. {
  880. var cmd = context.cmd;
  881. RTHandle sourceTextureHdl = data.sourceTexture;
  882. UpdateMotionBlurMatrices(ref data.material, data.camera, data.xr);
  883. data.material.SetFloat("_Intensity", data.intensity);
  884. data.material.SetFloat("_Clamp", data.clamp);
  885. CoreUtils.SetKeyword(data.material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, data.enableAlphaOutput);
  886. PostProcessUtils.SetSourceSize(cmd, data.sourceTexture);
  887. Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  888. Blitter.BlitTexture(cmd, sourceTextureHdl, viewportScale, data.material, data.passIndex);
  889. });
  890. return;
  891. }
  892. }
  893. #endregion
  894. #region LensFlareDataDriven
// Payload shared by both data-driven lens flare passes (occlusion compute and render).
private class LensFlarePassData
{
    internal TextureHandle destinationTexture;  // Occlusion RT or the flare color target, depending on the pass.
    internal RenderTextureDescriptor sourceDescriptor;
    internal UniversalCameraData cameraData;
    internal Material material;                 // Data-driven lens flare material.
    internal Rect viewport;
    internal float paniniDistance;              // 1.0 when Panini is inactive.
    internal float paniniCropToFit;             // 1.0 when Panini is inactive.
    internal float width;                       // Descriptor width as float.
    internal float height;                      // Descriptor height as float.
    internal bool usePanini;                    // Whether Panini projection parameters should be applied.
}
  908. void LensFlareDataDrivenComputeOcclusion(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData)
  909. {
  910. if (!LensFlareCommonSRP.IsOcclusionRTCompatible())
  911. return;
  912. using (var builder = renderGraph.AddUnsafePass<LensFlarePassData>("Lens Flare Compute Occlusion", out var passData, ProfilingSampler.Get(URPProfileId.LensFlareDataDrivenComputeOcclusion)))
  913. {
  914. RTHandle occH = LensFlareCommonSRP.occlusionRT;
  915. TextureHandle occlusionHandle = renderGraph.ImportTexture(LensFlareCommonSRP.occlusionRT);
  916. passData.destinationTexture = occlusionHandle;
  917. builder.UseTexture(occlusionHandle, AccessFlags.Write);
  918. passData.cameraData = cameraData;
  919. passData.viewport = cameraData.pixelRect;
  920. passData.material = m_Materials.lensFlareDataDriven;
  921. passData.width = (float)m_Descriptor.width;
  922. passData.height = (float)m_Descriptor.height;
  923. if (m_PaniniProjection.IsActive())
  924. {
  925. passData.usePanini = true;
  926. passData.paniniDistance = m_PaniniProjection.distance.value;
  927. passData.paniniCropToFit = m_PaniniProjection.cropToFit.value;
  928. }
  929. else
  930. {
  931. passData.usePanini = false;
  932. passData.paniniDistance = 1.0f;
  933. passData.paniniCropToFit = 1.0f;
  934. }
  935. builder.UseTexture(resourceData.cameraDepthTexture, AccessFlags.Read);
  936. builder.SetRenderFunc(
  937. static (LensFlarePassData data, UnsafeGraphContext ctx) =>
  938. {
  939. Camera camera = data.cameraData.camera;
  940. XRPass xr = data.cameraData.xr;
  941. Matrix4x4 nonJitteredViewProjMatrix0;
  942. int xrId0;
  943. #if ENABLE_VR && ENABLE_XR_MODULE
  944. // Not VR or Multi-Pass
  945. if (xr.enabled)
  946. {
  947. if (xr.singlePassEnabled)
  948. {
  949. nonJitteredViewProjMatrix0 = GL.GetGPUProjectionMatrix(data.cameraData.GetProjectionMatrixNoJitter(0), true) * data.cameraData.GetViewMatrix(0);
  950. xrId0 = 0;
  951. }
  952. else
  953. {
  954. var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
  955. nonJitteredViewProjMatrix0 = gpuNonJitteredProj * camera.worldToCameraMatrix;
  956. xrId0 = data.cameraData.xr.multipassId;
  957. }
  958. }
  959. else
  960. {
  961. nonJitteredViewProjMatrix0 = GL.GetGPUProjectionMatrix(data.cameraData.GetProjectionMatrixNoJitter(0), true) * data.cameraData.GetViewMatrix(0);
  962. xrId0 = 0;
  963. }
  964. #else
  965. var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
  966. nonJitteredViewProjMatrix0 = gpuNonJitteredProj * camera.worldToCameraMatrix;
  967. xrId0 = xr.multipassId;
  968. #endif
  969. LensFlareCommonSRP.ComputeOcclusion(
  970. data.material, camera, xr, xr.multipassId,
  971. data.width, data.height,
  972. data.usePanini, data.paniniDistance, data.paniniCropToFit, true,
  973. camera.transform.position,
  974. nonJitteredViewProjMatrix0,
  975. ctx.cmd,
  976. false, false, null, null);
  977. #if ENABLE_VR && ENABLE_XR_MODULE
  978. if (xr.enabled && xr.singlePassEnabled)
  979. {
  980. //ctx.cmd.SetGlobalTexture(m_Depth.name, m_Depth.nameID);
  981. for (int xrIdx = 1; xrIdx < xr.viewCount; ++xrIdx)
  982. {
  983. Matrix4x4 gpuVPXR = GL.GetGPUProjectionMatrix(data.cameraData.GetProjectionMatrixNoJitter(xrIdx), true) * data.cameraData.GetViewMatrix(xrIdx);
  984. // Bypass single pass version
  985. LensFlareCommonSRP.ComputeOcclusion(
  986. data.material, camera, xr, xrIdx,
  987. data.width, data.height,
  988. data.usePanini, data.paniniDistance, data.paniniCropToFit, true,
  989. camera.transform.position,
  990. gpuVPXR,
  991. ctx.cmd,
  992. false, false, null, null);
  993. }
  994. }
  995. #endif
  996. });
  997. }
  998. }
/// <summary>
/// Records the data-driven lens flare render pass into the given destination.
/// Declares either the occlusion RT or the depth texture as an input depending on
/// platform support, and handles single-pass XR by rendering each eye with its own
/// no-jitter view-projection matrix.
/// </summary>
public void RenderLensFlareDataDriven(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, in TextureHandle destination)
{
    using (var builder = renderGraph.AddUnsafePass<LensFlarePassData>("Lens Flare Data Driven Pass", out var passData, ProfilingSampler.Get(URPProfileId.LensFlareDataDriven)))
    {
        // Use WriteTexture here because DoLensFlareDataDrivenCommon will call SetRenderTarget internally.
        // TODO RENDERGRAPH: convert SRP core lens flare to be rendergraph friendly
        passData.destinationTexture = destination;
        builder.UseTexture(destination, AccessFlags.Write);
        passData.sourceDescriptor = m_Descriptor;
        passData.cameraData = cameraData;
        passData.material = m_Materials.lensFlareDataDriven;
        passData.width = (float)m_Descriptor.width;
        passData.height = (float)m_Descriptor.height;
        // Flare renders over the full descriptor-sized viewport.
        passData.viewport.x = 0.0f;
        passData.viewport.y = 0.0f;
        passData.viewport.width = (float)m_Descriptor.width;
        passData.viewport.height = (float)m_Descriptor.height;
        if (m_PaniniProjection.IsActive())
        {
            passData.usePanini = true;
            passData.paniniDistance = m_PaniniProjection.distance.value;
            passData.paniniCropToFit = m_PaniniProjection.cropToFit.value;
        }
        else
        {
            // Neutral Panini parameters when the effect is inactive.
            passData.usePanini = false;
            passData.paniniDistance = 1.0f;
            passData.paniniCropToFit = 1.0f;
        }

        // Occlusion comes from the precomputed occlusion RT where supported,
        // otherwise from sampling the camera depth texture directly.
        if (LensFlareCommonSRP.IsOcclusionRTCompatible())
        {
            TextureHandle occlusionHandle = renderGraph.ImportTexture(LensFlareCommonSRP.occlusionRT);
            builder.UseTexture(occlusionHandle, AccessFlags.Read);
        }
        else
        {
            builder.UseTexture(resourceData.cameraDepthTexture, AccessFlags.Read);
        }

        builder.SetRenderFunc(static (LensFlarePassData data, UnsafeGraphContext ctx) =>
        {
            Camera camera = data.cameraData.camera;
            XRPass xr = data.cameraData.xr;

#if ENABLE_VR && ENABLE_XR_MODULE
            // Not VR or Multi-Pass
            if (!xr.enabled ||
                (xr.enabled && !xr.singlePassEnabled))
#endif
            {
                var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
                Matrix4x4 nonJitteredViewProjMatrix0 = gpuNonJitteredProj * camera.worldToCameraMatrix;
                LensFlareCommonSRP.DoLensFlareDataDrivenCommon(
                    data.material, data.cameraData.camera, data.viewport, xr, data.cameraData.xr.multipassId,
                    data.width, data.height,
                    data.usePanini, data.paniniDistance, data.paniniCropToFit,
                    true,
                    camera.transform.position,
                    nonJitteredViewProjMatrix0,
                    ctx.cmd,
                    false, false, null, null,
                    data.destinationTexture,
                    (Light light, Camera cam, Vector3 wo) => { return GetLensFlareLightAttenuation(light, cam, wo); },
                    false);
            }
#if ENABLE_VR && ENABLE_XR_MODULE
            else
            {
                // Single-pass XR: render the flares once per eye with that eye's matrices.
                for (int xrIdx = 0; xrIdx < xr.viewCount; ++xrIdx)
                {
                    Matrix4x4 nonJitteredViewProjMatrix_k = GL.GetGPUProjectionMatrix(data.cameraData.GetProjectionMatrixNoJitter(xrIdx), true) * data.cameraData.GetViewMatrix(xrIdx);
                    LensFlareCommonSRP.DoLensFlareDataDrivenCommon(
                        data.material, data.cameraData.camera, data.viewport, xr, data.cameraData.xr.multipassId,
                        data.width, data.height,
                        data.usePanini, data.paniniDistance, data.paniniCropToFit,
                        true,
                        camera.transform.position,
                        nonJitteredViewProjMatrix_k,
                        ctx.cmd,
                        false, false, null, null,
                        data.destinationTexture,
                        (Light light, Camera cam, Vector3 wo) => { return GetLensFlareLightAttenuation(light, cam, wo); },
                        false);
                }
            }
#endif
        });
    }
}
  1086. #endregion
  1087. #region LensFlareScreenSpace
// Payload handed from RenderLensFlareScreenSpace's setup phase to its render function.
private class LensFlareScreenSpacePassData
{
    internal TextureHandle destinationTexture;   // Final screen-space flare target.
    internal TextureHandle streakTmpTexture;     // Ping/pong temporaries for the streak blur.
    internal TextureHandle streakTmpTexture2;
    internal TextureHandle originalBloomTexture;
    internal TextureHandle screenSpaceLensFlareBloomMipTexture; // Bloom mip used as the flare source.
    internal TextureHandle result;
    internal RenderTextureDescriptor sourceDescriptor;
    internal Camera camera;
    internal Material material;                  // Screen-space lens flare material.
    internal ScreenSpaceLensFlare lensFlareScreenSpace; // NOTE: reference, assumed constant until executed.
    internal int downsample;                     // Resolution divisor from the volume's resolution setting.
}
/// <summary>
/// Records the screen-space lens flare pass. Creates the streak/result textures at the
/// downsampled resolution configured on the volume component, then defers the actual effect
/// to LensFlareCommonSRP.DoLensFlareScreenSpaceCommon.
/// </summary>
/// <param name="renderGraph">Render graph used to record the pass.</param>
/// <param name="camera">Camera the flare is rendered for.</param>
/// <param name="destination">Destination texture (written internally by the SRP-core helper).</param>
/// <param name="originalBloomTexture">Full bloom texture; returned after the flare pass is recorded.</param>
/// <param name="screenSpaceLensFlareBloomMipTexture">Bloom mip used as the flare source.</param>
/// <param name="enableXR">NOTE(review): not referenced in this method body — confirm whether it is still needed.</param>
/// <returns>The bloom texture handle stored in the pass data.</returns>
public TextureHandle RenderLensFlareScreenSpace(RenderGraph renderGraph, Camera camera, in TextureHandle destination, TextureHandle originalBloomTexture, TextureHandle screenSpaceLensFlareBloomMipTexture, bool enableXR)
{
    // Flare textures are created at a fraction of the main descriptor resolution.
    var downsample = (int) m_LensFlareScreenSpace.resolution.value;

    int width = m_Descriptor.width / downsample;
    int height = m_Descriptor.height / downsample;

    var streakTextureDesc = GetCompatibleDescriptor(m_Descriptor, width, height, m_DefaultColorFormat);
    var streakTmpTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, streakTextureDesc, "_StreakTmpTexture", true, FilterMode.Bilinear);
    var streakTmpTexture2 = UniversalRenderer.CreateRenderGraphTexture(renderGraph, streakTextureDesc, "_StreakTmpTexture2", true, FilterMode.Bilinear);
    var resultTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, streakTextureDesc, "Lens Flare Screen Space Result", true, FilterMode.Bilinear);

    using (var builder = renderGraph.AddUnsafePass<LensFlareScreenSpacePassData>("Lens Flare Screen Space Pass", out var passData, ProfilingSampler.Get(URPProfileId.LensFlareScreenSpace)))
    {
        // Use WriteTexture here because DoLensFlareScreenSpaceCommon will call SetRenderTarget internally.
        // TODO RENDERGRAPH: convert SRP core lensflare to be rendergraph friendly
        passData.destinationTexture = destination;
        builder.UseTexture(destination, AccessFlags.Write);
        passData.streakTmpTexture = streakTmpTexture;
        builder.UseTexture(streakTmpTexture, AccessFlags.ReadWrite);
        passData.streakTmpTexture2 = streakTmpTexture2;
        builder.UseTexture(streakTmpTexture2, AccessFlags.ReadWrite);
        passData.screenSpaceLensFlareBloomMipTexture = screenSpaceLensFlareBloomMipTexture;
        builder.UseTexture(screenSpaceLensFlareBloomMipTexture, AccessFlags.ReadWrite);
        passData.originalBloomTexture = originalBloomTexture;
        builder.UseTexture(originalBloomTexture, AccessFlags.ReadWrite);
        passData.sourceDescriptor = m_Descriptor;
        passData.camera = camera;
        passData.material = m_Materials.lensFlareScreenSpace;
        passData.lensFlareScreenSpace = m_LensFlareScreenSpace; // NOTE: reference, assumed constant until executed.
        passData.downsample = downsample;
        passData.result = resultTexture;
        builder.UseTexture(resultTexture, AccessFlags.Write);

        builder.SetRenderFunc(static (LensFlareScreenSpacePassData data, UnsafeGraphContext context) =>
        {
            var cmd = context.cmd;
            var camera = data.camera;
            var lensFlareScreenSpace = data.lensFlareScreenSpace;

            // The Vector4 packing below mirrors what the SRP-core helper expects;
            // "free slot" components are intentionally zero.
            LensFlareCommonSRP.DoLensFlareScreenSpaceCommon(
                data.material,
                camera,
                (float)data.sourceDescriptor.width,
                (float)data.sourceDescriptor.height,
                data.lensFlareScreenSpace.tintColor.value,
                data.originalBloomTexture,
                data.screenSpaceLensFlareBloomMipTexture,
                null, // We don't have any spectral LUT in URP
                data.streakTmpTexture,
                data.streakTmpTexture2,
                new Vector4(
                    lensFlareScreenSpace.intensity.value,
                    lensFlareScreenSpace.firstFlareIntensity.value,
                    lensFlareScreenSpace.secondaryFlareIntensity.value,
                    lensFlareScreenSpace.warpedFlareIntensity.value),
                new Vector4(
                    lensFlareScreenSpace.vignetteEffect.value,
                    lensFlareScreenSpace.startingPosition.value,
                    lensFlareScreenSpace.scale.value,
                    0), // Free slot, not used
                new Vector4(
                    lensFlareScreenSpace.samples.value,
                    lensFlareScreenSpace.sampleDimmer.value,
                    lensFlareScreenSpace.chromaticAbberationIntensity.value,
                    0), // No need to pass a chromatic aberration sample count, hardcoded at 3 in shader
                new Vector4(
                    lensFlareScreenSpace.streaksIntensity.value,
                    lensFlareScreenSpace.streaksLength.value,
                    lensFlareScreenSpace.streaksOrientation.value,
                    lensFlareScreenSpace.streaksThreshold.value),
                new Vector4(
                    data.downsample,
                    lensFlareScreenSpace.warpedFlareScale.value.x,
                    lensFlareScreenSpace.warpedFlareScale.value.y,
                    0), // Free slot, not used
                cmd,
                data.result,
                false);
        });

        return passData.originalBloomTexture;
    }
}
  1180. #endregion
  1181. static private void ScaleViewportAndBlit(RasterCommandBuffer cmd, RTHandle sourceTextureHdl, RTHandle dest, UniversalCameraData cameraData, Material material)
  1182. {
  1183. Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(sourceTextureHdl, dest, cameraData);
  1184. RenderTargetIdentifier cameraTarget = BuiltinRenderTextureType.CameraTarget;
  1185. #if ENABLE_VR && ENABLE_XR_MODULE
  1186. if (cameraData.xr.enabled)
  1187. cameraTarget = cameraData.xr.renderTarget;
  1188. #endif
  1189. if (dest.nameID == cameraTarget || cameraData.targetTexture != null)
  1190. cmd.SetViewport(cameraData.pixelRect);
  1191. Blitter.BlitTexture(cmd, sourceTextureHdl, scaleBias, material, 0);
  1192. }
  1193. #region FinalPass
// Pass data for the final setup pass (scaled FXAA / FSR color encoding preparation).
private class PostProcessingFinalSetupPassData
{
    internal TextureHandle destinationTexture; // Render attachment of the setup pass.
    internal TextureHandle sourceTexture;      // Post-FX color input.
    internal Material material;                // scalingSetup material with keywords configured at record time.
    internal UniversalCameraData cameraData;
}
  1201. public void RenderFinalSetup(RenderGraph renderGraph, UniversalCameraData cameraData, in TextureHandle source, in TextureHandle destination, ref FinalBlitSettings settings)
  1202. {
  1203. // Scaled FXAA
  1204. using (var builder = renderGraph.AddRasterRenderPass<PostProcessingFinalSetupPassData>("Postprocessing Final Setup Pass", out var passData, ProfilingSampler.Get(URPProfileId.RG_FinalSetup)))
  1205. {
  1206. Material material = m_Materials.scalingSetup;
  1207. if (settings.isFxaaEnabled)
  1208. material.EnableKeyword(ShaderKeywordStrings.Fxaa);
  1209. if (settings.isFsrEnabled)
  1210. material.EnableKeyword(settings.hdrOperations.HasFlag(HDROutputUtils.Operation.ColorEncoding) ? ShaderKeywordStrings.Gamma20AndHDRInput : ShaderKeywordStrings.Gamma20);
  1211. if (settings.hdrOperations.HasFlag(HDROutputUtils.Operation.ColorEncoding))
  1212. SetupHDROutput(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, material, settings.hdrOperations);
  1213. if (settings.isAlphaOutputEnabled)
  1214. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, settings.isAlphaOutputEnabled);
  1215. builder.AllowGlobalStateModification(true);
  1216. passData.destinationTexture = destination;
  1217. builder.SetRenderAttachment(destination, 0, AccessFlags.Write);
  1218. passData.sourceTexture = source;
  1219. builder.UseTexture(source, AccessFlags.Read);
  1220. passData.cameraData = cameraData;
  1221. passData.material = material;
  1222. builder.SetRenderFunc(static (PostProcessingFinalSetupPassData data, RasterGraphContext context) =>
  1223. {
  1224. var cmd = context.cmd;
  1225. RTHandle sourceTextureHdl = data.sourceTexture;
  1226. PostProcessUtils.SetSourceSize(cmd, sourceTextureHdl);
  1227. ScaleViewportAndBlit(context.cmd, sourceTextureHdl, data.destinationTexture, data.cameraData, data.material);
  1228. });
  1229. return;
  1230. }
  1231. }
// Pass data for the FSR (EASU) upscale pass.
private class PostProcessingFinalFSRScalePassData
{
    internal TextureHandle destinationTexture; // Upscaled output at display resolution.
    internal TextureHandle sourceTexture;      // Render-resolution input.
    internal Material material;                // EASU material.
    internal bool enableAlphaOutput;           // Whether the alpha channel must be preserved.
}
  1239. public void RenderFinalFSRScale(RenderGraph renderGraph, in TextureHandle source, in TextureHandle destination, bool enableAlphaOutput)
  1240. {
  1241. // FSR upscale
  1242. m_Materials.easu.shaderKeywords = null;
  1243. using (var builder = renderGraph.AddRasterRenderPass<PostProcessingFinalFSRScalePassData>("Postprocessing Final FSR Scale Pass", out var passData, ProfilingSampler.Get(URPProfileId.RG_FinalFSRScale)))
  1244. {
  1245. builder.AllowGlobalStateModification(true);
  1246. passData.destinationTexture = destination;
  1247. builder.SetRenderAttachment(destination, 0, AccessFlags.Write);
  1248. passData.sourceTexture = source;
  1249. builder.UseTexture(source, AccessFlags.Read);
  1250. passData.material = m_Materials.easu;
  1251. passData.enableAlphaOutput = enableAlphaOutput;
  1252. builder.SetRenderFunc(static (PostProcessingFinalFSRScalePassData data, RasterGraphContext context) =>
  1253. {
  1254. var cmd = context.cmd;
  1255. var sourceTex = data.sourceTexture;
  1256. var destTex = data.destinationTexture;
  1257. var material = data.material;
  1258. var enableAlphaOutput = data.enableAlphaOutput;
  1259. RTHandle sourceHdl = (RTHandle)sourceTex;
  1260. RTHandle destHdl = (RTHandle)destTex;
  1261. var fsrInputSize = new Vector2(sourceHdl.referenceSize.x, sourceHdl.referenceSize.y);
  1262. var fsrOutputSize = new Vector2(destHdl.referenceSize.x, destHdl.referenceSize.y);
  1263. FSRUtils.SetEasuConstants(cmd, fsrInputSize, fsrInputSize, fsrOutputSize);
  1264. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, enableAlphaOutput);
  1265. Vector2 viewportScale = sourceHdl.useScaling ? new Vector2(sourceHdl.rtHandleProperties.rtHandleScale.x, sourceHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  1266. Blitter.BlitTexture(cmd, sourceHdl, viewportScale, material, 0);
  1267. });
  1268. return;
  1269. }
  1270. }
// Pass data for the final blit pass (FXAA / RCAS / HDR output / alpha handling).
private class PostProcessingFinalBlitPassData
{
    internal TextureHandle destinationTexture; // Final post-processing target.
    internal TextureHandle sourceTexture;      // Input color texture.
    internal Material material;                // finalPass material.
    internal UniversalCameraData cameraData;
    internal FinalBlitSettings settings;       // Snapshot of the blit settings at record time.
}
  1279. /// <summary>
  1280. /// Final blit settings.
  1281. /// </summary>
  1282. public struct FinalBlitSettings
  1283. {
  1284. /// <summary>Is FXAA enabled</summary>
  1285. public bool isFxaaEnabled;
  1286. /// <summary>Is FSR Enabled.</summary>
  1287. public bool isFsrEnabled;
  1288. /// <summary>Is TAA sharpening enabled.</summary>
  1289. public bool isTaaSharpeningEnabled;
  1290. /// <summary>True if final blit requires HDR output.</summary>
  1291. public bool requireHDROutput;
  1292. /// <summary>True if final blit needs to resolve to debug screen.</summary>
  1293. public bool resolveToDebugScreen;
  1294. /// <summary>True if final blit needs to output alpha channel.</summary>
  1295. public bool isAlphaOutputEnabled;
  1296. /// <summary>HDR Operations</summary>
  1297. public HDROutputUtils.Operation hdrOperations;
  1298. /// <summary>
  1299. /// Create FinalBlitSettings
  1300. /// </summary>
  1301. /// <returns>New FinalBlitSettings</returns>
  1302. public static FinalBlitSettings Create()
  1303. {
  1304. FinalBlitSettings s = new FinalBlitSettings();
  1305. s.isFxaaEnabled = false;
  1306. s.isFsrEnabled = false;
  1307. s.isTaaSharpeningEnabled = false;
  1308. s.requireHDROutput = false;
  1309. s.resolveToDebugScreen = false;
  1310. s.isAlphaOutputEnabled = false;
  1311. s.hdrOperations = HDROutputUtils.Operation.None;
  1312. return s;
  1313. }
  1314. };
/// <summary>
/// Records the final blit pass: optionally applies FXAA, FSR RCAS (or standalone RCAS for
/// TAA sharpening) and alpha output, then blits the source to the post-processing target,
/// y-flipping when writing to the backbuffer.
/// </summary>
/// <param name="renderGraph">Render graph used to record the pass.</param>
/// <param name="cameraData">Camera data (FSR sharpness, XR state, viewport).</param>
/// <param name="source">Input color texture.</param>
/// <param name="overlayUITexture">Overlay UI texture, read only when HDR output with color encoding is active.</param>
/// <param name="postProcessingTarget">Destination texture.</param>
/// <param name="settings">Final blit settings snapshot.</param>
public void RenderFinalBlit(RenderGraph renderGraph, UniversalCameraData cameraData, in TextureHandle source, in TextureHandle overlayUITexture, in TextureHandle postProcessingTarget, ref FinalBlitSettings settings)
{
    using (var builder = renderGraph.AddRasterRenderPass<PostProcessingFinalBlitPassData>("Postprocessing Final Blit Pass", out var passData, ProfilingSampler.Get(URPProfileId.RG_FinalBlit)))
    {
        builder.AllowGlobalStateModification(true);
        passData.destinationTexture = postProcessingTarget;
        builder.SetRenderAttachment(postProcessingTarget, 0, AccessFlags.Write);
        passData.sourceTexture = source;
        builder.UseTexture(source, AccessFlags.Read);
        passData.cameraData = cameraData;
        passData.material = m_Materials.finalPass;
        passData.settings = settings;

        if (settings.requireHDROutput && m_EnableColorEncodingIfNeeded)
            builder.UseTexture(overlayUITexture, AccessFlags.Read);

        builder.SetRenderFunc(static (PostProcessingFinalBlitPassData data, RasterGraphContext context) =>
        {
            var cmd = context.cmd;
            var material = data.material;
            var isFxaaEnabled = data.settings.isFxaaEnabled;
            var isFsrEnabled = data.settings.isFsrEnabled;
            var isRcasEnabled = data.settings.isTaaSharpeningEnabled;
            var requireHDROutput = data.settings.requireHDROutput;
            var resolveToDebugScreen = data.settings.resolveToDebugScreen;
            var isAlphaOutputEnabled = data.settings.isAlphaOutputEnabled;
            RTHandle sourceTextureHdl = data.sourceTexture;
            RTHandle destinationTextureHdl = data.destinationTexture;

            PostProcessUtils.SetSourceSize(cmd, data.sourceTexture);

            if (isFxaaEnabled)
                material.EnableKeyword(ShaderKeywordStrings.Fxaa);

            if (isFsrEnabled)
            {
                // RCAS
                // Use the override value if it's available, otherwise use the default.
                float sharpness = data.cameraData.fsrOverrideSharpness ? data.cameraData.fsrSharpness : FSRUtils.kDefaultSharpnessLinear;

                // Set up the parameters for the RCAS pass unless the sharpness value indicates that it wont have any effect.
                // NOTE(review): the guard reads fsrSharpness while 'sharpness' may come from the default — confirm this is intended.
                if (data.cameraData.fsrSharpness > 0.0f)
                {
                    // RCAS is performed during the final post blit, but we set up the parameters here for better logical grouping.
                    material.EnableKeyword(requireHDROutput ? ShaderKeywordStrings.EasuRcasAndHDRInput : ShaderKeywordStrings.Rcas);
                    FSRUtils.SetRcasConstantsLinear(cmd, sharpness);
                }
            }
            else if (isRcasEnabled) // RCAS only
            {
                // Reuse RCAS as a standalone sharpening filter for TAA.
                // If FSR is enabled then it overrides the sharpening/TAA setting and we skip it.
                material.EnableKeyword(ShaderKeywordStrings.Rcas);
                FSRUtils.SetRcasConstantsLinear(cmd, data.cameraData.taaSettings.contrastAdaptiveSharpening);
            }

            if (isAlphaOutputEnabled)
                CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, isAlphaOutputEnabled);

            bool isRenderToBackBufferTarget = !data.cameraData.isSceneViewCamera;
#if ENABLE_VR && ENABLE_XR_MODULE
            if (data.cameraData.xr.enabled)
                isRenderToBackBufferTarget = destinationTextureHdl == data.cameraData.xr.renderTarget;
#endif
            // HDR debug views force-renders to DebugScreenTexture.
            isRenderToBackBufferTarget &= !resolveToDebugScreen;

            Vector2 viewportScale = sourceTextureHdl.useScaling ? new Vector2(sourceTextureHdl.rtHandleProperties.rtHandleScale.x, sourceTextureHdl.rtHandleProperties.rtHandleScale.y) : Vector2.one;

            // We y-flip if
            // 1) we are blitting from render texture to back buffer(UV starts at bottom) and
            // 2) renderTexture starts UV at top
            bool yflip = isRenderToBackBufferTarget && data.cameraData.targetTexture == null && SystemInfo.graphicsUVStartsAtTop;
            Vector4 scaleBias = yflip ? new Vector4(viewportScale.x, -viewportScale.y, 0, viewportScale.y) : new Vector4(viewportScale.x, viewportScale.y, 0, 0);

            cmd.SetViewport(data.cameraData.pixelRect);
            Blitter.BlitTexture(cmd, sourceTextureHdl, scaleBias, material, 0);
        });

        return;
    }
}
  1385. public void RenderFinalPassRenderGraph(RenderGraph renderGraph, ContextContainer frameData, in TextureHandle source, in TextureHandle overlayUITexture, in TextureHandle postProcessingTarget, bool enableColorEncodingIfNeeded)
  1386. {
  1387. var stack = VolumeManager.instance.stack;
  1388. m_Tonemapping = stack.GetComponent<Tonemapping>();
  1389. m_FilmGrain = stack.GetComponent<FilmGrain>();
  1390. m_Tonemapping = stack.GetComponent<Tonemapping>();
  1391. UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
  1392. var material = m_Materials.finalPass;
  1393. material.shaderKeywords = null;
  1394. FinalBlitSettings settings = FinalBlitSettings.Create();
  1395. // TODO RENDERGRAPH: when we remove the old path we should review the naming of these variables...
  1396. // m_HasFinalPass is used to let FX passes know when they are not being called by the actual final pass, so they can skip any "final work"
  1397. m_HasFinalPass = false;
  1398. // m_IsFinalPass is used by effects called by RenderFinalPassRenderGraph, so we let them know that we are in a final PP pass
  1399. m_IsFinalPass = true;
  1400. m_EnableColorEncodingIfNeeded = enableColorEncodingIfNeeded;
  1401. if (m_FilmGrain.IsActive())
  1402. {
  1403. material.EnableKeyword(ShaderKeywordStrings.FilmGrain);
  1404. PostProcessUtils.ConfigureFilmGrain(
  1405. m_Data,
  1406. m_FilmGrain,
  1407. cameraData.pixelWidth, cameraData.pixelHeight,
  1408. material
  1409. );
  1410. }
  1411. if (cameraData.isDitheringEnabled)
  1412. {
  1413. material.EnableKeyword(ShaderKeywordStrings.Dithering);
  1414. m_DitheringTextureIndex = PostProcessUtils.ConfigureDithering(
  1415. m_Data,
  1416. m_DitheringTextureIndex,
  1417. cameraData.pixelWidth, cameraData.pixelHeight,
  1418. material
  1419. );
  1420. }
  1421. if (RequireSRGBConversionBlitToBackBuffer(cameraData.requireSrgbConversion))
  1422. material.EnableKeyword(ShaderKeywordStrings.LinearToSRGBConversion);
  1423. settings.hdrOperations = HDROutputUtils.Operation.None;
  1424. settings.requireHDROutput = RequireHDROutput(cameraData);
  1425. if (settings.requireHDROutput)
  1426. {
  1427. // If there is a final post process pass, it's always the final pass so do color encoding
  1428. settings.hdrOperations = m_EnableColorEncodingIfNeeded ? HDROutputUtils.Operation.ColorEncoding : HDROutputUtils.Operation.None;
  1429. // If the color space conversion wasn't applied by the uber pass, do it here
  1430. if (!cameraData.postProcessEnabled)
  1431. settings.hdrOperations |= HDROutputUtils.Operation.ColorConversion;
  1432. SetupHDROutput(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, material, settings.hdrOperations);
  1433. }
  1434. DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
  1435. bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
  1436. debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(renderGraph, cameraData, !m_HasFinalPass && !resolveToDebugScreen);
  1437. settings.isAlphaOutputEnabled = cameraData.isAlphaOutputEnabled;
  1438. settings.isFxaaEnabled = (cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing);
  1439. settings.isFsrEnabled = ((cameraData.imageScalingMode == ImageScalingMode.Upscaling) && (cameraData.upscalingFilter == ImageUpscalingFilter.FSR));
  1440. // Reuse RCAS pass as an optional standalone post sharpening pass for TAA.
  1441. // This avoids the cost of EASU and is available for other upscaling options.
  1442. // If FSR is enabled then FSR settings override the TAA settings and we perform RCAS only once.
  1443. // If STP is enabled, then TAA sharpening has already been performed inside STP.
  1444. settings.isTaaSharpeningEnabled = (cameraData.IsTemporalAAEnabled() && cameraData.taaSettings.contrastAdaptiveSharpening > 0.0f) && !settings.isFsrEnabled && !cameraData.IsSTPEnabled();
  1445. var tempRtDesc = cameraData.cameraTargetDescriptor;
  1446. tempRtDesc.msaaSamples = 1;
  1447. tempRtDesc.depthBufferBits = 0;
  1448. // Select a UNORM format since we've already performed tonemapping. (Values are in 0-1 range)
  1449. // This improves precision and is required if we want to avoid excessive banding when FSR is in use.
  1450. if (!settings.requireHDROutput)
  1451. tempRtDesc.graphicsFormat = UniversalRenderPipeline.MakeUnormRenderTextureGraphicsFormat();
  1452. var scalingSetupTarget = UniversalRenderer.CreateRenderGraphTexture(renderGraph, tempRtDesc, "scalingSetupTarget", true, FilterMode.Point);
  1453. var upscaleRtDesc = cameraData.cameraTargetDescriptor;
  1454. upscaleRtDesc.msaaSamples = 1;
  1455. upscaleRtDesc.depthBufferBits = 0;
  1456. upscaleRtDesc.width = cameraData.pixelWidth;
  1457. upscaleRtDesc.height = cameraData.pixelHeight;
  1458. var upScaleTarget = UniversalRenderer.CreateRenderGraphTexture(renderGraph, upscaleRtDesc, "_UpscaledTexture", true, FilterMode.Point);
  1459. var currentSource = source;
  1460. if (cameraData.imageScalingMode != ImageScalingMode.None)
  1461. {
  1462. // When FXAA is enabled in scaled renders, we execute it in a separate blit since it's not designed to be used in
  1463. // situations where the input and output resolutions do not match.
  1464. // When FSR is active, we always need an additional pass since it has a very particular color encoding requirement.
  1465. // NOTE: An ideal implementation could inline this color conversion logic into the UberPost pass, but the current code structure would make
  1466. // this process very complex. Specifically, we'd need to guarantee that the uber post output is always written to a UNORM format render
  1467. // target in order to preserve the precision of specially encoded color data.
  1468. bool isSetupRequired = (settings.isFxaaEnabled || settings.isFsrEnabled);
  1469. // When FXAA is needed while scaling is active, we must perform it before the scaling takes place.
  1470. if (isSetupRequired)
  1471. {
  1472. RenderFinalSetup(renderGraph, cameraData, in currentSource, in scalingSetupTarget, ref settings);
  1473. currentSource = scalingSetupTarget;
  1474. // Indicate that we no longer need to perform FXAA in the final pass since it was already perfomed here.
  1475. settings.isFxaaEnabled = false;
  1476. }
  1477. switch (cameraData.imageScalingMode)
  1478. {
  1479. case ImageScalingMode.Upscaling:
  1480. {
  1481. switch (cameraData.upscalingFilter)
  1482. {
  1483. case ImageUpscalingFilter.Point:
  1484. {
  1485. // TAA post sharpening is an RCAS pass, avoid overriding it with point sampling.
  1486. if (!settings.isTaaSharpeningEnabled)
  1487. material.EnableKeyword(ShaderKeywordStrings.PointSampling);
  1488. break;
  1489. }
  1490. case ImageUpscalingFilter.Linear:
  1491. {
  1492. break;
  1493. }
  1494. case ImageUpscalingFilter.FSR:
  1495. {
  1496. RenderFinalFSRScale(renderGraph, in currentSource, in upScaleTarget, settings.isAlphaOutputEnabled);
  1497. currentSource = upScaleTarget;
  1498. break;
  1499. }
  1500. }
  1501. break;
  1502. }
  1503. case ImageScalingMode.Downscaling:
  1504. {
  1505. // In the downscaling case, we don't perform any sort of filter override logic since we always want linear filtering
  1506. // and it's already the default option in the shader.
  1507. // Also disable TAA post sharpening pass when downscaling.
  1508. settings.isTaaSharpeningEnabled = false;
  1509. break;
  1510. }
  1511. }
  1512. }
  1513. else if (settings.isFxaaEnabled)
  1514. {
  1515. // In unscaled renders, FXAA can be safely performed in the FinalPost shader
  1516. material.EnableKeyword(ShaderKeywordStrings.Fxaa);
  1517. }
  1518. RenderFinalBlit(renderGraph, cameraData, in currentSource, in overlayUITexture, in postProcessingTarget, ref settings);
  1519. }
  1520. #endregion
  1521. #region UberPost
// Pass data for the uber post-processing blit (color grading, tonemapping, alpha output).
private class UberPostPassData
{
    internal TextureHandle destinationTexture; // Render attachment of the uber pass.
    internal TextureHandle sourceTexture;      // Post-FX color input.
    internal TextureHandle lutTexture;         // Internal color grading LUT.
    internal TextureHandle depthTexture;       // Optional depth (forward depth texture or GBuffer depth slice).
    internal Vector4 lutParams;                // x: 1/lutWidth, y: 1/lutHeight, z: lutHeight-1, w: post-exposure (linear).
    internal TextureHandle userLutTexture;     // Optional user LUT.
    internal Vector4 userLutParams;            // x: 1/width, y: 1/height, z: height-1, w: contribution (zero when inactive).
    internal Material material;                // Uber material.
    internal UniversalCameraData cameraData;
    internal TonemappingMode toneMappingMode;  // Only applied when HDR grading is off.
    internal bool isHdrGrading;                // True when grading mode is HighDynamicRange.
    internal bool isBackbuffer;                // NOTE(review): not assigned in the visible code — confirm usage.
    internal bool enableAlphaOutput;
}
/// <summary>
/// Records the uber post-processing pass: applies the internal grading LUT (and optional user
/// LUT), tonemapping keywords and alpha output, then blits source to destination.
/// </summary>
/// <param name="renderGraph">Render graph used to record the pass.</param>
/// <param name="frameData">Frame data providing UniversalResourceData (depth/GBuffer access).</param>
/// <param name="cameraData">Camera data.</param>
/// <param name="postProcessingData">Post-processing settings (grading mode, LUT size).</param>
/// <param name="sourceTexture">Post-FX color input.</param>
/// <param name="destTexture">Destination texture.</param>
/// <param name="lutTexture">Internal color grading LUT.</param>
/// <param name="overlayUITexture">Overlay UI texture, read only for HDR output with color encoding.</param>
/// <param name="requireHDROutput">True when HDR display output is active.</param>
/// <param name="enableAlphaOutput">Whether the alpha channel must be preserved.</param>
/// <param name="resolveToDebugScreen">True when resolving to the HDR debug screen (unused in the visible body — confirm).</param>
public void RenderUberPost(RenderGraph renderGraph, ContextContainer frameData, UniversalCameraData cameraData, UniversalPostProcessingData postProcessingData, in TextureHandle sourceTexture, in TextureHandle destTexture, in TextureHandle lutTexture, in TextureHandle overlayUITexture, bool requireHDROutput, bool enableAlphaOutput, bool resolveToDebugScreen)
{
    var material = m_Materials.uber;
    bool hdrGrading = postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange;
    int lutHeight = postProcessingData.lutSize;
    int lutWidth = lutHeight * lutHeight;

    // Source material setup
    float postExposureLinear = Mathf.Pow(2f, m_ColorAdjustments.postExposure.value);
    Vector4 lutParams = new Vector4(1f / lutWidth, 1f / lutHeight, lutHeight - 1f, postExposureLinear);

    // NOTE(review): RTHandles.Alloc is called here every time the pass is recorded for the user
    // LUT — verify the handle's lifetime is managed elsewhere, otherwise this may leak per frame.
    RTHandle userLutRThdl = m_ColorLookup.texture.value ? RTHandles.Alloc(m_ColorLookup.texture.value) : null;
    TextureHandle userLutTexture = userLutRThdl != null ? renderGraph.ImportTexture(userLutRThdl) : TextureHandle.nullHandle;
    Vector4 userLutParams = !m_ColorLookup.IsActive()
        ? Vector4.zero
        : new Vector4(1f / m_ColorLookup.texture.value.width,
            1f / m_ColorLookup.texture.value.height,
            m_ColorLookup.texture.value.height - 1f,
            m_ColorLookup.contribution.value);

    using (var builder = renderGraph.AddRasterRenderPass<UberPostPassData>("Blit Post Processing", out var passData, ProfilingSampler.Get(URPProfileId.RG_UberPost)))
    {
        UniversalRenderer renderer = cameraData.renderer as UniversalRenderer;
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();

        // Depth is sourced from the depth texture in forward, or from the GBuffer depth slice in deferred.
        if (cameraData.requiresDepthTexture && renderer != null)
        {
            if (renderer.renderingModeActual != RenderingMode.Deferred)
            {
                builder.UseGlobalTexture(s_CameraDepthTextureID);
                passData.depthTexture = resourceData.activeDepthTexture;
            }
            else if (renderer.deferredLights.GbufferDepthIndex != -1)
            {
                builder.UseTexture(resourceData.gBuffer[renderer.deferredLights.GbufferDepthIndex]);
                passData.depthTexture = resourceData.gBuffer[renderer.deferredLights.GbufferDepthIndex];
            }
        }

        if (cameraData.requiresOpaqueTexture && renderer != null)
            builder.UseGlobalTexture(s_CameraOpaqueTextureID);

        builder.AllowGlobalStateModification(true);
        passData.destinationTexture = destTexture;
        builder.SetRenderAttachment(destTexture, 0, AccessFlags.Write);
        passData.sourceTexture = sourceTexture;
        builder.UseTexture(sourceTexture, AccessFlags.Read);
        passData.lutTexture = lutTexture;
        builder.UseTexture(lutTexture, AccessFlags.Read);
        passData.lutParams = lutParams;

        if (userLutTexture.IsValid())
        {
            passData.userLutTexture = userLutTexture;
            builder.UseTexture(userLutTexture, AccessFlags.Read);
        }

        if (m_Bloom.IsActive())
            builder.UseTexture(_BloomMipUp[0], AccessFlags.Read);
        if (requireHDROutput && m_EnableColorEncodingIfNeeded)
            builder.UseTexture(overlayUITexture, AccessFlags.Read);

        passData.userLutParams = userLutParams;
        passData.cameraData = cameraData;
        passData.material = material;
        passData.toneMappingMode = m_Tonemapping.mode.value;
        passData.isHdrGrading = hdrGrading;
        passData.enableAlphaOutput = enableAlphaOutput;

        builder.SetRenderFunc(static (UberPostPassData data, RasterGraphContext context) =>
        {
            var cmd = context.cmd;
            var camera = data.cameraData.camera;
            var material = data.material;
            RTHandle sourceTextureHdl = data.sourceTexture;

            if(data.depthTexture.IsValid())
                material.SetTexture(s_CameraDepthTextureID, data.depthTexture);
            material.SetTexture(ShaderConstants._InternalLut, data.lutTexture);
            material.SetVector(ShaderConstants._Lut_Params, data.lutParams);
            material.SetTexture(ShaderConstants._UserLut, data.userLutTexture);
            material.SetVector(ShaderConstants._UserLut_Params, data.userLutParams);

            // HDR grading bakes the tonemapper into the LUT; otherwise pick the tonemap keyword here.
            if (data.isHdrGrading)
            {
                material.EnableKeyword(ShaderKeywordStrings.HDRGrading);
            }
            else
            {
                switch (data.toneMappingMode)
                {
                    case TonemappingMode.Neutral: material.EnableKeyword(ShaderKeywordStrings.TonemapNeutral); break;
                    case TonemappingMode.ACES: material.EnableKeyword(ShaderKeywordStrings.TonemapACES); break;
                    default: break; // None
                }
            }

            CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, data.enableAlphaOutput);

            // Done with Uber, blit it
            ScaleViewportAndBlit(cmd, sourceTextureHdl, data.destinationTexture, data.cameraData, material);
        });

        return;
    }
}
  1629. #endregion
  1630. private class PostFXSetupPassData { }
  1631. public void RenderPostProcessingRenderGraph(RenderGraph renderGraph, ContextContainer frameData, in TextureHandle activeCameraColorTexture, in TextureHandle lutTexture, in TextureHandle overlayUITexture, in TextureHandle postProcessingTarget, bool hasFinalPass, bool resolveToDebugScreen, bool enableColorEndingIfNeeded)
  1632. {
  1633. UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();
  1634. UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
  1635. UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
  1636. UniversalPostProcessingData postProcessingData = frameData.Get<UniversalPostProcessingData>();
  1637. var stack = VolumeManager.instance.stack;
  1638. m_DepthOfField = stack.GetComponent<DepthOfField>();
  1639. m_MotionBlur = stack.GetComponent<MotionBlur>();
  1640. m_PaniniProjection = stack.GetComponent<PaniniProjection>();
  1641. m_Bloom = stack.GetComponent<Bloom>();
  1642. m_LensFlareScreenSpace = stack.GetComponent<ScreenSpaceLensFlare>();
  1643. m_LensDistortion = stack.GetComponent<LensDistortion>();
  1644. m_ChromaticAberration = stack.GetComponent<ChromaticAberration>();
  1645. m_Vignette = stack.GetComponent<Vignette>();
  1646. m_ColorLookup = stack.GetComponent<ColorLookup>();
  1647. m_ColorAdjustments = stack.GetComponent<ColorAdjustments>();
  1648. m_Tonemapping = stack.GetComponent<Tonemapping>();
  1649. m_FilmGrain = stack.GetComponent<FilmGrain>();
  1650. m_UseFastSRGBLinearConversion = postProcessingData.useFastSRGBLinearConversion;
  1651. m_SupportDataDrivenLensFlare = postProcessingData.supportDataDrivenLensFlare;
  1652. m_SupportScreenSpaceLensFlare = postProcessingData.supportScreenSpaceLensFlare;
  1653. m_Descriptor = cameraData.cameraTargetDescriptor;
  1654. m_Descriptor.useMipMap = false;
  1655. m_Descriptor.autoGenerateMips = false;
  1656. m_HasFinalPass = hasFinalPass;
  1657. m_EnableColorEncodingIfNeeded = enableColorEndingIfNeeded;
  1658. ref ScriptableRenderer renderer = ref cameraData.renderer;
  1659. bool isSceneViewCamera = cameraData.isSceneViewCamera;
  1660. //We blit back and forth without msaa untill the last blit.
  1661. bool useStopNan = cameraData.isStopNaNEnabled && m_Materials.stopNaN != null;
  1662. bool useSubPixelMorpAA = cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing;
  1663. var dofMaterial = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian ? m_Materials.gaussianDepthOfField : m_Materials.bokehDepthOfField;
  1664. bool useDepthOfField = m_DepthOfField.IsActive() && !isSceneViewCamera && dofMaterial != null;
  1665. bool useLensFlare = !LensFlareCommonSRP.Instance.IsEmpty() && m_SupportDataDrivenLensFlare;
  1666. bool useLensFlareScreenSpace = m_LensFlareScreenSpace.IsActive() && m_SupportScreenSpaceLensFlare;
  1667. bool useMotionBlur = m_MotionBlur.IsActive() && !isSceneViewCamera;
  1668. bool usePaniniProjection = m_PaniniProjection.IsActive() && !isSceneViewCamera;
  1669. bool isFsrEnabled = ((cameraData.imageScalingMode == ImageScalingMode.Upscaling) && (cameraData.upscalingFilter == ImageUpscalingFilter.FSR));
  1670. // Disable MotionBlur in EditMode, so that editing remains clear and readable.
  1671. // NOTE: HDRP does the same via CoreUtils::AreAnimatedMaterialsEnabled().
  1672. // Disable MotionBlurMode.CameraAndObjects on renderers that do not support motion vectors
  1673. useMotionBlur = useMotionBlur && Application.isPlaying;
  1674. if (useMotionBlur && m_MotionBlur.mode.value == MotionBlurMode.CameraAndObjects)
  1675. {
  1676. useMotionBlur &= renderer.SupportsMotionVectors();
  1677. if (!useMotionBlur)
  1678. {
  1679. var warning = "Disabling Motion Blur for Camera And Objects because the renderer does not implement motion vectors.";
  1680. const int warningThrottleFrames = 60 * 1; // 60 FPS * 1 sec
  1681. if (Time.frameCount % warningThrottleFrames == 0)
  1682. Debug.LogWarning(warning);
  1683. }
  1684. }
  1685. // Note that enabling jitters uses the same CameraData::IsTemporalAAEnabled(). So if we add any other kind of overrides (like
  1686. // disable useTemporalAA if another feature is disabled) then we need to put it in CameraData::IsTemporalAAEnabled() as opposed
  1687. // to tweaking the value here.
  1688. bool useTemporalAA = cameraData.IsTemporalAAEnabled();
  1689. if (cameraData.antialiasing == AntialiasingMode.TemporalAntiAliasing && !useTemporalAA)
  1690. TemporalAA.ValidateAndWarn(cameraData);
  1691. // STP is only supported when TAA is enabled and all of its runtime requirements are met.
  1692. // See the comments for IsSTPEnabled() for more information.
  1693. bool useSTP = useTemporalAA && cameraData.IsSTPEnabled();
  1694. using (var builder = renderGraph.AddRasterRenderPass<PostFXSetupPassData>("Setup PostFX passes", out var passData,
  1695. ProfilingSampler.Get(URPProfileId.RG_SetupPostFX)))
  1696. {
  1697. // TODO RENDERGRAPH: properly setup dependencies between passes
  1698. builder.AllowPassCulling(false);
  1699. builder.AllowGlobalStateModification(true);
  1700. builder.SetRenderFunc(static (PostFXSetupPassData data, RasterGraphContext context) =>
  1701. {
  1702. // Setup projection matrix for cmd.DrawMesh()
  1703. context.cmd.SetGlobalMatrix(ShaderConstants._FullscreenProjMat, GL.GetGPUProjectionMatrix(Matrix4x4.identity, true));
  1704. });
  1705. }
  1706. TextureHandle currentSource = activeCameraColorTexture;
  1707. // Optional NaN killer before post-processing kicks in
  1708. // stopNaN may be null on Adreno 3xx. It doesn't support full shader level 3.5, but SystemInfo.graphicsShaderLevel is 35.
  1709. if (useStopNan)
  1710. {
  1711. RenderStopNaN(renderGraph, cameraData.cameraTargetDescriptor, in currentSource, out var stopNaNTarget);
  1712. currentSource = stopNaNTarget;
  1713. }
  1714. if(useSubPixelMorpAA)
  1715. {
  1716. RenderSMAA(renderGraph, resourceData, cameraData.antialiasingQuality, in currentSource, out var SMAATarget);
  1717. currentSource = SMAATarget;
  1718. }
  1719. // Depth of Field
  1720. // Adreno 3xx SystemInfo.graphicsShaderLevel is 35, but instancing support is disabled due to buggy drivers.
  1721. // DOF shader uses #pragma target 3.5 which adds requirement for instancing support, thus marking the shader unsupported on those devices.
  1722. if (useDepthOfField)
  1723. {
  1724. RenderDoF(renderGraph, resourceData, cameraData, in currentSource, out var DoFTarget);
  1725. currentSource = DoFTarget;
  1726. }
  1727. // Temporal Anti Aliasing
  1728. if (useTemporalAA)
  1729. {
  1730. if (useSTP)
  1731. {
  1732. RenderSTP(renderGraph, resourceData, cameraData, ref currentSource, out var StpTarget);
  1733. currentSource = StpTarget;
  1734. }
  1735. else
  1736. {
  1737. RenderTemporalAA(renderGraph, resourceData, cameraData, ref currentSource, out var TemporalAATarget);
  1738. currentSource = TemporalAATarget;
  1739. }
  1740. }
  1741. if(useMotionBlur)
  1742. {
  1743. RenderMotionBlur(renderGraph, resourceData, cameraData, in currentSource, out var MotionBlurTarget);
  1744. currentSource = MotionBlurTarget;
  1745. }
  1746. if(usePaniniProjection)
  1747. {
  1748. RenderPaniniProjection(renderGraph, cameraData.camera, in currentSource, out var PaniniTarget);
  1749. currentSource = PaniniTarget;
  1750. }
  1751. // Uberpost
  1752. {
  1753. // Reset uber keywords
  1754. m_Materials.uber.shaderKeywords = null;
  1755. // Bloom goes first
  1756. bool bloomActive = m_Bloom.IsActive();
  1757. // Even if bloom is not active, we still need the bloom texture when the lensFlareScreenSpace pass is active.
  1758. if (bloomActive || useLensFlareScreenSpace)
  1759. {
  1760. RenderBloomTexture(renderGraph, currentSource, out var BloomTexture, cameraData.isAlphaOutputEnabled);
  1761. if (useLensFlareScreenSpace)
  1762. {
  1763. int maxBloomMip = Mathf.Clamp(m_LensFlareScreenSpace.bloomMip.value, 0, m_Bloom.maxIterations.value/2);
  1764. BloomTexture = RenderLensFlareScreenSpace(renderGraph, cameraData.camera, in currentSource, _BloomMipUp[0], _BloomMipUp[maxBloomMip], cameraData.xr.enabled);
  1765. }
  1766. UberPostSetupBloomPass(renderGraph, in BloomTexture, m_Materials.uber);
  1767. }
  1768. if (useLensFlare)
  1769. {
  1770. LensFlareDataDrivenComputeOcclusion(renderGraph, resourceData, cameraData);
  1771. RenderLensFlareDataDriven(renderGraph, resourceData, cameraData, in currentSource);
  1772. }
  1773. // TODO RENDERGRAPH: Once we start removing the non-RG code path in URP, we should move the functions below into the render func so that material setup happens on
  1774. // the same timeline as render-graph execution. Keep them here for now so we can reuse non-RG code to reduce maintenance cost.
  1775. SetupLensDistortion(m_Materials.uber, isSceneViewCamera);
  1776. SetupChromaticAberration(m_Materials.uber);
  1777. SetupVignette(m_Materials.uber, cameraData.xr);
  1778. SetupGrain(cameraData, m_Materials.uber);
  1779. SetupDithering(cameraData, m_Materials.uber);
  1780. if (RequireSRGBConversionBlitToBackBuffer(cameraData.requireSrgbConversion))
  1781. m_Materials.uber.EnableKeyword(ShaderKeywordStrings.LinearToSRGBConversion);
  1782. if (m_UseFastSRGBLinearConversion)
  1783. {
  1784. m_Materials.uber.EnableKeyword(ShaderKeywordStrings.UseFastSRGBLinearConversion);
  1785. }
  1786. bool requireHDROutput = RequireHDROutput(cameraData);
  1787. if (requireHDROutput)
  1788. {
  1789. // Color space conversion is already applied through color grading, do encoding if uber post is the last pass
  1790. // Otherwise encoding will happen in the final post process pass or the final blit pass
  1791. HDROutputUtils.Operation hdrOperations = !m_HasFinalPass && m_EnableColorEncodingIfNeeded ? HDROutputUtils.Operation.ColorEncoding : HDROutputUtils.Operation.None;
  1792. SetupHDROutput(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, m_Materials.uber, hdrOperations);
  1793. }
  1794. bool enableAlphaOutput = cameraData.isAlphaOutputEnabled;
  1795. DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
  1796. debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(renderGraph, cameraData, !m_HasFinalPass && !resolveToDebugScreen);
  1797. RenderUberPost(renderGraph, frameData, cameraData, postProcessingData, in currentSource, in postProcessingTarget, in lutTexture, in overlayUITexture, requireHDROutput, enableAlphaOutput, resolveToDebugScreen);
  1798. }
  1799. }
  1800. }
  1801. }