
PostProcessPass.cs

  1. using System;
  2. using System.Runtime.CompilerServices;
  3. using UnityEngine.Experimental.Rendering;
  4. using UnityEngine.Rendering.RenderGraphModule;
  5. namespace UnityEngine.Rendering.Universal
  6. {
  7. /// <summary>
  8. /// Renders the post-processing effect stack.
  9. /// </summary>
  10. internal partial class PostProcessPass : ScriptableRenderPass
  11. {
  12. RenderTextureDescriptor m_Descriptor;
  13. RTHandle m_Source;
  14. RTHandle m_Destination;
  15. RTHandle m_Depth;
  16. RTHandle m_InternalLut;
  17. RTHandle m_MotionVectors;
  18. RTHandle m_FullCoCTexture;
  19. RTHandle m_HalfCoCTexture;
  20. RTHandle m_PingTexture;
  21. RTHandle m_PongTexture;
  22. RTHandle[] m_BloomMipDown;
  23. RTHandle[] m_BloomMipUp;
  24. TextureHandle[] _BloomMipUp;
  25. TextureHandle[] _BloomMipDown;
  26. RTHandle m_BlendTexture;
  27. RTHandle m_EdgeColorTexture;
  28. RTHandle m_EdgeStencilTexture;
  29. RTHandle m_TempTarget;
  30. RTHandle m_TempTarget2;
  31. RTHandle m_StreakTmpTexture;
  32. RTHandle m_StreakTmpTexture2;
  33. RTHandle m_ScreenSpaceLensFlareResult;
  34. const string k_RenderPostProcessingTag = "Blit PostProcessing Effects";
  35. const string k_RenderFinalPostProcessingTag = "Blit Final PostProcessing";
  36. private static readonly ProfilingSampler m_ProfilingRenderPostProcessing = new ProfilingSampler(k_RenderPostProcessingTag);
  37. private static readonly ProfilingSampler m_ProfilingRenderFinalPostProcessing = new ProfilingSampler(k_RenderFinalPostProcessingTag);
  38. MaterialLibrary m_Materials;
  39. PostProcessData m_Data;
  40. // Builtin effects settings
  41. DepthOfField m_DepthOfField;
  42. MotionBlur m_MotionBlur;
  43. ScreenSpaceLensFlare m_LensFlareScreenSpace;
  44. PaniniProjection m_PaniniProjection;
  45. Bloom m_Bloom;
  46. LensDistortion m_LensDistortion;
  47. ChromaticAberration m_ChromaticAberration;
  48. Vignette m_Vignette;
  49. ColorLookup m_ColorLookup;
  50. ColorAdjustments m_ColorAdjustments;
  51. Tonemapping m_Tonemapping;
  52. FilmGrain m_FilmGrain;
  53. // Depth Of Field shader passes
  54. const int k_GaussianDoFPassComputeCoc = 0;
  55. const int k_GaussianDoFPassDownscalePrefilter = 1;
  56. const int k_GaussianDoFPassBlurH = 2;
  57. const int k_GaussianDoFPassBlurV = 3;
  58. const int k_GaussianDoFPassComposite = 4;
  59. const int k_BokehDoFPassComputeCoc = 0;
  60. const int k_BokehDoFPassDownscalePrefilter = 1;
  61. const int k_BokehDoFPassBlur = 2;
  62. const int k_BokehDoFPassPostFilter = 3;
  63. const int k_BokehDoFPassComposite = 4;
  64. // Misc
  65. const int k_MaxPyramidSize = 16;
  66. readonly GraphicsFormat m_DefaultColorFormat; // The default format for post-processing, follows back-buffer format in URP.
  67. bool m_DefaultColorFormatIsAlpha;
  68. bool m_DefaultColorFormatUseRGBM;
  69. readonly GraphicsFormat m_SMAAEdgeFormat;
  70. readonly GraphicsFormat m_GaussianCoCFormat;
  71. int m_DitheringTextureIndex;
  72. RenderTargetIdentifier[] m_MRT2;
  73. Vector4[] m_BokehKernel;
  74. int m_BokehHash;
  75. // Needed if the device changes its render target width/height (e.g., mobile platforms allow orientation changes)
  76. float m_BokehMaxRadius;
  77. float m_BokehRCPAspect;
  78. // True when this is the very last pass in the pipeline
  79. bool m_IsFinalPass;
  80. // If there's a final post process pass after this pass.
  81. // If yes, Film Grain and Dithering are set up in the final pass; otherwise they are set up in this pass.
  82. bool m_HasFinalPass;
  83. // Some Android devices do not support sRGB backbuffer
  84. // We need to do the conversion manually on those
  85. // Also if HDR output is active
  86. bool m_EnableColorEncodingIfNeeded;
  87. // Use Fast conversions between SRGB and Linear
  88. bool m_UseFastSRGBLinearConversion;
  89. // Support Screen Space Lens Flare post process effect
  90. bool m_SupportScreenSpaceLensFlare;
  91. // Support Data Driven Lens Flare post process effect
  92. bool m_SupportDataDrivenLensFlare;
  93. // Blit to screen or color frontbuffer at the end
  94. bool m_ResolveToScreen;
  95. // Renderer is using swapbuffer system
  96. bool m_UseSwapBuffer;
  97. // RTHandle used as a temporary target when operations need to be performed before image scaling
  98. RTHandle m_ScalingSetupTarget;
  99. // RTHandle used as a temporary target when operations need to be performed after upscaling
  100. RTHandle m_UpscaledTarget;
  101. Material m_BlitMaterial;
  102. // Cached bloom params from previous frame to avoid unnecessary material updates
  103. BloomMaterialParams m_BloomParamsPrev;
  104. /// <summary>
  105. /// Creates a new <c>PostProcessPass</c> instance.
  106. /// </summary>
  107. /// <param name="evt">The <c>RenderPassEvent</c> to use.</param>
  108. /// <param name="data">The <c>PostProcessData</c> resources to use.</param>
  109. /// <param name="postProcessParams">The <c>PostProcessParams</c> run-time params to use.</param>
  110. /// <seealso cref="RenderPassEvent"/>
  111. /// <seealso cref="PostProcessData"/>
  112. /// <seealso cref="PostProcessParams"/>
  113. public PostProcessPass(RenderPassEvent evt, PostProcessData data, ref PostProcessParams postProcessParams)
  114. {
  115. base.profilingSampler = new ProfilingSampler(nameof(PostProcessPass));
  116. renderPassEvent = evt;
  117. m_Data = data;
  118. m_Materials = new MaterialLibrary(data);
  119. // Bloom pyramid shader ids - can't use a simple stackalloc in the bloom function as we
  120. // unfortunately need to allocate strings
  121. ShaderConstants._BloomMipUp = new int[k_MaxPyramidSize];
  122. ShaderConstants._BloomMipDown = new int[k_MaxPyramidSize];
  123. m_BloomMipUp = new RTHandle[k_MaxPyramidSize];
  124. m_BloomMipDown = new RTHandle[k_MaxPyramidSize];
  125. // Bloom pyramid TextureHandles
  126. _BloomMipUp = new TextureHandle[k_MaxPyramidSize];
  127. _BloomMipDown = new TextureHandle[k_MaxPyramidSize];
  128. for (int i = 0; i < k_MaxPyramidSize; i++)
  129. {
  130. ShaderConstants._BloomMipUp[i] = Shader.PropertyToID("_BloomMipUp" + i);
  131. ShaderConstants._BloomMipDown[i] = Shader.PropertyToID("_BloomMipDown" + i);
  132. // Get the name; the handle will be allocated with a descriptor later
  133. m_BloomMipUp[i] = RTHandles.Alloc(ShaderConstants._BloomMipUp[i], name: "_BloomMipUp" + i);
  134. m_BloomMipDown[i] = RTHandles.Alloc(ShaderConstants._BloomMipDown[i], name: "_BloomMipDown" + i);
  135. }
  136. m_MRT2 = new RenderTargetIdentifier[2];
  137. base.useNativeRenderPass = false;
  138. m_BlitMaterial = postProcessParams.blitMaterial;
  139. // NOTE: Request color format is the back-buffer color format. It can be HDR or SDR (when HDR disabled).
  140. // Request color might have alpha or might not have alpha.
  141. // The actual post-process target can be different. A RenderTexture with a custom format. Not necessarily a back-buffer.
  142. // A RenderTexture with a custom format can have an alpha channel, regardless of the back-buffer setting,
  143. // so the post-processing should just use the current target format/alpha to toggle alpha output.
  144. //
  145. // However, we want to filter out the alpha shader variants when not used (common case).
  146. // The rule is that URP post-processing format follows the back-buffer format setting.
  147. bool requestHDR = IsHDRFormat(postProcessParams.requestColorFormat);
  148. bool requestAlpha = IsAlphaFormat(postProcessParams.requestColorFormat);
  149. // Texture format pre-lookup
  150. // UUM-41070: We require `Linear | Render` but with the deprecated FormatUsage this was checking `Blend`
  151. // For now, we keep checking for `Blend` until the performance hit of doing the correct checks is evaluated
  152. if (requestHDR)
  153. {
  154. m_DefaultColorFormatIsAlpha = requestAlpha;
  155. m_DefaultColorFormatUseRGBM = false;
  156. const GraphicsFormatUsage usage = GraphicsFormatUsage.Blend;
  157. if (SystemInfo.IsFormatSupported(postProcessParams.requestColorFormat, usage)) // Typically, RGBA16Float.
  158. {
  159. m_DefaultColorFormat = postProcessParams.requestColorFormat;
  160. }
  161. else if (SystemInfo.IsFormatSupported(GraphicsFormat.B10G11R11_UFloatPack32, usage)) // HDR fallback
  162. {
  163. // NOTE: Technically the requested format can have alpha, but if it's not supported and we fall back here,
  164. // we assume no alpha. The post-process default format follows the back-buffer format.
  165. // If support failed, it must have failed for back buffer too.
  166. m_DefaultColorFormat = GraphicsFormat.B10G11R11_UFloatPack32;
  167. m_DefaultColorFormatIsAlpha = false;
  168. }
  169. else
  170. {
  171. m_DefaultColorFormat = QualitySettings.activeColorSpace == ColorSpace.Linear
  172. ? GraphicsFormat.R8G8B8A8_SRGB
  173. : GraphicsFormat.R8G8B8A8_UNorm;
  174. m_DefaultColorFormatUseRGBM = true; // Encode HDR data into RGBA8888 as RGBM (RGB, Multiplier)
  175. }
  176. }
  177. else // SDR
  178. {
  179. m_DefaultColorFormat = QualitySettings.activeColorSpace == ColorSpace.Linear
  180. ? GraphicsFormat.R8G8B8A8_SRGB
  181. : GraphicsFormat.R8G8B8A8_UNorm;
  182. m_DefaultColorFormatIsAlpha = true;
  183. // TODO: Bloom uses RGBM to Emulate HDR.
  184. // TODO: Lens Flares render into the bloom texture, but do not support RGBM encoding at the moment.
  185. // RGBM is disabled in the SDR case for now.
  186. m_DefaultColorFormatUseRGBM = false;
  187. }
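  // RGBM, in rough terms: the HDR color is divided by a shared multiplier M (stored in the alpha channel) so it
  // fits an 8-bit target, and the multiplier is applied back on read. It trades some precision for extra range
  // when a floating-point color format is unavailable.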
  188. // Only two components are needed for edge render texture, but on some vendors four components may be faster.
  189. if (SystemInfo.IsFormatSupported(GraphicsFormat.R8G8_UNorm, GraphicsFormatUsage.Render) && SystemInfo.graphicsDeviceVendor.ToLowerInvariant().Contains("arm"))
  190. m_SMAAEdgeFormat = GraphicsFormat.R8G8_UNorm;
  191. else
  192. m_SMAAEdgeFormat = GraphicsFormat.R8G8B8A8_UNorm;
  193. // UUM-41070: We require `Linear | Render` but with the deprecated FormatUsage this was checking `Blend`
  194. // For now, we keep checking for `Blend` until the performance hit of doing the correct checks is evaluated
  195. if (SystemInfo.IsFormatSupported(GraphicsFormat.R16_UNorm, GraphicsFormatUsage.Blend))
  196. m_GaussianCoCFormat = GraphicsFormat.R16_UNorm;
  197. else if (SystemInfo.IsFormatSupported(GraphicsFormat.R16_SFloat, GraphicsFormatUsage.Blend))
  198. m_GaussianCoCFormat = GraphicsFormat.R16_SFloat;
  199. else // Expect CoC banding
  200. m_GaussianCoCFormat = GraphicsFormat.R8_UNorm;
  201. }
  202. /// <summary>
  203. /// Cleans up the Material Library used in the passes.
  204. /// </summary>
  205. public void Cleanup()
  206. {
  207. m_Materials.Cleanup();
  208. Dispose();
  209. }
  210. /// <summary>
  211. /// Disposes used resources.
  212. /// </summary>
  213. public void Dispose()
  214. {
  215. foreach (var handle in m_BloomMipDown)
  216. handle?.Release();
  217. foreach (var handle in m_BloomMipUp)
  218. handle?.Release();
  219. m_ScalingSetupTarget?.Release();
  220. m_UpscaledTarget?.Release();
  221. m_FullCoCTexture?.Release();
  222. m_HalfCoCTexture?.Release();
  223. m_PingTexture?.Release();
  224. m_PongTexture?.Release();
  225. m_BlendTexture?.Release();
  226. m_EdgeColorTexture?.Release();
  227. m_EdgeStencilTexture?.Release();
  228. m_TempTarget?.Release();
  229. m_TempTarget2?.Release();
  230. m_StreakTmpTexture?.Release();
  231. m_StreakTmpTexture2?.Release();
  232. m_ScreenSpaceLensFlareResult?.Release();
  233. }
  234. /// <summary>
  235. /// Configures the pass.
  236. /// </summary>
  237. /// <param name="baseDescriptor"></param>
  238. /// <param name="source"></param>
  239. /// <param name="resolveToScreen"></param>
  240. /// <param name="depth"></param>
  241. /// <param name="internalLut"></param>
  242. /// <param name="hasFinalPass"></param>
  243. /// <param name="enableColorEncoding"></param>
  244. public void Setup(in RenderTextureDescriptor baseDescriptor, in RTHandle source, bool resolveToScreen, in RTHandle depth, in RTHandle internalLut, in RTHandle motionVectors, bool hasFinalPass, bool enableColorEncoding)
  245. {
  246. m_Descriptor = baseDescriptor;
  247. m_Descriptor.useMipMap = false;
  248. m_Descriptor.autoGenerateMips = false;
  249. m_Source = source;
  250. m_Depth = depth;
  251. m_InternalLut = internalLut;
  252. m_MotionVectors = motionVectors;
  253. m_IsFinalPass = false;
  254. m_HasFinalPass = hasFinalPass;
  255. m_EnableColorEncodingIfNeeded = enableColorEncoding;
  256. m_ResolveToScreen = resolveToScreen;
  257. m_UseSwapBuffer = true;
  258. // Disable obsolete warning for internal usage
  259. #pragma warning disable CS0618
  260. m_Destination = k_CameraTarget;
  261. #pragma warning restore CS0618
  262. }
  263. /// <summary>
  264. /// Configures the pass.
  265. /// </summary>
  266. /// <param name="baseDescriptor"></param>
  267. /// <param name="source"></param>
  268. /// <param name="destination"></param>
  269. /// <param name="depth"></param>
  270. /// <param name="internalLut"></param>
  271. /// <param name="hasFinalPass"></param>
  272. /// <param name="enableColorEncoding"></param>
  273. public void Setup(in RenderTextureDescriptor baseDescriptor, in RTHandle source, RTHandle destination, in RTHandle depth, in RTHandle internalLut, bool hasFinalPass, bool enableColorEncoding)
  274. {
  275. m_Descriptor = baseDescriptor;
  276. m_Descriptor.useMipMap = false;
  277. m_Descriptor.autoGenerateMips = false;
  278. m_Source = source;
  279. m_Destination = destination;
  280. m_Depth = depth;
  281. m_InternalLut = internalLut;
  282. m_IsFinalPass = false;
  283. m_HasFinalPass = hasFinalPass;
  284. m_EnableColorEncodingIfNeeded = enableColorEncoding;
  285. m_UseSwapBuffer = true;
  286. }
  287. /// <summary>
  288. /// Configures the Final pass.
  289. /// </summary>
  290. /// <param name="source"></param>
  291. /// <param name="useSwapBuffer"></param>
  292. /// <param name="enableColorEncoding"></param>
  293. public void SetupFinalPass(in RTHandle source, bool useSwapBuffer = false, bool enableColorEncoding = true)
  294. {
  295. m_Source = source;
  296. m_IsFinalPass = true;
  297. m_HasFinalPass = false;
  298. m_EnableColorEncodingIfNeeded = enableColorEncoding;
  299. m_UseSwapBuffer = useSwapBuffer;
  300. // Disable obsolete warning for internal usage
  301. #pragma warning disable CS0618
  302. m_Destination = k_CameraTarget;
  303. #pragma warning restore CS0618
  304. }
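  // Illustrative call order from a renderer that owns these passes (the actual wiring lives in UniversalRenderer):
  //   postProcessPass.Setup(cameraTargetDescriptor, cameraColor, resolveToScreen: false, depth, internalLut, motionVectors, hasFinalPass: true, enableColorEncoding: true);
  //   finalPostProcessPass.SetupFinalPass(cameraColor, useSwapBuffer: true);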
  305. /// <inheritdoc/>
  306. [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
  307. public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
  308. {
  309. overrideCameraTarget = true;
  310. }
  311. public bool CanRunOnTile()
  312. {
  313. // Check builtin & user effects here
  314. return false;
  315. }
  316. /// <inheritdoc/>
  317. [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)]
  318. public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
  319. {
  320. // Start by pre-fetching all builtin effect settings we need
  321. // Some of the color-grading settings are only used in the color grading lut pass
  322. var stack = VolumeManager.instance.stack;
  323. m_DepthOfField = stack.GetComponent<DepthOfField>();
  324. m_MotionBlur = stack.GetComponent<MotionBlur>();
  325. m_LensFlareScreenSpace = stack.GetComponent<ScreenSpaceLensFlare>();
  326. m_PaniniProjection = stack.GetComponent<PaniniProjection>();
  327. m_Bloom = stack.GetComponent<Bloom>();
  328. m_LensDistortion = stack.GetComponent<LensDistortion>();
  329. m_ChromaticAberration = stack.GetComponent<ChromaticAberration>();
  330. m_Vignette = stack.GetComponent<Vignette>();
  331. m_ColorLookup = stack.GetComponent<ColorLookup>();
  332. m_ColorAdjustments = stack.GetComponent<ColorAdjustments>();
  333. m_Tonemapping = stack.GetComponent<Tonemapping>();
  334. m_FilmGrain = stack.GetComponent<FilmGrain>();
  335. m_UseFastSRGBLinearConversion = renderingData.postProcessingData.useFastSRGBLinearConversion;
  336. m_SupportScreenSpaceLensFlare = renderingData.postProcessingData.supportScreenSpaceLensFlare;
  337. m_SupportDataDrivenLensFlare = renderingData.postProcessingData.supportDataDrivenLensFlare;
  338. var cmd = renderingData.commandBuffer;
  339. if (m_IsFinalPass)
  340. {
  341. using (new ProfilingScope(cmd, m_ProfilingRenderFinalPostProcessing))
  342. {
  343. RenderFinalPass(cmd, ref renderingData);
  344. }
  345. }
  346. else if (CanRunOnTile())
  347. {
  348. // TODO: Add a fast render path if only on-tile compatible effects are used and we're actually running on a platform that supports it
  349. // Note: we can still work on-tile if FXAA is enabled, it'd be part of the final pass
  350. }
  351. else
  352. {
  353. // Regular render path (not on-tile) - we do everything in a single command buffer as it
  354. // makes it easier to manage temporary targets' lifetime
  355. using (new ProfilingScope(cmd, m_ProfilingRenderPostProcessing))
  356. {
  357. Render(cmd, ref renderingData);
  358. }
  359. }
  360. }
  361. bool IsHDRFormat(GraphicsFormat format)
  362. {
  363. return format == GraphicsFormat.B10G11R11_UFloatPack32 ||
  364. GraphicsFormatUtility.IsHalfFormat(format) ||
  365. GraphicsFormatUtility.IsFloatFormat(format);
  366. }
  367. bool IsAlphaFormat(GraphicsFormat format)
  368. {
  369. return GraphicsFormatUtility.HasAlphaChannel(format);
  370. }
  371. RenderTextureDescriptor GetCompatibleDescriptor()
  372. => GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, m_Descriptor.graphicsFormat);
  373. RenderTextureDescriptor GetCompatibleDescriptor(int width, int height, GraphicsFormat format, DepthBits depthBufferBits = DepthBits.None)
  374. => GetCompatibleDescriptor(m_Descriptor, width, height, format, depthBufferBits);
  375. internal static RenderTextureDescriptor GetCompatibleDescriptor(RenderTextureDescriptor desc, int width, int height, GraphicsFormat format, DepthBits depthBufferBits = DepthBits.None)
  376. {
  377. desc.depthBufferBits = (int)depthBufferBits;
  378. desc.msaaSamples = 1;
  379. desc.width = width;
  380. desc.height = height;
  381. desc.graphicsFormat = format;
  382. return desc;
  383. }
  384. bool RequireSRGBConversionBlitToBackBuffer(bool requireSrgbConversion)
  385. {
  386. return requireSrgbConversion && m_EnableColorEncodingIfNeeded;
  387. }
  388. bool RequireHDROutput(UniversalCameraData cameraData)
  389. {
  390. // If capturing, don't convert to HDR.
  391. // If not last in the stack, don't convert to HDR.
  392. return cameraData.isHDROutputActive && cameraData.captureActions == null;
  393. }
  394. void Render(CommandBuffer cmd, ref RenderingData renderingData)
  395. {
  396. UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
  397. ref ScriptableRenderer renderer = ref cameraData.renderer;
  398. bool isSceneViewCamera = cameraData.isSceneViewCamera;
  399. // Check the number of swaps we have to do.
  400. // We blit back and forth without MSAA until the last blit.
  401. bool useStopNan = cameraData.isStopNaNEnabled && m_Materials.stopNaN != null;
  402. bool useSubPixeMorpAA = cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing;
  403. var dofMaterial = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian ? m_Materials.gaussianDepthOfField : m_Materials.bokehDepthOfField;
  404. bool useDepthOfField = m_DepthOfField.IsActive() && !isSceneViewCamera && dofMaterial != null;
  405. bool useLensFlare = !LensFlareCommonSRP.Instance.IsEmpty() && m_SupportDataDrivenLensFlare;
  406. bool useLensFlareScreenSpace = m_LensFlareScreenSpace.IsActive() && m_SupportScreenSpaceLensFlare;
  407. bool useMotionBlur = m_MotionBlur.IsActive() && !isSceneViewCamera;
  408. bool usePaniniProjection = m_PaniniProjection.IsActive() && !isSceneViewCamera;
  409. // Disable MotionBlur in EditMode, so that editing remains clear and readable.
  410. // NOTE: HDRP does the same via CoreUtils::AreAnimatedMaterialsEnabled().
  411. useMotionBlur = useMotionBlur && Application.isPlaying;
  412. // Note that enabling jitters uses the same CameraData::IsTemporalAAEnabled(). So if we add any other kind of overrides (like
  413. // disable useTemporalAA if another feature is disabled) then we need to put it in CameraData::IsTemporalAAEnabled() as opposed
  414. // to tweaking the value here.
  415. bool useTemporalAA = cameraData.IsTemporalAAEnabled();
  416. if (cameraData.antialiasing == AntialiasingMode.TemporalAntiAliasing && !useTemporalAA)
  417. TemporalAA.ValidateAndWarn(cameraData);
  418. int amountOfPassesRemaining = (useStopNan ? 1 : 0) + (useSubPixeMorpAA ? 1 : 0) + (useDepthOfField ? 1 : 0) + (useLensFlare ? 1 : 0) + (useTemporalAA ? 1 : 0) + (useMotionBlur ? 1 : 0) + (usePaniniProjection ? 1 : 0);
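  // Counts the ping-pong passes that will run before the uber blit; while any remain, swap-buffer MSAA stays
  // disabled, and Swap() re-enables it for the very last blit when no final pass follows.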
  419. if (m_UseSwapBuffer && amountOfPassesRemaining > 0)
  420. {
  421. renderer.EnableSwapBufferMSAA(false);
  422. }
  423. // Disable obsolete warning for internal usage
  424. #pragma warning disable CS0618
  425. // Don't use these directly unless you have a good reason to, use GetSource() and
  426. // GetDestination() instead
  427. RTHandle source = m_UseSwapBuffer ? renderer.cameraColorTargetHandle : m_Source;
  428. RTHandle destination = m_UseSwapBuffer ? renderer.GetCameraColorFrontBuffer(cmd) : null;
  429. #pragma warning restore CS0618
  430. RTHandle GetSource() => source;
  431. RTHandle GetDestination()
  432. {
  433. if (destination == null)
  434. {
  435. RenderingUtils.ReAllocateHandleIfNeeded(ref m_TempTarget, GetCompatibleDescriptor(), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_TempTarget");
  436. destination = m_TempTarget;
  437. }
  438. else if (destination == m_Source && m_Descriptor.msaaSamples > 1)
  439. {
  440. // Avoid using m_Source.id as the new destination; it may come with a depth buffer that we don't want, may have MSAA that we don't want, etc.
  441. RenderingUtils.ReAllocateHandleIfNeeded(ref m_TempTarget2, GetCompatibleDescriptor(), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_TempTarget2");
  442. destination = m_TempTarget2;
  443. }
  444. return destination;
  445. }
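  // Swap advances the ping-pong chain: it decrements the remaining pass count, swaps the renderer's color
  // buffers (or simply exchanges the local source/destination when the swap-buffer system is not used),
  // and re-enables swap-buffer MSAA for the very last blit when no final pass follows.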
  446. void Swap(ref ScriptableRenderer r)
  447. {
  448. --amountOfPassesRemaining;
  449. if (m_UseSwapBuffer)
  450. {
  451. r.SwapColorBuffer(cmd);
  452. // Disable obsolete warning for internal usage
  453. #pragma warning disable CS0618
  454. source = r.cameraColorTargetHandle;
  455. #pragma warning restore CS0618
  456. // We want the last blit to go to the MSAA target
  457. if (amountOfPassesRemaining == 0 && !m_HasFinalPass)
  458. r.EnableSwapBufferMSAA(true);
  459. // Disable obsolete warning for internal usage
  460. #pragma warning disable CS0618
  461. destination = r.GetCameraColorFrontBuffer(cmd);
  462. #pragma warning restore CS0618
  463. }
  464. else
  465. {
  466. CoreUtils.Swap(ref source, ref destination);
  467. }
  468. }
  469. // Setup projection matrix for cmd.DrawMesh()
  470. cmd.SetGlobalMatrix(ShaderConstants._FullscreenProjMat, GL.GetGPUProjectionMatrix(Matrix4x4.identity, true));
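  // GetGPUProjectionMatrix adapts the identity matrix to the active graphics API's clip-space conventions (y-flip, depth range).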
  471. // Optional NaN killer before post-processing kicks in
  472. // stopNaN may be null on Adreno 3xx. It doesn't support full shader level 3.5, but SystemInfo.graphicsShaderLevel is 35.
  473. if (useStopNan)
  474. {
  475. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.StopNaNs)))
  476. {
  477. Blitter.BlitCameraTexture(cmd, GetSource(), GetDestination(), RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, m_Materials.stopNaN, 0);
  478. Swap(ref renderer);
  479. }
  480. }
  481. // Anti-aliasing
  482. if (useSubPixeMorpAA)
  483. {
  484. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.SMAA)))
  485. {
  486. DoSubpixelMorphologicalAntialiasing(ref renderingData.cameraData, cmd, GetSource(), GetDestination());
  487. Swap(ref renderer);
  488. }
  489. }
  490. // Depth of Field
  491. // Adreno 3xx SystemInfo.graphicsShaderLevel is 35, but instancing support is disabled due to buggy drivers.
  492. // DOF shader uses #pragma target 3.5 which adds requirement for instancing support, thus marking the shader unsupported on those devices.
  493. if (useDepthOfField)
  494. {
  495. var markerName = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian
  496. ? URPProfileId.GaussianDepthOfField
  497. : URPProfileId.BokehDepthOfField;
  498. using (new ProfilingScope(cmd, ProfilingSampler.Get(markerName)))
  499. {
  500. DoDepthOfField(ref renderingData.cameraData, cmd, GetSource(), GetDestination(), cameraData.pixelRect);
  501. Swap(ref renderer);
  502. }
  503. }
  504. // Temporal Anti Aliasing
  505. if (useTemporalAA)
  506. {
  507. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.TemporalAA)))
  508. {
  509. Debug.Assert(m_MotionVectors != null, "MotionVectors are invalid. TAA requires a motion vector texture.");
  510. TemporalAA.ExecutePass(cmd, m_Materials.temporalAntialiasing, ref renderingData.cameraData, source, destination, m_MotionVectors?.rt);
  511. Swap(ref renderer);
  512. }
  513. }
  514. // Motion blur
  515. if (useMotionBlur)
  516. {
  517. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MotionBlur)))
  518. {
  519. DoMotionBlur(cmd, GetSource(), GetDestination(), m_MotionVectors, ref renderingData.cameraData);
  520. Swap(ref renderer);
  521. }
  522. }
  523. // Panini projection is done as a fullscreen pass after all depth-based effects are done
  524. // and before bloom kicks in
  525. if (usePaniniProjection)
  526. {
  527. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.PaniniProjection)))
  528. {
  529. DoPaniniProjection(cameraData.camera, cmd, GetSource(), GetDestination());
  530. Swap(ref renderer);
  531. }
  532. }
  533. // Combined post-processing stack
  534. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.UberPostProcess)))
  535. {
  536. // Reset uber keywords
  537. m_Materials.uber.shaderKeywords = null;
  538. // Bloom goes first
  539. bool bloomActive = m_Bloom.IsActive();
  540. bool lensFlareScreenSpaceActive = m_LensFlareScreenSpace.IsActive();
  541. // We still need to do the bloom pass if screen space lens flare is active because it uses _Bloom_Texture.
  542. if (bloomActive || lensFlareScreenSpaceActive)
  543. {
  544. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.Bloom)))
  545. SetupBloom(cmd, GetSource(), m_Materials.uber, cameraData.isAlphaOutputEnabled);
  546. }
  547. // Lens Flare Screen Space
  548. if (useLensFlareScreenSpace)
  549. {
  550. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.LensFlareScreenSpace)))
  551. {
  552. // We clamp the bloomMip value to avoid picking a mip that doesn't exist, since in URP the maximum number of bloom pass iterations is configurable.
  553. int maxBloomMip = Mathf.Clamp(m_LensFlareScreenSpace.bloomMip.value, 0, m_Bloom.maxIterations.value/2);
  554. DoLensFlareScreenSpace(cameraData.camera, cmd, GetSource(), m_BloomMipUp[0], m_BloomMipUp[maxBloomMip]);
  555. }
  556. }
  557. // Lens Flare
  558. if (useLensFlare)
  559. {
  560. bool usePanini;
  561. float paniniDistance;
  562. float paniniCropToFit;
  563. if (m_PaniniProjection.IsActive())
  564. {
  565. usePanini = true;
  566. paniniDistance = m_PaniniProjection.distance.value;
  567. paniniCropToFit = m_PaniniProjection.cropToFit.value;
  568. }
  569. else
  570. {
  571. usePanini = false;
  572. paniniDistance = 1.0f;
  573. paniniCropToFit = 1.0f;
  574. }
  575. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.LensFlareDataDrivenComputeOcclusion)))
  576. {
  577. LensFlareDataDrivenComputeOcclusion(ref cameraData, cmd, GetSource(), usePanini, paniniDistance, paniniCropToFit);
  578. }
  579. using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.LensFlareDataDriven)))
  580. {
  581. LensFlareDataDriven(ref cameraData, cmd, GetSource(), usePanini, paniniDistance, paniniCropToFit);
  582. }
  583. }
  584. // Setup other effects constants
  585. SetupLensDistortion(m_Materials.uber, isSceneViewCamera);
  586. SetupChromaticAberration(m_Materials.uber);
  587. SetupVignette(m_Materials.uber, cameraData.xr);
  588. SetupColorGrading(cmd, ref renderingData, m_Materials.uber);
  589. // Only apply dithering & grain if there isn't a final pass.
  590. SetupGrain(cameraData, m_Materials.uber);
  591. SetupDithering(cameraData, m_Materials.uber);
  592. if (RequireSRGBConversionBlitToBackBuffer(cameraData.requireSrgbConversion))
  593. m_Materials.uber.EnableKeyword(ShaderKeywordStrings.LinearToSRGBConversion);
  594. bool requireHDROutput = RequireHDROutput(cameraData);
  595. if (requireHDROutput)
  596. {
  597. // Color space conversion is already applied through color grading, do encoding if uber post is the last pass
  598. // Otherwise encoding will happen in the final post process pass or the final blit pass
  599. HDROutputUtils.Operation hdrOperation = !m_HasFinalPass && m_EnableColorEncodingIfNeeded ? HDROutputUtils.Operation.ColorEncoding : HDROutputUtils.Operation.None;
  600. SetupHDROutput(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, m_Materials.uber, hdrOperation);
  601. }
  602. if (m_UseFastSRGBLinearConversion)
  603. {
  604. m_Materials.uber.EnableKeyword(ShaderKeywordStrings.UseFastSRGBLinearConversion);
  605. }
  606. CoreUtils.SetKeyword(m_Materials.uber, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, cameraData.isAlphaOutputEnabled);
  607. DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
  608. bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
  609. debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, cameraData, !m_HasFinalPass && !resolveToDebugScreen);
  610. // Done with Uber, blit it
  611. var colorLoadAction = RenderBufferLoadAction.DontCare;
  612. // Disable obsolete warning for internal usage
  613. #pragma warning disable CS0618
  614. if (m_Destination == k_CameraTarget && !cameraData.isDefaultViewport)
  615. colorLoadAction = RenderBufferLoadAction.Load;
  616. #pragma warning restore CS0618
  617. // Note: When rendering to the "camera target" we need to use cameraData.targetTexture, as this holds the targetTexture of the camera stack.
  618. // Overlay cameras need to output to the target described in the base camera while camera stacking is used.
  619. RenderTargetIdentifier cameraTargetID = BuiltinRenderTextureType.CameraTarget;
  620. #if ENABLE_VR && ENABLE_XR_MODULE
  621. if (cameraData.xr.enabled)
  622. cameraTargetID = cameraData.xr.renderTarget;
  623. #endif
  624. if (!m_UseSwapBuffer)
  625. m_ResolveToScreen = cameraData.resolveFinalTarget || m_Destination.nameID == cameraTargetID || m_HasFinalPass == true;
  626. // With camera stacking we do not always resolve post-processing to the final screen, as we might run it in the middle of the stack.
  627. if (m_UseSwapBuffer && !m_ResolveToScreen)
  628. {
  629. if (!m_HasFinalPass)
  630. {
  631. // We need to reenable this to be able to blit to the correct AA target
  632. renderer.EnableSwapBufferMSAA(true);
  633. // Disable obsolete warning for internal usage
  634. #pragma warning disable CS0618
  635. destination = renderer.GetCameraColorFrontBuffer(cmd);
  636. #pragma warning restore CS0618
  637. }
  638. Blitter.BlitCameraTexture(cmd, GetSource(), destination, colorLoadAction, RenderBufferStoreAction.Store, m_Materials.uber, 0);
  639. // Disable obsolete warning for internal usage
  640. #pragma warning disable CS0618
  641. renderer.ConfigureCameraColorTarget(destination);
  642. #pragma warning restore CS0618
  643. Swap(ref renderer);
  644. }
  645. // TODO: Implement swapbuffer in 2DRenderer so we can remove this
  646. // For now, when rendering post-processing in the middle of the camera stack (not resolving to screen)
  647. // we do an extra blit to ping-pong results back to the color texture. In the future we should allow a swap of the current active color texture
  648. // in the pipeline to avoid this extra blit.
  649. else if (!m_UseSwapBuffer)
  650. {
  651. var firstSource = GetSource();
  652. Blitter.BlitCameraTexture(cmd, firstSource, GetDestination(), colorLoadAction, RenderBufferStoreAction.Store, m_Materials.uber, 0);
  653. Blitter.BlitCameraTexture(cmd, GetDestination(), m_Destination, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, m_BlitMaterial, m_Destination.rt?.filterMode == FilterMode.Bilinear ? 1 : 0);
  654. }
  655. else if (m_ResolveToScreen)
  656. {
  657. if (resolveToDebugScreen)
  658. {
  659. // Blit to the debugger texture instead of the camera target
  660. Blitter.BlitCameraTexture(cmd, GetSource(), debugHandler.DebugScreenColorHandle, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, m_Materials.uber, 0);
  661. // Disable obsolete warning for internal usage
  662. #pragma warning disable CS0618
  663. renderer.ConfigureCameraTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
  664. #pragma warning restore CS0618
  665. }
  666. else
  667. {
  668. // Get RTHandle alias to use RTHandle apis
  669. RenderTargetIdentifier cameraTarget = cameraData.targetTexture != null ? new RenderTargetIdentifier(cameraData.targetTexture) : cameraTargetID;
  670. RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
  671. var cameraTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;
  672. RenderingUtils.FinalBlit(cmd, cameraData, GetSource(), cameraTargetHandle, colorLoadAction, RenderBufferStoreAction.Store, m_Materials.uber, 0);
  673. // Disable obsolete warning for internal usage
  674. #pragma warning disable CS0618
  675. renderer.ConfigureCameraColorTarget(cameraTargetHandle);
  676. #pragma warning restore CS0618
  677. }
  678. }
  679. }
  680. }
  681. #region Sub-pixel Morphological Anti-aliasing
  682. void DoSubpixelMorphologicalAntialiasing(ref CameraData cameraData, CommandBuffer cmd, RTHandle source, RTHandle destination)
  683. {
  684. var camera = cameraData.camera;
  685. var pixelRect = new Rect(Vector2.zero, new Vector2(cameraData.cameraTargetDescriptor.width, cameraData.cameraTargetDescriptor.height));
  686. var material = m_Materials.subpixelMorphologicalAntialiasing;
  687. const int kStencilBit = 64;
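  // The edge-detection pass marks detected edge pixels in the stencil buffer with this ref/mask value,
  // so the blend-weight pass can be restricted to those pixels.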
  688. // Intermediate targets
  689. RTHandle stencil; // We would only need stencil, no depth. But Unity doesn't support that.
  690. if (m_Depth.nameID == BuiltinRenderTextureType.CameraTarget || m_Descriptor.msaaSamples > 1)
  691. {
  692. // In case m_Depth is CameraTarget it may refer to the backbuffer and we can't use that as an attachment on all platforms
  693. RenderingUtils.ReAllocateHandleIfNeeded(ref m_EdgeStencilTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, GraphicsFormat.None, DepthBits.Depth24), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_EdgeStencilTexture");
  694. stencil = m_EdgeStencilTexture;
  695. }
  696. else
  697. {
  698. stencil = m_Depth;
  699. }
  700. RenderingUtils.ReAllocateHandleIfNeeded(ref m_EdgeColorTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, m_SMAAEdgeFormat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_EdgeColorTexture");
  701. RenderingUtils.ReAllocateHandleIfNeeded(ref m_BlendTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, GraphicsFormat.R8G8B8A8_UNorm), FilterMode.Point, TextureWrapMode.Clamp, name: "_BlendTexture");
  702. // Globals
  703. var targetSize = m_EdgeColorTexture.useScaling ? m_EdgeColorTexture.rtHandleProperties.currentRenderTargetSize : new Vector2Int(m_EdgeColorTexture.rt.width, m_EdgeColorTexture.rt.height);
  704. material.SetVector(ShaderConstants._Metrics, new Vector4(1f / targetSize.x, 1f / targetSize.y, targetSize.x, targetSize.y));
  705. material.SetTexture(ShaderConstants._AreaTexture, m_Data.textures.smaaAreaTex);
  706. material.SetTexture(ShaderConstants._SearchTexture, m_Data.textures.smaaSearchTex);
  707. material.SetFloat(ShaderConstants._StencilRef, (float)kStencilBit);
  708. material.SetFloat(ShaderConstants._StencilMask, (float)kStencilBit);
  709. // Quality presets
  710. material.shaderKeywords = null;
  711. switch (cameraData.antialiasingQuality)
  712. {
  713. case AntialiasingQuality.Low:
  714. material.EnableKeyword(ShaderKeywordStrings.SmaaLow);
  715. break;
  716. case AntialiasingQuality.Medium:
  717. material.EnableKeyword(ShaderKeywordStrings.SmaaMedium);
  718. break;
  719. case AntialiasingQuality.High:
  720. material.EnableKeyword(ShaderKeywordStrings.SmaaHigh);
  721. break;
  722. }
  723. // Pass 1: Edge detection
  724. RenderingUtils.Blit(cmd, source, pixelRect,
  725. m_EdgeColorTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
  726. stencil, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
  727. ClearFlag.ColorStencil, Color.clear, // implicit depth=1.0f stencil=0x0
  728. material, 0);
  729. // Pass 2: Blend weights
  730. RenderingUtils.Blit(cmd, m_EdgeColorTexture, pixelRect,
  731. m_BlendTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store,
  732. stencil, RenderBufferLoadAction.Load, RenderBufferStoreAction.DontCare,
  733. ClearFlag.Color, Color.clear, material, 1);
  734. // Pass 3: Neighborhood blending
  735. cmd.SetGlobalTexture(ShaderConstants._BlendTexture, m_BlendTexture.nameID);
  736. Blitter.BlitCameraTexture(cmd, source, destination, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, 2);
  737. }
  738. #endregion
  739. #region Depth Of Field
  740. // TODO: CoC reprojection once TAA gets in LW
  741. // TODO: Proper LDR/gamma support
  742. void DoDepthOfField(ref CameraData cameraData, CommandBuffer cmd, RTHandle source, RTHandle destination, Rect pixelRect)
  743. {
  744. if (m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian)
  745. DoGaussianDepthOfField(cmd, source, destination, pixelRect, cameraData.isAlphaOutputEnabled);
  746. else if (m_DepthOfField.mode.value == DepthOfFieldMode.Bokeh)
  747. DoBokehDepthOfField(cmd, source, destination, pixelRect, cameraData.isAlphaOutputEnabled);
  748. }
  749. void DoGaussianDepthOfField(CommandBuffer cmd, RTHandle source, RTHandle destination, Rect pixelRect, bool enableAlphaOutput)
  750. {
  751. int downSample = 2;
  752. var material = m_Materials.gaussianDepthOfField;
  753. int wh = m_Descriptor.width / downSample;
  754. int hh = m_Descriptor.height / downSample;
  755. float farStart = m_DepthOfField.gaussianStart.value;
  756. float farEnd = Mathf.Max(farStart, m_DepthOfField.gaussianEnd.value);
  757. // Assumes a radius of 1 is 1 at 1080p
  758. // Past a certain radius our gaussian kernel will look very bad so we'll clamp it for
  759. // very high resolutions (4K+).
  760. float maxRadius = m_DepthOfField.gaussianMaxRadius.value * (wh / 1080f);
  761. maxRadius = Mathf.Min(maxRadius, 2f);
  762. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, enableAlphaOutput);
  763. CoreUtils.SetKeyword(material, ShaderKeywordStrings.HighQualitySampling, m_DepthOfField.highQualitySampling.value);
  764. material.SetVector(ShaderConstants._CoCParams, new Vector3(farStart, farEnd, maxRadius));
  765. RenderingUtils.ReAllocateHandleIfNeeded(ref m_FullCoCTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, m_GaussianCoCFormat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_FullCoCTexture");
  766. RenderingUtils.ReAllocateHandleIfNeeded(ref m_HalfCoCTexture, GetCompatibleDescriptor(wh, hh, m_GaussianCoCFormat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_HalfCoCTexture");
  767. RenderingUtils.ReAllocateHandleIfNeeded(ref m_PingTexture, GetCompatibleDescriptor(wh, hh, GraphicsFormat.R16G16B16A16_SFloat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_PingTexture");
  768. RenderingUtils.ReAllocateHandleIfNeeded(ref m_PongTexture, GetCompatibleDescriptor(wh, hh, GraphicsFormat.R16G16B16A16_SFloat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_PongTexture");
  769. PostProcessUtils.SetSourceSize(cmd, m_FullCoCTexture);
  770. cmd.SetGlobalVector(ShaderConstants._DownSampleScaleFactor, new Vector4(1.0f / downSample, 1.0f / downSample, downSample, downSample));
  771. // Compute CoC
  772. Blitter.BlitCameraTexture(cmd, source, m_FullCoCTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_GaussianDoFPassComputeCoc);
  773. // Downscale & prefilter color + coc
  774. m_MRT2[0] = m_HalfCoCTexture.nameID;
  775. m_MRT2[1] = m_PingTexture.nameID;
  776. cmd.SetGlobalTexture(ShaderConstants._FullCoCTexture, m_FullCoCTexture.nameID);
  777. CoreUtils.SetRenderTarget(cmd, m_MRT2, m_HalfCoCTexture);
  778. Vector2 viewportScale = source.useScaling ? new Vector2(source.rtHandleProperties.rtHandleScale.x, source.rtHandleProperties.rtHandleScale.y) : Vector2.one;
  779. Blitter.BlitTexture(cmd, source, viewportScale, material, k_GaussianDoFPassDownscalePrefilter);
  780. // Blur
  781. cmd.SetGlobalTexture(ShaderConstants._HalfCoCTexture, m_HalfCoCTexture.nameID);
  782. cmd.SetGlobalTexture(ShaderConstants._ColorTexture, source);
  783. Blitter.BlitCameraTexture(cmd, m_PingTexture, m_PongTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_GaussianDoFPassBlurH);
  784. Blitter.BlitCameraTexture(cmd, m_PongTexture, m_PingTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_GaussianDoFPassBlurV);
  785. // Composite
  786. cmd.SetGlobalTexture(ShaderConstants._ColorTexture, m_PingTexture.nameID);
  787. cmd.SetGlobalTexture(ShaderConstants._FullCoCTexture, m_FullCoCTexture.nameID);
  788. Blitter.BlitCameraTexture(cmd, source, destination, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_GaussianDoFPassComposite);
  789. }
  790. void PrepareBokehKernel(float maxRadius, float rcpAspect)
  791. {
  792. const int kRings = 4;
  793. const int kPointsPerRing = 7;
  794. // Check the existing array
  795. if (m_BokehKernel == null)
  796. m_BokehKernel = new Vector4[42];
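  // 42 = 7 + 14 + 21: ring * kPointsPerRing sample points for rings 1..3 (kRings - 1 rings, no center sample).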
  797. // Fill in sample points (concentric circles transformed to rotated N-Gon)
  798. int idx = 0;
  799. float bladeCount = m_DepthOfField.bladeCount.value;
  800. float curvature = 1f - m_DepthOfField.bladeCurvature.value;
  801. float rotation = m_DepthOfField.bladeRotation.value * Mathf.Deg2Rad;
  802. const float PI = Mathf.PI;
  803. const float TWO_PI = Mathf.PI * 2f;
  804. for (int ring = 1; ring < kRings; ring++)
  805. {
  806. float bias = 1f / kPointsPerRing;
  807. float radius = (ring + bias) / (kRings - 1f + bias);
  808. int points = ring * kPointsPerRing;
  809. for (int point = 0; point < points; point++)
  810. {
  811. // Angle on ring
  812. float phi = 2f * PI * point / points;
  813. // Transform to rotated N-Gon
  814. // Adapted from "CryEngine 3 Graphics Gems" [Sousa13]
  815. float nt = Mathf.Cos(PI / bladeCount);
  816. float dt = Mathf.Cos(phi - (TWO_PI / bladeCount) * Mathf.Floor((bladeCount * phi + Mathf.PI) / TWO_PI));
  817. float r = radius * Mathf.Pow(nt / dt, curvature);
  818. float u = r * Mathf.Cos(phi - rotation);
  819. float v = r * Mathf.Sin(phi - rotation);
  820. float uRadius = u * maxRadius;
  821. float vRadius = v * maxRadius;
  822. float uRadiusPowTwo = uRadius * uRadius;
  823. float vRadiusPowTwo = vRadius * vRadius;
  824. float kernelLength = Mathf.Sqrt((uRadiusPowTwo + vRadiusPowTwo));
  825. float uRCP = uRadius * rcpAspect;
  826. m_BokehKernel[idx] = new Vector4(uRadius, vRadius, kernelLength, uRCP);
  827. idx++;
  828. }
  829. }
  830. }
  831. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  832. static float GetMaxBokehRadiusInPixels(float viewportHeight)
  833. {
  834. // Estimate the maximum radius of bokeh (empirically derived from the ring count)
  835. const float kRadiusInPixels = 14f;
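  // The radius is returned as a fraction of the viewport height, capped at 5% of the screen.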
  836. return Mathf.Min(0.05f, kRadiusInPixels / viewportHeight);
  837. }
  838. void DoBokehDepthOfField(CommandBuffer cmd, RTHandle source, RTHandle destination, Rect pixelRect, bool enableAlphaOutput)
  839. {
  840. int downSample = 2;
  841. var material = m_Materials.bokehDepthOfField;
  842. int wh = m_Descriptor.width / downSample;
  843. int hh = m_Descriptor.height / downSample;
  844. // "A Lens and Aperture Camera Model for Synthetic Image Generation" [Potmesil81]
  845. float F = m_DepthOfField.focalLength.value / 1000f;
  846. float A = m_DepthOfField.focalLength.value / m_DepthOfField.aperture.value;
  847. float P = m_DepthOfField.focusDistance.value;
  848. float maxCoC = (A * F) / (P - F);
  849. float maxRadius = GetMaxBokehRadiusInPixels(m_Descriptor.height);
  850. float rcpAspect = 1f / (wh / (float)hh);
  851. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, enableAlphaOutput);
  852. CoreUtils.SetKeyword(material, ShaderKeywordStrings.UseFastSRGBLinearConversion, m_UseFastSRGBLinearConversion);
  853. cmd.SetGlobalVector(ShaderConstants._CoCParams, new Vector4(P, maxCoC, maxRadius, rcpAspect));
  854. // Prepare the bokeh kernel constant buffer
  855. int hash = m_DepthOfField.GetHashCode();
  856. if (hash != m_BokehHash || maxRadius != m_BokehMaxRadius || rcpAspect != m_BokehRCPAspect)
  857. {
  858. m_BokehHash = hash;
  859. m_BokehMaxRadius = maxRadius;
  860. m_BokehRCPAspect = rcpAspect;
  861. PrepareBokehKernel(maxRadius, rcpAspect);
  862. }
  863. cmd.SetGlobalVectorArray(ShaderConstants._BokehKernel, m_BokehKernel);
  864. RenderingUtils.ReAllocateHandleIfNeeded(ref m_FullCoCTexture, GetCompatibleDescriptor(m_Descriptor.width, m_Descriptor.height, GraphicsFormat.R8_UNorm), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_FullCoCTexture");
  865. RenderingUtils.ReAllocateHandleIfNeeded(ref m_PingTexture, GetCompatibleDescriptor(wh, hh, GraphicsFormat.R16G16B16A16_SFloat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_PingTexture");
  866. RenderingUtils.ReAllocateHandleIfNeeded(ref m_PongTexture, GetCompatibleDescriptor(wh, hh, GraphicsFormat.R16G16B16A16_SFloat), FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_PongTexture");
  867. PostProcessUtils.SetSourceSize(cmd, m_FullCoCTexture);
  868. cmd.SetGlobalVector(ShaderConstants._DownSampleScaleFactor, new Vector4(1.0f / downSample, 1.0f / downSample, downSample, downSample));
  869. float uvMargin = (1.0f / m_Descriptor.height) * downSample;
  870. cmd.SetGlobalVector(ShaderConstants._BokehConstants, new Vector4(uvMargin, uvMargin * 2.0f));
  871. // Compute CoC
  872. Blitter.BlitCameraTexture(cmd, source, m_FullCoCTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_BokehDoFPassComputeCoc);
  873. cmd.SetGlobalTexture(ShaderConstants._FullCoCTexture, m_FullCoCTexture.nameID);
  874. // Downscale & prefilter color + coc
  875. Blitter.BlitCameraTexture(cmd, source, m_PingTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_BokehDoFPassDownscalePrefilter);
  876. // Bokeh blur
  877. Blitter.BlitCameraTexture(cmd, m_PingTexture, m_PongTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_BokehDoFPassBlur);
  878. // Post-filtering
  879. Blitter.BlitCameraTexture(cmd, m_PongTexture, m_PingTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_BokehDoFPassPostFilter);
  880. // Composite
  881. cmd.SetGlobalTexture(ShaderConstants._DofTexture, m_PingTexture.nameID);
  882. Blitter.BlitCameraTexture(cmd, source, destination, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, k_BokehDoFPassComposite);
  883. }
  884. #endregion
  885. #region LensFlareDataDriven
  886. static float GetLensFlareLightAttenuation(Light light, Camera cam, Vector3 wo)
  887. {
  888. // Must always be true
  889. if (light != null)
  890. {
  891. switch (light.type)
  892. {
  893. case LightType.Directional:
  894. return LensFlareCommonSRP.ShapeAttenuationDirLight(light.transform.forward, cam.transform.forward);
  895. case LightType.Point:
  896. return LensFlareCommonSRP.ShapeAttenuationPointLight();
  897. case LightType.Spot:
  898. return LensFlareCommonSRP.ShapeAttenuationSpotConeLight(light.transform.forward, wo, light.spotAngle, light.innerSpotAngle / 180.0f);
  899. default:
  900. return 1.0f;
  901. }
  902. }
  903. return 1.0f;
  904. }
  905. void LensFlareDataDrivenComputeOcclusion(ref UniversalCameraData cameraData, CommandBuffer cmd, RenderTargetIdentifier source, bool usePanini, float paniniDistance, float paniniCropToFit)
  906. {
  907. if (!LensFlareCommonSRP.IsOcclusionRTCompatible())
  908. return;
  909. Camera camera = cameraData.camera;
  910. Matrix4x4 nonJitteredViewProjMatrix0;
  911. int xrId0;
  912. #if ENABLE_VR && ENABLE_XR_MODULE
  913. // Pick the non-jittered view-projection matrix and XR view index for the first view
  914. if (cameraData.xr.enabled)
  915. {
  916. if (cameraData.xr.singlePassEnabled)
  917. {
  918. nonJitteredViewProjMatrix0 = GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrixNoJitter(0), true) * cameraData.GetViewMatrix(0);
  919. xrId0 = 0;
  920. }
  921. else
  922. {
  923. var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
  924. nonJitteredViewProjMatrix0 = gpuNonJitteredProj * camera.worldToCameraMatrix;
  925. xrId0 = cameraData.xr.multipassId;
  926. }
  927. }
  928. else
  929. {
  930. nonJitteredViewProjMatrix0 = GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrixNoJitter(0), true) * cameraData.GetViewMatrix(0);
  931. xrId0 = 0;
  932. }
  933. #else
  934. var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
  935. nonJitteredViewProjMatrix0 = gpuNonJitteredProj * camera.worldToCameraMatrix;
  936. xrId0 = cameraData.xr.multipassId;
  937. #endif
  938. cmd.SetGlobalTexture(m_Depth.name, m_Depth.nameID);
  939. LensFlareCommonSRP.ComputeOcclusion(
  940. m_Materials.lensFlareDataDriven, camera, cameraData.xr, cameraData.xr.multipassId,
  941. (float)m_Descriptor.width, (float)m_Descriptor.height,
  942. usePanini, paniniDistance, paniniCropToFit, true,
  943. camera.transform.position,
  944. nonJitteredViewProjMatrix0,
  945. cmd,
  946. false, false, null, null);
  947. #if ENABLE_VR && ENABLE_XR_MODULE
  948. if (cameraData.xr.enabled && cameraData.xr.singlePassEnabled)
  949. {
  950. for (int xrIdx = 1; xrIdx < cameraData.xr.viewCount; ++xrIdx)
  951. {
  952. Matrix4x4 gpuVPXR = GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrixNoJitter(xrIdx), true) * cameraData.GetViewMatrix(xrIdx);
  953. cmd.SetGlobalTexture(m_Depth.name, m_Depth.nameID);
  954. // Bypass single pass version
  955. LensFlareCommonSRP.ComputeOcclusion(
  956. m_Materials.lensFlareDataDriven, camera, cameraData.xr, xrIdx,
957. (float)m_Descriptor.width, (float)m_Descriptor.height,
  958. usePanini, paniniDistance, paniniCropToFit, true,
  959. camera.transform.position,
  960. gpuVPXR,
  961. cmd,
  962. false, false, null, null);
  963. }
  964. }
  965. #endif
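// Note: the single-pass XR loop above starts at xrIdx = 1 because the ComputeOcclusion call before
// it already handled view 0 via nonJitteredViewProjMatrix0.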
  966. }
  967. void LensFlareDataDriven(ref UniversalCameraData cameraData, CommandBuffer cmd, RenderTargetIdentifier source, bool usePanini, float paniniDistance, float paniniCropToFit)
  968. {
  969. Camera camera = cameraData.camera;
  970. #if ENABLE_VR && ENABLE_XR_MODULE
  971. // Not VR or Multi-Pass
  972. if (!cameraData.xr.enabled ||
  973. (cameraData.xr.enabled && !cameraData.xr.singlePassEnabled))
  974. {
  975. #endif
  976. var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true);
  977. var gpuVP = gpuNonJitteredProj * camera.worldToCameraMatrix;
  978. LensFlareCommonSRP.DoLensFlareDataDrivenCommon(
  979. m_Materials.lensFlareDataDriven, camera, camera.pixelRect, cameraData.xr, cameraData.xr.multipassId,
  980. (float)m_Descriptor.width, (float)m_Descriptor.height,
  981. usePanini, paniniDistance, paniniCropToFit, true,
  982. camera.transform.position,
  983. gpuVP,
  984. cmd,
  985. false, false, null, null,
  986. source,
  987. (Light light, Camera cam, Vector3 wo) => { return GetLensFlareLightAttenuation(light, cam, wo); },
  988. false);
  989. #if ENABLE_VR && ENABLE_XR_MODULE
  990. }
991. else // cameraData.xr.enabled && cameraData.xr.singlePassEnabled
  992. {
  993. // Bypass single pass version
  994. for (int xrIdx = 0; xrIdx < cameraData.xr.viewCount; ++xrIdx)
  995. {
  996. Matrix4x4 gpuVPXR = GL.GetGPUProjectionMatrix(cameraData.GetProjectionMatrixNoJitter(xrIdx), true) * cameraData.GetViewMatrix(xrIdx);
  997. LensFlareCommonSRP.DoLensFlareDataDrivenCommon(
  998. m_Materials.lensFlareDataDriven, camera, camera.pixelRect, cameraData.xr, cameraData.xr.multipassId,
  999. (float)m_Descriptor.width, (float)m_Descriptor.height,
  1000. usePanini, paniniDistance, paniniCropToFit, true,
  1001. camera.transform.position,
  1002. gpuVPXR,
  1003. cmd,
  1004. false, false, null, null,
  1005. source,
  1006. (Light light, Camera cam, Vector3 wo) => { return GetLensFlareLightAttenuation(light, cam, wo); },
  1007. false);
  1008. }
  1009. }
  1010. #endif
  1011. }
  1012. #endregion
  1013. #region LensFlareScreenSpace
  1014. void DoLensFlareScreenSpace(Camera camera, CommandBuffer cmd, RenderTargetIdentifier source, RTHandle originalBloomTexture, RTHandle screenSpaceLensFlareBloomMipTexture)
  1015. {
  1016. int ratio = (int)m_LensFlareScreenSpace.resolution.value;
  1017. int width = Mathf.Max(1, (int)m_Descriptor.width / ratio);
  1018. int height = Mathf.Max(1, (int)m_Descriptor.height / ratio);
  1019. var desc = GetCompatibleDescriptor(width, height, m_DefaultColorFormat);
  1020. if (m_LensFlareScreenSpace.IsStreaksActive())
  1021. {
  1022. RenderingUtils.ReAllocateHandleIfNeeded(ref m_StreakTmpTexture, desc, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_StreakTmpTexture");
  1023. RenderingUtils.ReAllocateHandleIfNeeded(ref m_StreakTmpTexture2, desc, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_StreakTmpTexture2");
  1024. }
  1025. RenderingUtils.ReAllocateHandleIfNeeded(ref m_ScreenSpaceLensFlareResult, desc, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "_ScreenSpaceLensFlareResult");
  1026. LensFlareCommonSRP.DoLensFlareScreenSpaceCommon(
  1027. m_Materials.lensFlareScreenSpace,
  1028. camera,
  1029. (float)m_Descriptor.width,
  1030. (float)m_Descriptor.height,
  1031. m_LensFlareScreenSpace.tintColor.value,
  1032. originalBloomTexture,
  1033. screenSpaceLensFlareBloomMipTexture,
  1034. null, // We don't have any spectral LUT in URP
  1035. m_StreakTmpTexture,
  1036. m_StreakTmpTexture2,
  1037. new Vector4(
  1038. m_LensFlareScreenSpace.intensity.value,
  1039. m_LensFlareScreenSpace.firstFlareIntensity.value,
  1040. m_LensFlareScreenSpace.secondaryFlareIntensity.value,
  1041. m_LensFlareScreenSpace.warpedFlareIntensity.value),
  1042. new Vector4(
  1043. m_LensFlareScreenSpace.vignetteEffect.value,
  1044. m_LensFlareScreenSpace.startingPosition.value,
  1045. m_LensFlareScreenSpace.scale.value,
  1046. 0), // Free slot, not used
  1047. new Vector4(
  1048. m_LensFlareScreenSpace.samples.value,
  1049. m_LensFlareScreenSpace.sampleDimmer.value,
  1050. m_LensFlareScreenSpace.chromaticAbberationIntensity.value,
  1051. 0), // No need to pass a chromatic aberration sample count, hardcoded at 3 in shader
  1052. new Vector4(
  1053. m_LensFlareScreenSpace.streaksIntensity.value,
  1054. m_LensFlareScreenSpace.streaksLength.value,
  1055. m_LensFlareScreenSpace.streaksOrientation.value,
  1056. m_LensFlareScreenSpace.streaksThreshold.value),
  1057. new Vector4(
  1058. ratio,
  1059. m_LensFlareScreenSpace.warpedFlareScale.value.x,
  1060. m_LensFlareScreenSpace.warpedFlareScale.value.y,
  1061. 0), // Free slot, not used
  1062. cmd,
  1063. m_ScreenSpaceLensFlareResult,
  1064. false);
  1065. cmd.SetGlobalTexture(ShaderConstants._Bloom_Texture, originalBloomTexture);
  1066. }
  1067. #endregion
  1068. #region Motion Blur
  1069. internal static readonly int k_ShaderPropertyId_ViewProjM = Shader.PropertyToID("_ViewProjM");
  1070. internal static readonly int k_ShaderPropertyId_PrevViewProjM = Shader.PropertyToID("_PrevViewProjM");
  1071. internal static readonly int k_ShaderPropertyId_ViewProjMStereo = Shader.PropertyToID("_ViewProjMStereo");
  1072. internal static readonly int k_ShaderPropertyId_PrevViewProjMStereo = Shader.PropertyToID("_PrevViewProjMStereo");
  1073. internal static void UpdateMotionBlurMatrices(ref Material material, Camera camera, XRPass xr)
  1074. {
  1075. MotionVectorsPersistentData motionData = null;
  1076. if(camera.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData))
  1077. motionData = additionalCameraData.motionVectorsPersistentData;
  1078. if (motionData == null)
  1079. return;
  1080. #if ENABLE_VR && ENABLE_XR_MODULE
  1081. if (xr.enabled && xr.singlePassEnabled)
  1082. {
  1083. material.SetMatrixArray(k_ShaderPropertyId_PrevViewProjMStereo, motionData.previousViewProjectionStereo);
  1084. material.SetMatrixArray(k_ShaderPropertyId_ViewProjMStereo, motionData.viewProjectionStereo);
  1085. }
  1086. else
  1087. #endif
  1088. {
  1089. int viewProjMIdx = 0;
  1090. #if ENABLE_VR && ENABLE_XR_MODULE
  1091. if (xr.enabled)
  1092. viewProjMIdx = xr.multipassId;
  1093. #endif
  1094. // TODO: These should be part of URP main matrix set. For now, we set them here for motion vector rendering.
  1095. material.SetMatrix(k_ShaderPropertyId_PrevViewProjM, motionData.previousViewProjectionStereo[viewProjMIdx]);
  1096. material.SetMatrix(k_ShaderPropertyId_ViewProjM, motionData.viewProjectionStereo[viewProjMIdx]);
  1097. }
  1098. }
  1099. void DoMotionBlur(CommandBuffer cmd, RTHandle source, RTHandle destination, RTHandle motionVectors, ref CameraData cameraData)
  1100. {
  1101. var material = m_Materials.cameraMotionBlur;
  1102. UpdateMotionBlurMatrices(ref material, cameraData.camera, cameraData.xr);
  1103. material.SetFloat("_Intensity", m_MotionBlur.intensity.value);
  1104. material.SetFloat("_Clamp", m_MotionBlur.clamp.value);
  1105. int pass = (int)m_MotionBlur.quality.value;
  1106. var mode = m_MotionBlur.mode.value;
  1107. if (mode == MotionBlurMode.CameraAndObjects)
  1108. {
  1109. Debug.Assert(motionVectors != null, "Motion vectors are invalid. Per-object motion blur requires a motion vector texture.");
  1110. pass += 3;
  1111. material.SetTexture(MotionVectorRenderPass.k_MotionVectorTextureName, motionVectors);
  1112. }
  1113. PostProcessUtils.SetSourceSize(cmd, source);
  1114. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, cameraData.isAlphaOutputEnabled);
  1115. Blitter.BlitCameraTexture(cmd, source, destination, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, pass);
  1116. }
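// Pass selection recap: the quality enum cast above maps Low/Medium/High to shader pass 0/1/2, and
// MotionBlurMode.CameraAndObjects adds 3, assuming the object-motion variants sit at passes 3/4/5
// of the camera motion blur shader.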
  1117. #endregion
  1118. #region Panini Projection
  1119. // Back-ported & adapted from the work of the Stockholm demo team - thanks Lasse!
  1120. void DoPaniniProjection(Camera camera, CommandBuffer cmd, RTHandle source, RTHandle destination)
  1121. {
  1122. float distance = m_PaniniProjection.distance.value;
  1123. var viewExtents = CalcViewExtents(camera);
  1124. var cropExtents = CalcCropExtents(camera, distance);
  1125. float scaleX = cropExtents.x / viewExtents.x;
  1126. float scaleY = cropExtents.y / viewExtents.y;
  1127. float scaleF = Mathf.Min(scaleX, scaleY);
  1128. float paniniD = distance;
  1129. float paniniS = Mathf.Lerp(1f, Mathf.Clamp01(scaleF), m_PaniniProjection.cropToFit.value);
  1130. var material = m_Materials.paniniProjection;
  1131. material.SetVector(ShaderConstants._Params, new Vector4(viewExtents.x, viewExtents.y, paniniD, paniniS));
  1132. material.EnableKeyword(
  1133. 1f - Mathf.Abs(paniniD) > float.Epsilon
  1134. ? ShaderKeywordStrings.PaniniGeneric : ShaderKeywordStrings.PaniniUnitDistance
  1135. );
  1136. Blitter.BlitCameraTexture(cmd, source, destination, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, material, 0);
  1137. }
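// Keyword recap: a projection distance at (or within float epsilon of) 1 selects the cheaper
// PaniniUnitDistance variant; any other distance falls back to the general PaniniGeneric path.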
  1138. Vector2 CalcViewExtents(Camera camera)
  1139. {
  1140. float fovY = camera.fieldOfView * Mathf.Deg2Rad;
  1141. float aspect = m_Descriptor.width / (float)m_Descriptor.height;
  1142. float viewExtY = Mathf.Tan(0.5f * fovY);
  1143. float viewExtX = aspect * viewExtY;
  1144. return new Vector2(viewExtX, viewExtY);
  1145. }
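// Worked example (illustrative): with a 60 degree vertical FOV and a 1920x1080 descriptor,
// viewExtY = tan(30 deg) ≈ 0.577 and viewExtX = (1920 / 1080) * 0.577 ≈ 1.026 — the half-extents
// of the view rectangle on a plane one unit in front of the camera.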
  1146. Vector2 CalcCropExtents(Camera camera, float d)
  1147. {
  1148. // given
1149. //    S----------- E--X-------
1150. //    |    `  ~.  /,´
1151. //    |-- ---    Q
1152. //    |        ,/    `
1153. //  1 |      ,´/       `
1154. //    |    ,´ /         ´
1155. //    |  ,´  /           ´
1156. //    |,`   /             ,
1157. //    O    /
1158. //    |   /                ,
1159. //  d |  /
1160. //    | /                   ,
1161. //    |/                    .
1162. //    P
1163. //    |                     ´
1164. //    |                   , ´
1165. //    +-  ´
  1166. //
  1167. // have X
  1168. // want to find E
  1169. float viewDist = 1f + d;
  1170. var projPos = CalcViewExtents(camera);
  1171. var projHyp = Mathf.Sqrt(projPos.x * projPos.x + 1f);
  1172. float cylDistMinusD = 1f / projHyp;
  1173. float cylDist = cylDistMinusD + d;
  1174. var cylPos = projPos * cylDistMinusD;
  1175. return cylPos * (viewDist / cylDist);
  1176. }
  1177. #endregion
  1178. #region Bloom
  1179. void SetupBloom(CommandBuffer cmd, RTHandle source, Material uberMaterial, bool enableAlphaOutput)
  1180. {
  1181. // Start at half-res
  1182. int downres = 1;
  1183. switch (m_Bloom.downscale.value)
  1184. {
  1185. case BloomDownscaleMode.Half:
  1186. downres = 1;
  1187. break;
  1188. case BloomDownscaleMode.Quarter:
  1189. downres = 2;
  1190. break;
  1191. default:
  1192. throw new System.ArgumentOutOfRangeException();
  1193. }
  1194. int tw = m_Descriptor.width >> downres;
  1195. int th = m_Descriptor.height >> downres;
  1196. // Determine the iteration count
  1197. int maxSize = Mathf.Max(tw, th);
  1198. int iterations = Mathf.FloorToInt(Mathf.Log(maxSize, 2f) - 1);
  1199. int mipCount = Mathf.Clamp(iterations, 1, m_Bloom.maxIterations.value);
  1200. // Pre-filtering parameters
  1201. float clamp = m_Bloom.clamp.value;
  1202. float threshold = Mathf.GammaToLinearSpace(m_Bloom.threshold.value);
  1203. float thresholdKnee = threshold * 0.5f; // Hardcoded soft knee
  1204. // Material setup
  1205. float scatter = Mathf.Lerp(0.05f, 0.95f, m_Bloom.scatter.value);
  1206. var bloomMaterial = m_Materials.bloom;
  1207. bloomMaterial.SetVector(ShaderConstants._Params, new Vector4(scatter, clamp, threshold, thresholdKnee));
  1208. CoreUtils.SetKeyword(bloomMaterial, ShaderKeywordStrings.BloomHQ, m_Bloom.highQualityFiltering.value);
  1209. CoreUtils.SetKeyword(bloomMaterial, ShaderKeywordStrings.UseRGBM, m_DefaultColorFormatUseRGBM);
  1210. CoreUtils.SetKeyword(bloomMaterial, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, enableAlphaOutput);
  1211. // Prefilter
  1212. var desc = GetCompatibleDescriptor(tw, th, m_DefaultColorFormat);
  1213. for (int i = 0; i < mipCount; i++)
  1214. {
  1215. RenderingUtils.ReAllocateHandleIfNeeded(ref m_BloomMipUp[i], desc, FilterMode.Bilinear, TextureWrapMode.Clamp, name: m_BloomMipUp[i].name);
  1216. RenderingUtils.ReAllocateHandleIfNeeded(ref m_BloomMipDown[i], desc, FilterMode.Bilinear, TextureWrapMode.Clamp, name: m_BloomMipDown[i].name);
  1217. desc.width = Mathf.Max(1, desc.width >> 1);
  1218. desc.height = Mathf.Max(1, desc.height >> 1);
  1219. }
  1220. Blitter.BlitCameraTexture(cmd, source, m_BloomMipDown[0], RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, bloomMaterial, 0);
  1221. // Downsample - gaussian pyramid
  1222. var lastDown = m_BloomMipDown[0];
  1223. for (int i = 1; i < mipCount; i++)
  1224. {
  1225. // Classic two pass gaussian blur - use mipUp as a temporary target
  1226. // First pass does 2x downsampling + 9-tap gaussian
  1227. // Second pass does 9-tap gaussian using a 5-tap filter + bilinear filtering
  1228. Blitter.BlitCameraTexture(cmd, lastDown, m_BloomMipUp[i], RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, bloomMaterial, 1);
  1229. Blitter.BlitCameraTexture(cmd, m_BloomMipUp[i], m_BloomMipDown[i], RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, bloomMaterial, 2);
  1230. lastDown = m_BloomMipDown[i];
  1231. }
1232. // Upsample (bilinear by default, HQ filtering does bicubic instead)
  1233. for (int i = mipCount - 2; i >= 0; i--)
  1234. {
  1235. var lowMip = (i == mipCount - 2) ? m_BloomMipDown[i + 1] : m_BloomMipUp[i + 1];
  1236. var highMip = m_BloomMipDown[i];
  1237. var dst = m_BloomMipUp[i];
  1238. cmd.SetGlobalTexture(ShaderConstants._SourceTexLowMip, lowMip);
  1239. Blitter.BlitCameraTexture(cmd, highMip, dst, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, bloomMaterial, 3);
  1240. }
  1241. // Setup bloom on uber
  1242. var tint = m_Bloom.tint.value.linear;
  1243. var luma = ColorUtils.Luminance(tint);
  1244. tint = luma > 0f ? tint * (1f / luma) : Color.white;
  1245. var bloomParams = new Vector4(m_Bloom.intensity.value, tint.r, tint.g, tint.b);
  1246. uberMaterial.SetVector(ShaderConstants._Bloom_Params, bloomParams);
  1247. uberMaterial.SetFloat(ShaderConstants._Bloom_RGBM, m_DefaultColorFormatUseRGBM ? 1f : 0f);
  1248. cmd.SetGlobalTexture(ShaderConstants._Bloom_Texture, m_BloomMipUp[0]);
  1249. // Setup lens dirtiness on uber
  1250. // Keep the aspect ratio correct & center the dirt texture, we don't want it to be
  1251. // stretched or squashed
  1252. var dirtTexture = m_Bloom.dirtTexture.value == null ? Texture2D.blackTexture : m_Bloom.dirtTexture.value;
  1253. float dirtRatio = dirtTexture.width / (float)dirtTexture.height;
  1254. float screenRatio = m_Descriptor.width / (float)m_Descriptor.height;
  1255. var dirtScaleOffset = new Vector4(1f, 1f, 0f, 0f);
  1256. float dirtIntensity = m_Bloom.dirtIntensity.value;
  1257. if (dirtRatio > screenRatio)
  1258. {
  1259. dirtScaleOffset.x = screenRatio / dirtRatio;
  1260. dirtScaleOffset.z = (1f - dirtScaleOffset.x) * 0.5f;
  1261. }
  1262. else if (screenRatio > dirtRatio)
  1263. {
  1264. dirtScaleOffset.y = dirtRatio / screenRatio;
  1265. dirtScaleOffset.w = (1f - dirtScaleOffset.y) * 0.5f;
  1266. }
  1267. uberMaterial.SetVector(ShaderConstants._LensDirt_Params, dirtScaleOffset);
  1268. uberMaterial.SetFloat(ShaderConstants._LensDirt_Intensity, dirtIntensity);
  1269. uberMaterial.SetTexture(ShaderConstants._LensDirt_Texture, dirtTexture);
  1270. // Keyword setup - a bit convoluted as we're trying to save some variants in Uber...
  1271. if (m_Bloom.highQualityFiltering.value)
  1272. uberMaterial.EnableKeyword(dirtIntensity > 0f ? ShaderKeywordStrings.BloomHQDirt : ShaderKeywordStrings.BloomHQ);
  1273. else
  1274. uberMaterial.EnableKeyword(dirtIntensity > 0f ? ShaderKeywordStrings.BloomLQDirt : ShaderKeywordStrings.BloomLQ);
  1275. }
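// Worked example (illustrative): at a 1920x1080 descriptor with BloomDownscaleMode.Half,
// tw x th = 960x540, maxSize = 960 and iterations = floor(log2(960) - 1) = 8, which is then clamped
// by maxIterations to give mipCount; the allocation loop above halves the descriptor once per mip.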
  1276. #endregion
  1277. #region Lens Distortion
  1278. void SetupLensDistortion(Material material, bool isSceneView)
  1279. {
  1280. float amount = 1.6f * Mathf.Max(Mathf.Abs(m_LensDistortion.intensity.value * 100f), 1f);
  1281. float theta = Mathf.Deg2Rad * Mathf.Min(160f, amount);
  1282. float sigma = 2f * Mathf.Tan(theta * 0.5f);
  1283. var center = m_LensDistortion.center.value * 2f - Vector2.one;
  1284. var p1 = new Vector4(
  1285. center.x,
  1286. center.y,
  1287. Mathf.Max(m_LensDistortion.xMultiplier.value, 1e-4f),
  1288. Mathf.Max(m_LensDistortion.yMultiplier.value, 1e-4f)
  1289. );
  1290. var p2 = new Vector4(
  1291. m_LensDistortion.intensity.value >= 0f ? theta : 1f / theta,
  1292. sigma,
  1293. 1f / m_LensDistortion.scale.value,
  1294. m_LensDistortion.intensity.value * 100f
  1295. );
  1296. material.SetVector(ShaderConstants._Distortion_Params1, p1);
  1297. material.SetVector(ShaderConstants._Distortion_Params2, p2);
  1298. if (m_LensDistortion.IsActive() && !isSceneView)
  1299. material.EnableKeyword(ShaderKeywordStrings.Distortion);
  1300. }
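// Worked example (illustrative): with intensity = 0.5, amount = 1.6 * max(|0.5 * 100|, 1) = 80,
// theta = deg2rad(min(160, 80)) ≈ 1.396 and sigma = 2 * tan(theta / 2) ≈ 1.678; since the intensity
// is positive, p2.x carries theta rather than 1 / theta.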
  1301. #endregion
  1302. #region Chromatic Aberration
  1303. void SetupChromaticAberration(Material material)
  1304. {
  1305. material.SetFloat(ShaderConstants._Chroma_Params, m_ChromaticAberration.intensity.value * 0.05f);
  1306. if (m_ChromaticAberration.IsActive())
  1307. material.EnableKeyword(ShaderKeywordStrings.ChromaticAberration);
  1308. }
  1309. #endregion
  1310. #region Vignette
  1311. void SetupVignette(Material material, XRPass xrPass)
  1312. {
  1313. var color = m_Vignette.color.value;
  1314. var center = m_Vignette.center.value;
  1315. var aspectRatio = m_Descriptor.width / (float)m_Descriptor.height;
  1316. #if ENABLE_VR && ENABLE_XR_MODULE
  1317. if (xrPass != null && xrPass.enabled)
  1318. {
  1319. if (xrPass.singlePassEnabled)
  1320. material.SetVector(ShaderConstants._Vignette_ParamsXR, xrPass.ApplyXRViewCenterOffset(center));
  1321. else
1322. // In multi-pass mode, apply the corrected eye center (.xy) directly, since the shader variant
1323. // that is not single-pass reads the center from _Vignette_Params2
  1324. center = xrPass.ApplyXRViewCenterOffset(center);
  1325. }
  1326. #endif
  1327. var v1 = new Vector4(
  1328. color.r, color.g, color.b,
  1329. m_Vignette.rounded.value ? aspectRatio : 1f
  1330. );
  1331. var v2 = new Vector4(
  1332. center.x, center.y,
  1333. m_Vignette.intensity.value * 3f,
  1334. m_Vignette.smoothness.value * 5f
  1335. );
  1336. material.SetVector(ShaderConstants._Vignette_Params1, v1);
  1337. material.SetVector(ShaderConstants._Vignette_Params2, v2);
  1338. }
  1339. #endregion
  1340. #region Color Grading
  1341. void SetupColorGrading(CommandBuffer cmd, ref RenderingData renderingData, Material material)
  1342. {
  1343. ref var postProcessingData = ref renderingData.postProcessingData;
  1344. bool hdr = postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange;
  1345. int lutHeight = postProcessingData.lutSize;
  1346. int lutWidth = lutHeight * lutHeight;
  1347. // Source material setup
  1348. float postExposureLinear = Mathf.Pow(2f, m_ColorAdjustments.postExposure.value);
  1349. material.SetTexture(ShaderConstants._InternalLut, m_InternalLut);
  1350. material.SetVector(ShaderConstants._Lut_Params, new Vector4(1f / lutWidth, 1f / lutHeight, lutHeight - 1f, postExposureLinear));
  1351. material.SetTexture(ShaderConstants._UserLut, m_ColorLookup.texture.value);
  1352. material.SetVector(ShaderConstants._UserLut_Params, !m_ColorLookup.IsActive()
  1353. ? Vector4.zero
  1354. : new Vector4(1f / m_ColorLookup.texture.value.width,
  1355. 1f / m_ColorLookup.texture.value.height,
  1356. m_ColorLookup.texture.value.height - 1f,
  1357. m_ColorLookup.contribution.value)
  1358. );
  1359. if (hdr)
  1360. {
  1361. material.EnableKeyword(ShaderKeywordStrings.HDRGrading);
  1362. }
  1363. else
  1364. {
  1365. switch (m_Tonemapping.mode.value)
  1366. {
  1367. case TonemappingMode.Neutral: material.EnableKeyword(ShaderKeywordStrings.TonemapNeutral); break;
  1368. case TonemappingMode.ACES: material.EnableKeyword(ShaderKeywordStrings.TonemapACES); break;
  1369. default: break; // None
  1370. }
  1371. }
  1372. }
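// Worked example (illustrative): with a 32-pixel LUT (URP's default size), lutWidth = 32 * 32 = 1024
// and _Lut_Params = (1/1024, 1/32, 31, exp2(postExposure)); the last component is the linear-space
// post-exposure multiplier the uber shader applies on top of the baked LUT.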
  1373. #endregion
  1374. #region Film Grain
  1375. void SetupGrain(UniversalCameraData cameraData, Material material)
  1376. {
  1377. if (!m_HasFinalPass && m_FilmGrain.IsActive())
  1378. {
  1379. material.EnableKeyword(ShaderKeywordStrings.FilmGrain);
  1380. PostProcessUtils.ConfigureFilmGrain(
  1381. m_Data,
  1382. m_FilmGrain,
  1383. cameraData.pixelWidth, cameraData.pixelHeight,
  1384. material
  1385. );
  1386. }
  1387. }
  1388. #endregion
  1389. #region 8-bit Dithering
  1390. void SetupDithering(UniversalCameraData cameraData, Material material)
  1391. {
  1392. if (!m_HasFinalPass && cameraData.isDitheringEnabled)
  1393. {
  1394. material.EnableKeyword(ShaderKeywordStrings.Dithering);
  1395. m_DitheringTextureIndex = PostProcessUtils.ConfigureDithering(
  1396. m_Data,
  1397. m_DitheringTextureIndex,
  1398. cameraData.pixelWidth, cameraData.pixelHeight,
  1399. material
  1400. );
  1401. }
  1402. }
  1403. #endregion
  1404. #region HDR Output
  1405. void SetupHDROutput(HDROutputUtils.HDRDisplayInformation hdrDisplayInformation, ColorGamut hdrDisplayColorGamut, Material material, HDROutputUtils.Operation hdrOperations)
  1406. {
  1407. Vector4 hdrOutputLuminanceParams;
  1408. UniversalRenderPipeline.GetHDROutputLuminanceParameters(hdrDisplayInformation, hdrDisplayColorGamut, m_Tonemapping, out hdrOutputLuminanceParams);
  1409. material.SetVector(ShaderPropertyId.hdrOutputLuminanceParams, hdrOutputLuminanceParams);
  1410. HDROutputUtils.ConfigureHDROutput(material, hdrDisplayColorGamut, hdrOperations);
  1411. }
  1412. #endregion
  1413. #region Final pass
  1414. void RenderFinalPass(CommandBuffer cmd, ref RenderingData renderingData)
  1415. {
  1416. UniversalCameraData cameraData = renderingData.frameData.Get<UniversalCameraData>();
  1417. var material = m_Materials.finalPass;
  1418. material.shaderKeywords = null;
  1419. // Disable obsolete warning for internal usage
  1420. #pragma warning disable CS0618
  1421. PostProcessUtils.SetSourceSize(cmd, cameraData.renderer.cameraColorTargetHandle);
  1422. #pragma warning restore CS0618
  1423. SetupGrain(renderingData.cameraData.universalCameraData, material);
  1424. SetupDithering(renderingData.cameraData.universalCameraData, material);
  1425. if (RequireSRGBConversionBlitToBackBuffer(renderingData.cameraData.requireSrgbConversion))
  1426. material.EnableKeyword(ShaderKeywordStrings.LinearToSRGBConversion);
  1427. HDROutputUtils.Operation hdrOperations = HDROutputUtils.Operation.None;
  1428. bool requireHDROutput = RequireHDROutput(renderingData.cameraData.universalCameraData);
  1429. if (requireHDROutput)
  1430. {
  1431. // If there is a final post process pass, it's always the final pass so do color encoding
  1432. hdrOperations = m_EnableColorEncodingIfNeeded ? HDROutputUtils.Operation.ColorEncoding : HDROutputUtils.Operation.None;
  1433. // If the color space conversion wasn't applied by the uber pass, do it here
  1434. if (!cameraData.postProcessEnabled)
  1435. hdrOperations |= HDROutputUtils.Operation.ColorConversion;
  1436. SetupHDROutput(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, material, hdrOperations);
  1437. }
  1438. CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, cameraData.isAlphaOutputEnabled);
  1439. DebugHandler debugHandler = GetActiveDebugHandler(cameraData);
  1440. bool resolveToDebugScreen = debugHandler != null && debugHandler.WriteToDebugScreenTexture(cameraData.resolveFinalTarget);
  1441. debugHandler?.UpdateShaderGlobalPropertiesForFinalValidationPass(cmd, cameraData, m_IsFinalPass && !resolveToDebugScreen);
  1442. if (m_UseSwapBuffer)
  1443. {
  1444. // Disable obsolete warning for internal usage
  1445. #pragma warning disable CS0618
  1446. m_Source = cameraData.renderer.GetCameraColorBackBuffer(cmd);
  1447. #pragma warning restore CS0618
  1448. }
  1449. RTHandle sourceTex = m_Source;
  1450. var colorLoadAction = cameraData.isDefaultViewport ? RenderBufferLoadAction.DontCare : RenderBufferLoadAction.Load;
  1451. bool isFxaaEnabled = (cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing);
  1452. // FSR is only considered "enabled" when we're performing upscaling. (downscaling uses a linear filter unconditionally)
  1453. bool isFsrEnabled = ((cameraData.imageScalingMode == ImageScalingMode.Upscaling) && (cameraData.upscalingFilter == ImageUpscalingFilter.FSR));
  1454. // Reuse RCAS pass as an optional standalone post sharpening pass for TAA.
  1455. // This avoids the cost of EASU and is available for other upscaling options.
  1456. // If FSR is enabled then FSR settings override the TAA settings and we perform RCAS only once.
  1457. bool isTaaSharpeningEnabled = (cameraData.IsTemporalAAEnabled() && cameraData.taaSettings.contrastAdaptiveSharpening > 0.0f) && !isFsrEnabled;
  1458. if (cameraData.imageScalingMode != ImageScalingMode.None)
  1459. {
  1460. // When FXAA is enabled in scaled renders, we execute it in a separate blit since it's not designed to be used in
  1461. // situations where the input and output resolutions do not match.
  1462. // When FSR is active, we always need an additional pass since it has a very particular color encoding requirement.
  1463. // NOTE: An ideal implementation could inline this color conversion logic into the UberPost pass, but the current code structure would make
  1464. // this process very complex. Specifically, we'd need to guarantee that the uber post output is always written to a UNORM format render
  1465. // target in order to preserve the precision of specially encoded color data.
  1466. bool isSetupRequired = (isFxaaEnabled || isFsrEnabled);
  1467. // Make sure to remove any MSAA and attached depth buffers from the temporary render targets
  1468. var tempRtDesc = cameraData.cameraTargetDescriptor;
  1469. tempRtDesc.msaaSamples = 1;
  1470. tempRtDesc.depthBufferBits = 0;
  1471. // Select a UNORM format since we've already performed tonemapping. (Values are in 0-1 range)
  1472. // This improves precision and is required if we want to avoid excessive banding when FSR is in use.
  1473. if (!requireHDROutput)
  1474. tempRtDesc.graphicsFormat = UniversalRenderPipeline.MakeUnormRenderTextureGraphicsFormat();
  1475. m_Materials.scalingSetup.shaderKeywords = null;
  1476. if (isSetupRequired)
  1477. {
  1478. if (requireHDROutput)
  1479. {
  1480. SetupHDROutput(cameraData.hdrDisplayInformation, cameraData.hdrDisplayColorGamut, m_Materials.scalingSetup, hdrOperations);
  1481. }
  1482. if (isFxaaEnabled)
  1483. {
  1484. m_Materials.scalingSetup.EnableKeyword(ShaderKeywordStrings.Fxaa);
  1485. }
  1486. if (isFsrEnabled)
  1487. {
  1488. m_Materials.scalingSetup.EnableKeyword(hdrOperations.HasFlag(HDROutputUtils.Operation.ColorEncoding) ? ShaderKeywordStrings.Gamma20AndHDRInput : ShaderKeywordStrings.Gamma20);
  1489. }
  1490. RenderingUtils.ReAllocateHandleIfNeeded(ref m_ScalingSetupTarget, tempRtDesc, FilterMode.Point, TextureWrapMode.Clamp, name: "_ScalingSetupTexture");
  1491. Blitter.BlitCameraTexture(cmd, m_Source, m_ScalingSetupTarget, colorLoadAction, RenderBufferStoreAction.Store, m_Materials.scalingSetup, 0);
  1492. sourceTex = m_ScalingSetupTarget;
  1493. }
  1494. switch (cameraData.imageScalingMode)
  1495. {
  1496. case ImageScalingMode.Upscaling:
  1497. {
  1498. // In the upscaling case, set material keywords based on the selected upscaling filter
  1499. // Note: If FSR is enabled, we go down this path regardless of the current render scale. We do this because
  1500. // FSR still provides visual benefits at 100% scale. This will also make the transition between 99% and 100%
  1501. // scale less obvious for cases where FSR is used with dynamic resolution scaling.
  1502. switch (cameraData.upscalingFilter)
  1503. {
  1504. case ImageUpscalingFilter.Point:
  1505. {
  1506. // TAA post sharpening is an RCAS pass, avoid overriding it with point sampling.
  1507. if(!isTaaSharpeningEnabled)
  1508. material.EnableKeyword(ShaderKeywordStrings.PointSampling);
  1509. break;
  1510. }
  1511. case ImageUpscalingFilter.Linear:
  1512. {
  1513. // Do nothing as linear is the default filter in the shader
  1514. break;
  1515. }
  1516. case ImageUpscalingFilter.FSR:
  1517. {
  1518. m_Materials.easu.shaderKeywords = null;
  1519. var upscaleRtDesc = cameraData.cameraTargetDescriptor;
  1520. upscaleRtDesc.msaaSamples = 1;
  1521. upscaleRtDesc.depthBufferBits = 0;
  1522. upscaleRtDesc.width = cameraData.pixelWidth;
  1523. upscaleRtDesc.height = cameraData.pixelHeight;
  1524. // EASU
  1525. RenderingUtils.ReAllocateHandleIfNeeded(ref m_UpscaledTarget, upscaleRtDesc, FilterMode.Point, TextureWrapMode.Clamp, name: "_UpscaledTexture");
  1526. var fsrInputSize = new Vector2(cameraData.cameraTargetDescriptor.width, cameraData.cameraTargetDescriptor.height);
  1527. var fsrOutputSize = new Vector2(cameraData.pixelWidth, cameraData.pixelHeight);
  1528. FSRUtils.SetEasuConstants(cmd, fsrInputSize, fsrInputSize, fsrOutputSize);
  1529. Blitter.BlitCameraTexture(cmd, sourceTex, m_UpscaledTarget, colorLoadAction, RenderBufferStoreAction.Store, m_Materials.easu, 0);
  1530. // RCAS
  1531. // Use the override value if it's available, otherwise use the default.
  1532. float sharpness = cameraData.fsrOverrideSharpness ? cameraData.fsrSharpness : FSRUtils.kDefaultSharpnessLinear;
1533. // Set up the parameters for the RCAS pass unless the sharpness value indicates that it won't have any effect.
  1534. if (cameraData.fsrSharpness > 0.0f)
  1535. {
  1536. // RCAS is performed during the final post blit, but we set up the parameters here for better logical grouping.
  1537. material.EnableKeyword(requireHDROutput ? ShaderKeywordStrings.EasuRcasAndHDRInput : ShaderKeywordStrings.Rcas);
  1538. FSRUtils.SetRcasConstantsLinear(cmd, sharpness);
  1539. }
  1540. // Update the source texture for the next operation
  1541. sourceTex = m_UpscaledTarget;
  1542. PostProcessUtils.SetSourceSize(cmd, m_UpscaledTarget);
  1543. break;
  1544. }
  1545. }
  1546. break;
  1547. }
  1548. case ImageScalingMode.Downscaling:
  1549. {
  1550. // In the downscaling case, we don't perform any sort of filter override logic since we always want linear filtering
  1551. // and it's already the default option in the shader.
  1552. // Also disable TAA post sharpening pass when downscaling.
  1553. isTaaSharpeningEnabled = false;
  1554. break;
  1555. }
  1556. }
  1557. }
  1558. else if (isFxaaEnabled)
  1559. {
  1560. // In unscaled renders, FXAA can be safely performed in the FinalPost shader
  1561. material.EnableKeyword(ShaderKeywordStrings.Fxaa);
  1562. }
  1563. // Reuse RCAS as a standalone sharpening filter for TAA.
  1564. // If FSR is enabled then it overrides the TAA setting and we skip it.
  1565. if(isTaaSharpeningEnabled)
  1566. {
  1567. material.EnableKeyword(ShaderKeywordStrings.Rcas);
  1568. FSRUtils.SetRcasConstantsLinear(cmd, cameraData.taaSettings.contrastAdaptiveSharpening);
  1569. }
  1570. var cameraTarget = RenderingUtils.GetCameraTargetIdentifier(ref renderingData);
  1571. if (resolveToDebugScreen)
  1572. {
  1573. // Blit to the debugger texture instead of the camera target
  1574. Blitter.BlitCameraTexture(cmd, sourceTex, debugHandler.DebugScreenColorHandle, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, material, 0);
  1575. // Disable obsolete warning for internal usage
  1576. #pragma warning disable CS0618
  1577. cameraData.renderer.ConfigureCameraTarget(debugHandler.DebugScreenColorHandle, debugHandler.DebugScreenDepthHandle);
  1578. #pragma warning restore CS0618
  1579. }
  1580. else
  1581. {
  1582. // Get RTHandle alias to use RTHandle apis
  1583. RTHandleStaticHelpers.SetRTHandleStaticWrapper(cameraTarget);
  1584. var cameraTargetHandle = RTHandleStaticHelpers.s_RTHandleWrapper;
  1585. RenderingUtils.FinalBlit(cmd, cameraData, sourceTex, cameraTargetHandle, colorLoadAction, RenderBufferStoreAction.Store, material, 0);
  1586. }
  1587. }
  1588. #endregion
  1589. #region Internal utilities
  1590. class MaterialLibrary
  1591. {
  1592. public readonly Material stopNaN;
  1593. public readonly Material subpixelMorphologicalAntialiasing;
  1594. public readonly Material gaussianDepthOfField;
  1595. public readonly Material gaussianDepthOfFieldCoC;
  1596. public readonly Material bokehDepthOfField;
  1597. public readonly Material bokehDepthOfFieldCoC;
  1598. public readonly Material cameraMotionBlur;
  1599. public readonly Material paniniProjection;
  1600. public readonly Material bloom;
  1601. public readonly Material[] bloomUpsample;
  1602. public readonly Material temporalAntialiasing;
  1603. public readonly Material scalingSetup;
  1604. public readonly Material easu;
  1605. public readonly Material uber;
  1606. public readonly Material finalPass;
  1607. public readonly Material lensFlareDataDriven;
  1608. public readonly Material lensFlareScreenSpace;
  1609. public MaterialLibrary(PostProcessData data)
  1610. {
  1611. // NOTE NOTE NOTE NOTE NOTE NOTE
  1612. // If you create something here you must also destroy it in Cleanup()
  1613. // or it will leak during enter/leave play mode cycles
  1614. // NOTE NOTE NOTE NOTE NOTE NOTE
  1615. stopNaN = Load(data.shaders.stopNanPS);
  1616. subpixelMorphologicalAntialiasing = Load(data.shaders.subpixelMorphologicalAntialiasingPS);
  1617. gaussianDepthOfField = Load(data.shaders.gaussianDepthOfFieldPS);
  1618. gaussianDepthOfFieldCoC = Load(data.shaders.gaussianDepthOfFieldPS);
  1619. bokehDepthOfField = Load(data.shaders.bokehDepthOfFieldPS);
  1620. bokehDepthOfFieldCoC = Load(data.shaders.bokehDepthOfFieldPS);
  1621. cameraMotionBlur = Load(data.shaders.cameraMotionBlurPS);
  1622. paniniProjection = Load(data.shaders.paniniProjectionPS);
  1623. bloom = Load(data.shaders.bloomPS);
  1624. temporalAntialiasing = Load(data.shaders.temporalAntialiasingPS);
  1625. scalingSetup = Load(data.shaders.scalingSetupPS);
  1626. easu = Load(data.shaders.easuPS);
  1627. uber = Load(data.shaders.uberPostPS);
  1628. finalPass = Load(data.shaders.finalPostPassPS);
  1629. lensFlareDataDriven = Load(data.shaders.LensFlareDataDrivenPS);
  1630. lensFlareScreenSpace = Load(data.shaders.LensFlareScreenSpacePS);
  1631. bloomUpsample = new Material[k_MaxPyramidSize];
  1632. for (uint i = 0; i < k_MaxPyramidSize; ++i)
  1633. bloomUpsample[i] = Load(data.shaders.bloomPS);
  1634. }
  1635. Material Load(Shader shader)
  1636. {
  1637. if (shader == null)
  1638. {
1639. Debug.LogError("Missing shader. PostProcessing render passes will not execute. Check for missing reference in the renderer resources.");
  1640. return null;
  1641. }
  1642. else if (!shader.isSupported)
  1643. {
  1644. return null;
  1645. }
  1646. return CoreUtils.CreateEngineMaterial(shader);
  1647. }
  1648. internal void Cleanup()
  1649. {
  1650. CoreUtils.Destroy(stopNaN);
  1651. CoreUtils.Destroy(subpixelMorphologicalAntialiasing);
  1652. CoreUtils.Destroy(gaussianDepthOfField);
  1653. CoreUtils.Destroy(gaussianDepthOfFieldCoC);
  1654. CoreUtils.Destroy(bokehDepthOfField);
  1655. CoreUtils.Destroy(bokehDepthOfFieldCoC);
  1656. CoreUtils.Destroy(cameraMotionBlur);
  1657. CoreUtils.Destroy(paniniProjection);
  1658. CoreUtils.Destroy(bloom);
  1659. CoreUtils.Destroy(temporalAntialiasing);
  1660. CoreUtils.Destroy(scalingSetup);
  1661. CoreUtils.Destroy(easu);
  1662. CoreUtils.Destroy(uber);
  1663. CoreUtils.Destroy(finalPass);
  1664. CoreUtils.Destroy(lensFlareDataDriven);
  1665. CoreUtils.Destroy(lensFlareScreenSpace);
  1666. for (uint i = 0; i < k_MaxPyramidSize; ++i)
  1667. CoreUtils.Destroy(bloomUpsample[i]);
  1668. }
  1669. }
1670. // Precomputed shader ids to save some CPU cycles (mostly affects mobile)
  1671. static class ShaderConstants
  1672. {
  1673. public static readonly int _TempTarget = Shader.PropertyToID("_TempTarget");
  1674. public static readonly int _TempTarget2 = Shader.PropertyToID("_TempTarget2");
  1675. public static readonly int _StencilRef = Shader.PropertyToID("_StencilRef");
  1676. public static readonly int _StencilMask = Shader.PropertyToID("_StencilMask");
  1677. public static readonly int _FullCoCTexture = Shader.PropertyToID("_FullCoCTexture");
  1678. public static readonly int _HalfCoCTexture = Shader.PropertyToID("_HalfCoCTexture");
  1679. public static readonly int _DofTexture = Shader.PropertyToID("_DofTexture");
  1680. public static readonly int _CoCParams = Shader.PropertyToID("_CoCParams");
  1681. public static readonly int _BokehKernel = Shader.PropertyToID("_BokehKernel");
  1682. public static readonly int _BokehConstants = Shader.PropertyToID("_BokehConstants");
  1683. public static readonly int _PongTexture = Shader.PropertyToID("_PongTexture");
  1684. public static readonly int _PingTexture = Shader.PropertyToID("_PingTexture");
  1685. public static readonly int _Metrics = Shader.PropertyToID("_Metrics");
  1686. public static readonly int _AreaTexture = Shader.PropertyToID("_AreaTexture");
  1687. public static readonly int _SearchTexture = Shader.PropertyToID("_SearchTexture");
  1688. public static readonly int _EdgeTexture = Shader.PropertyToID("_EdgeTexture");
  1689. public static readonly int _BlendTexture = Shader.PropertyToID("_BlendTexture");
  1690. public static readonly int _ColorTexture = Shader.PropertyToID("_ColorTexture");
  1691. public static readonly int _Params = Shader.PropertyToID("_Params");
  1692. public static readonly int _SourceTexLowMip = Shader.PropertyToID("_SourceTexLowMip");
  1693. public static readonly int _Bloom_Params = Shader.PropertyToID("_Bloom_Params");
  1694. public static readonly int _Bloom_RGBM = Shader.PropertyToID("_Bloom_RGBM");
  1695. public static readonly int _Bloom_Texture = Shader.PropertyToID("_Bloom_Texture");
  1696. public static readonly int _LensDirt_Texture = Shader.PropertyToID("_LensDirt_Texture");
  1697. public static readonly int _LensDirt_Params = Shader.PropertyToID("_LensDirt_Params");
  1698. public static readonly int _LensDirt_Intensity = Shader.PropertyToID("_LensDirt_Intensity");
  1699. public static readonly int _Distortion_Params1 = Shader.PropertyToID("_Distortion_Params1");
  1700. public static readonly int _Distortion_Params2 = Shader.PropertyToID("_Distortion_Params2");
  1701. public static readonly int _Chroma_Params = Shader.PropertyToID("_Chroma_Params");
  1702. public static readonly int _Vignette_Params1 = Shader.PropertyToID("_Vignette_Params1");
  1703. public static readonly int _Vignette_Params2 = Shader.PropertyToID("_Vignette_Params2");
  1704. public static readonly int _Vignette_ParamsXR = Shader.PropertyToID("_Vignette_ParamsXR");
  1705. public static readonly int _Lut_Params = Shader.PropertyToID("_Lut_Params");
  1706. public static readonly int _UserLut_Params = Shader.PropertyToID("_UserLut_Params");
  1707. public static readonly int _InternalLut = Shader.PropertyToID("_InternalLut");
  1708. public static readonly int _UserLut = Shader.PropertyToID("_UserLut");
  1709. public static readonly int _DownSampleScaleFactor = Shader.PropertyToID("_DownSampleScaleFactor");
  1710. public static readonly int _FlareOcclusionRemapTex = Shader.PropertyToID("_FlareOcclusionRemapTex");
  1711. public static readonly int _FlareOcclusionTex = Shader.PropertyToID("_FlareOcclusionTex");
  1712. public static readonly int _FlareOcclusionIndex = Shader.PropertyToID("_FlareOcclusionIndex");
  1713. public static readonly int _FlareTex = Shader.PropertyToID("_FlareTex");
  1714. public static readonly int _FlareColorValue = Shader.PropertyToID("_FlareColorValue");
  1715. public static readonly int _FlareData0 = Shader.PropertyToID("_FlareData0");
  1716. public static readonly int _FlareData1 = Shader.PropertyToID("_FlareData1");
  1717. public static readonly int _FlareData2 = Shader.PropertyToID("_FlareData2");
  1718. public static readonly int _FlareData3 = Shader.PropertyToID("_FlareData3");
  1719. public static readonly int _FlareData4 = Shader.PropertyToID("_FlareData4");
  1720. public static readonly int _FlareData5 = Shader.PropertyToID("_FlareData5");
  1721. public static readonly int _FullscreenProjMat = Shader.PropertyToID("_FullscreenProjMat");
  1722. public static int[] _BloomMipUp;
  1723. public static int[] _BloomMipDown;
  1724. }
  1725. #endregion
  1726. }
  1727. }