
UniversalRenderPipeline.cs

using System;
using Unity.Collections;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.Rendering.Universal;
#endif
using UnityEngine.Scripting.APIUpdating;
using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Profiling;
using static UnityEngine.Camera;

namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// The main class for the Universal Render Pipeline (URP).
    /// </summary>
    public sealed partial class UniversalRenderPipeline : RenderPipeline
    {
        /// <summary>
        /// The shader tag used in the Universal Render Pipeline (URP)
        /// </summary>
        public const string k_ShaderTagName = "UniversalPipeline";

        // Cache camera data to avoid per-frame allocations.
        internal static class CameraMetadataCache
        {
            public class CameraMetadataCacheEntry
            {
                public string name;
                public ProfilingSampler sampler;
            }

            static Dictionary<int, CameraMetadataCacheEntry> s_MetadataCache = new();
            static readonly CameraMetadataCacheEntry k_NoAllocEntry = new() { name = "Unknown", sampler = new ProfilingSampler("Unknown") };

            public static CameraMetadataCacheEntry GetCached(Camera camera)
            {
#if UNIVERSAL_PROFILING_NO_ALLOC
                return k_NoAllocEntry;
#else
                int cameraId = camera.GetHashCode();
                if (!s_MetadataCache.TryGetValue(cameraId, out CameraMetadataCacheEntry result))
                {
                    string cameraName = camera.name; // Warning: camera.name allocates
                    result = new CameraMetadataCacheEntry
                    {
                        name = cameraName,
                        sampler = new ProfilingSampler(
                            $"{nameof(UniversalRenderPipeline)}.{nameof(RenderSingleCameraInternal)}: {cameraName}")
                    };
                    s_MetadataCache.Add(cameraId, result);
                }
                return result;
#endif
            }
        }
        internal static class Profiling
        {
            public static class Pipeline
            {
                const string k_Name = nameof(UniversalRenderPipeline);
                public static readonly ProfilingSampler initializeCameraData = new ProfilingSampler($"{k_Name}.{nameof(CreateCameraData)}");
                public static readonly ProfilingSampler initializeStackedCameraData = new ProfilingSampler($"{k_Name}.{nameof(InitializeStackedCameraData)}");
                public static readonly ProfilingSampler initializeAdditionalCameraData = new ProfilingSampler($"{k_Name}.{nameof(InitializeAdditionalCameraData)}");
                public static readonly ProfilingSampler initializeRenderingData = new ProfilingSampler($"{k_Name}.{nameof(CreateRenderingData)}");
                public static readonly ProfilingSampler initializeShadowData = new ProfilingSampler($"{k_Name}.{nameof(CreateShadowData)}");
                public static readonly ProfilingSampler initializeLightData = new ProfilingSampler($"{k_Name}.{nameof(CreateLightData)}");
                public static readonly ProfilingSampler buildAdditionalLightsShadowAtlasLayout = new ProfilingSampler($"{k_Name}.{nameof(BuildAdditionalLightsShadowAtlasLayout)}");
                public static readonly ProfilingSampler getPerObjectLightFlags = new ProfilingSampler($"{k_Name}.{nameof(GetPerObjectLightFlags)}");
                public static readonly ProfilingSampler getMainLightIndex = new ProfilingSampler($"{k_Name}.{nameof(GetMainLightIndex)}");
                public static readonly ProfilingSampler setupPerFrameShaderConstants = new ProfilingSampler($"{k_Name}.{nameof(SetupPerFrameShaderConstants)}");
                public static readonly ProfilingSampler setupPerCameraShaderConstants = new ProfilingSampler($"{k_Name}.{nameof(SetupPerCameraShaderConstants)}");

                public static class Renderer
                {
                    const string k_Name = nameof(ScriptableRenderer);
                    public static readonly ProfilingSampler setupCullingParameters = new ProfilingSampler($"{k_Name}.{nameof(ScriptableRenderer.SetupCullingParameters)}");
                    public static readonly ProfilingSampler setup = new ProfilingSampler($"{k_Name}.{nameof(ScriptableRenderer.Setup)}");
                };

                public static class Context
                {
                    const string k_Name = nameof(ScriptableRenderContext);
                    public static readonly ProfilingSampler submit = new ProfilingSampler($"{k_Name}.{nameof(ScriptableRenderContext.Submit)}");
                };
            };
        }
        /// <summary>
        /// The maximum amount of bias allowed for shadows.
        /// </summary>
        public static float maxShadowBias
        {
            get => 10.0f;
        }

        /// <summary>
        /// The minimum value allowed for render scale.
        /// </summary>
        public static float minRenderScale
        {
            get => 0.1f;
        }

        /// <summary>
        /// The maximum value allowed for render scale.
        /// </summary>
        public static float maxRenderScale
        {
            get => 2.0f;
        }
        /// <summary>
        /// The max number of iterations allowed when calculating the enclosing sphere.
        /// </summary>
        public static int maxNumIterationsEnclosingSphere
        {
            get => 1000;
        }
        /// <summary>
        /// The max number of lights that can be shaded per object (in the for loop in the shader).
        /// </summary>
        public static int maxPerObjectLights
        {
            get => 8;
        }
        /// <summary>
        /// The max number of additional lights that can affect each GameObject.
        /// </summary>
        public static int maxVisibleAdditionalLights
        {
            get
            {
                // Must match: Input.hlsl, MAX_VISIBLE_LIGHTS
                bool isMobileOrMobileBuildTarget = PlatformAutoDetect.isShaderAPIMobileDefined;
                if (isMobileOrMobileBuildTarget && (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3 && Graphics.minOpenGLESVersion <= OpenGLESVersion.OpenGLES30))
                    return ShaderOptions.k_MaxVisibleLightCountLowEndMobile;

                // GLES can be selected as the platform on Windows (not a mobile platform), but its uniform buffer size is limited, so we must use a low light count.
                // WebGPU's minimal limits are based on mobile rather than desktop, so it also needs to assume mobile.
                return (isMobileOrMobileBuildTarget || SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore || SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3 || SystemInfo.graphicsDeviceType == GraphicsDeviceType.WebGPU)
                    ? ShaderOptions.k_MaxVisibleLightCountMobile : ShaderOptions.k_MaxVisibleLightCountDesktop;
            }
        }
        // Match with values in Input.hlsl
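        // Note (illustrative): the tile masks below are stored in 32-bit words, so the light count is
        // rounded up to a whole number of words. For example, with 33 visible additional lights,
        // lightsPerTile = ((33 + 31) / 32) * 32 = 64 mask bits (two words) per tile.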
        internal static int lightsPerTile => ((maxVisibleAdditionalLights + 31) / 32) * 32;
        internal static int maxZBinWords => 1024 * 4;
        internal static int maxTileWords => (maxVisibleAdditionalLights <= 32 ? 1024 : 4096) * 4;
        internal static int maxVisibleReflectionProbes => Math.Min(maxVisibleAdditionalLights, 64);

        internal const int k_DefaultRenderingLayerMask = 0x00000001;
        private readonly DebugDisplaySettingsUI m_DebugDisplaySettingsUI = new DebugDisplaySettingsUI();

        private UniversalRenderPipelineGlobalSettings m_GlobalSettings;

        internal UniversalRenderPipelineRuntimeTextures runtimeTextures { get; private set; }

        /// <summary>
        /// The default Render Pipeline Global Settings.
        /// </summary>
        public override RenderPipelineGlobalSettings defaultSettings => m_GlobalSettings;

        // flag to keep track of depth buffer requirements by any of the cameras in the stack
        internal static bool cameraStackRequiresDepthForPostprocessing = false;

        internal static RenderGraph s_RenderGraph;
        internal static RTHandleResourcePool s_RTHandlePool;

        // internal for tests
        internal static bool useRenderGraph;
        // Store the value locally on the instance, as the Render Pipeline Asset data might change
        // before the disposal of the asset, which would leak some APV resources.
        internal bool apvIsEnabled = false;
        // Reference to the asset associated with the pipeline.
        // When a pipeline asset is switched in `GraphicsSettings`, the `UniversalRenderPipelineCore.asset` member
        // becomes unreliable for the purpose of pipeline and renderer clean-up in the `Dispose` call from
        // `RenderPipelineManager.CleanupRenderPipeline`.
        // This field provides the correct reference for the purpose of cleaning up the renderers on this pipeline
        // asset.
        private readonly UniversalRenderPipelineAsset pipelineAsset;

        /// <inheritdoc/>
        public override string ToString() => pipelineAsset?.ToString();

        /// <summary>
        /// Creates a new <c>UniversalRenderPipeline</c> instance.
        /// </summary>
        /// <param name="asset">The <c>UniversalRenderPipelineAsset</c> asset to initialize the pipeline.</param>
        /// <seealso cref="RenderPassEvent"/>
        public UniversalRenderPipeline(UniversalRenderPipelineAsset asset)
        {
            pipelineAsset = asset;

            m_GlobalSettings = UniversalRenderPipelineGlobalSettings.instance;

            runtimeTextures = GraphicsSettings.GetRenderPipelineSettings<UniversalRenderPipelineRuntimeTextures>();

            var shaders = GraphicsSettings.GetRenderPipelineSettings<UniversalRenderPipelineRuntimeShaders>();
            Blitter.Initialize(shaders.coreBlitPS, shaders.coreBlitColorAndDepthPS);

            SetSupportedRenderingFeatures(pipelineAsset);

            // Initial state of the RTHandle system.
            // We initialize to screen width/height to avoid multiple realloc that can lead to inflated memory usage (as releasing of memory is delayed).
            RTHandles.Initialize(Screen.width, Screen.height);

            // Init global shader keywords
            ShaderGlobalKeywords.InitializeShaderGlobalKeywords();

            GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
            // QualitySettings.antiAliasing uses the value 0 for the disabled state, whereas URP uses 1.
            int qualitySettingsMsaaSampleCount = QualitySettings.antiAliasing > 0 ? QualitySettings.antiAliasing : 1;
            bool msaaSampleCountNeedsUpdate = qualitySettingsMsaaSampleCount != asset.msaaSampleCount;

            // Let engine know we have MSAA on for cases where we support MSAA backbuffer
            if (msaaSampleCountNeedsUpdate)
            {
                QualitySettings.antiAliasing = asset.msaaSampleCount;
            }
            var defaultVolumeProfileSettings = GraphicsSettings.GetRenderPipelineSettings<URPDefaultVolumeProfileSettings>();
            VolumeManager.instance.Initialize(defaultVolumeProfileSettings.volumeProfile, asset.volumeProfile);

            // Configure initial XR settings
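            // Note: QualitySettings.antiAliasing is 0 when MSAA is disabled, and Mathf.NextPowerOfTwo(0)
            // returns 0, so the clamp below raises it to MSAASamples.None (1); values above 8 are
            // clamped down to MSAA8x.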
            MSAASamples msaaSamples = (MSAASamples)Mathf.Clamp(Mathf.NextPowerOfTwo(QualitySettings.antiAliasing), (int)MSAASamples.None, (int)MSAASamples.MSAA8x);
            XRSystem.SetDisplayMSAASamples(msaaSamples);
            XRSystem.SetRenderScale(asset.renderScale);

            Lightmapping.SetDelegate(lightsDelegate);

            CameraCaptureBridge.enabled = true;

            RenderingUtils.ClearSystemInfoCache();

            DecalProjector.defaultMaterial = asset.decalMaterial;

            s_RenderGraph = new RenderGraph("URPRenderGraph");
            useRenderGraph = !GraphicsSettings.GetRenderPipelineSettings<RenderGraphSettings>().enableRenderCompatibilityMode;
#if !UNITY_EDITOR
            Debug.Log($"RenderGraph is now {(useRenderGraph ? "enabled" : "disabled")}.");
#endif

            s_RTHandlePool = new RTHandleResourcePool();

            DebugManager.instance.RefreshEditor();
#if DEVELOPMENT_BUILD || UNITY_EDITOR
            m_DebugDisplaySettingsUI.RegisterDebug(UniversalRenderPipelineDebugDisplaySettings.Instance);
#endif

            QualitySettings.enableLODCrossFade = asset.enableLODCrossFade;

            apvIsEnabled = asset != null && asset.lightProbeSystem == LightProbeSystem.ProbeVolumes;
            SupportedRenderingFeatures.active.overridesLightProbeSystem = apvIsEnabled;
            SupportedRenderingFeatures.active.skyOcclusion = apvIsEnabled;
            if (apvIsEnabled)
            {
                ProbeReferenceVolume.instance.Initialize(new ProbeVolumeSystemParameters
                {
                    memoryBudget = asset.probeVolumeMemoryBudget,
                    blendingMemoryBudget = asset.probeVolumeBlendingMemoryBudget,
                    shBands = asset.probeVolumeSHBands,
                    supportGPUStreaming = asset.supportProbeVolumeGPUStreaming,
                    supportDiskStreaming = asset.supportProbeVolumeDiskStreaming,
                    supportScenarios = asset.supportProbeVolumeScenarios,
                    supportScenarioBlending = asset.supportProbeVolumeScenarioBlending,
#pragma warning disable 618
                    sceneData = m_GlobalSettings.GetOrCreateAPVSceneData(),
#pragma warning restore 618
                });
            }
        }
        /// <inheritdoc/>
        protected override void Dispose(bool disposing)
        {
            if (apvIsEnabled)
            {
                ProbeReferenceVolume.instance.Cleanup();
            }

#if DEVELOPMENT_BUILD || UNITY_EDITOR
            m_DebugDisplaySettingsUI.UnregisterDebug();
#endif

            Blitter.Cleanup();

            base.Dispose(disposing);

            pipelineAsset.DestroyRenderers();

            SupportedRenderingFeatures.active = new SupportedRenderingFeatures();
            ShaderData.instance.Dispose();
            XRSystem.Dispose();

            s_RenderGraph.Cleanup();
            s_RenderGraph = null;

            s_RTHandlePool.Cleanup();
            s_RTHandlePool = null;

#if UNITY_EDITOR
            SceneViewDrawMode.ResetDrawMode();
#endif
            Lightmapping.ResetDelegate();
            CameraCaptureBridge.enabled = false;

            ConstantBuffer.ReleaseAll();
            VolumeManager.instance.Deinitialize();

            DisposeAdditionalCameraData();
            AdditionalLightsShadowAtlasLayout.ClearStaticCaches();
        }

        // If the URP gets destroyed, we must clean up all the added URP specific camera data and
        // non-GC resources to avoid leaking them.
        private void DisposeAdditionalCameraData()
        {
            foreach (var c in Camera.allCameras)
            {
                if (c.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData))
                {
                    additionalCameraData.historyManager.Dispose();
                }
            }
        }
        readonly struct CameraRenderingScope : IDisposable
        {
            static readonly ProfilingSampler beginCameraRenderingSampler = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(BeginCameraRendering)}");
            static readonly ProfilingSampler endCameraRenderingSampler = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(EndCameraRendering)}");

            private readonly ScriptableRenderContext m_Context;
            private readonly Camera m_Camera;

            public CameraRenderingScope(ScriptableRenderContext context, Camera camera)
            {
                using (new ProfilingScope(beginCameraRenderingSampler))
                {
                    m_Context = context;
                    m_Camera = camera;

                    BeginCameraRendering(context, camera);
                }
            }

            public void Dispose()
            {
                using (new ProfilingScope(endCameraRenderingSampler))
                {
                    EndCameraRendering(m_Context, m_Camera);
                }
            }
        }

        readonly struct ContextRenderingScope : IDisposable
        {
            static readonly ProfilingSampler beginContextRenderingSampler = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(BeginContextRendering)}");
            static readonly ProfilingSampler endContextRenderingSampler = new ProfilingSampler($"{nameof(RenderPipeline)}.{nameof(EndContextRendering)}");

            private readonly ScriptableRenderContext m_Context;
            private readonly List<Camera> m_Cameras;

            public ContextRenderingScope(ScriptableRenderContext context, List<Camera> cameras)
            {
                m_Context = context;
                m_Cameras = cameras;

                using (new ProfilingScope(beginContextRenderingSampler))
                {
                    BeginContextRendering(m_Context, m_Cameras);
                }
            }

            public void Dispose()
            {
                using (new ProfilingScope(endContextRenderingSampler))
                {
                    EndContextRendering(m_Context, m_Cameras);
                }
            }
        }
#if UNITY_2021_1_OR_NEWER
        /// <inheritdoc/>
        protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
        {
            Render(renderContext, new List<Camera>(cameras));
        }
#endif

#if UNITY_2021_1_OR_NEWER
        /// <inheritdoc/>
        protected override void Render(ScriptableRenderContext renderContext, List<Camera> cameras)
#else
        /// <inheritdoc/>
        protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
#endif
        {
            SetHDRState(cameras);

#if UNITY_2021_1_OR_NEWER
            int cameraCount = cameras.Count;
#else
            int cameraCount = cameras.Length;
#endif

            // For XR, HDR and no camera cases, UI Overlay ownership must be enforced
            AdjustUIOverlayOwnership(cameraCount);

            GPUResidentDrawer.ReinitializeIfNeeded();

            // TODO: Would be better to add Profiling name hooks into RenderPipelineManager.
            // C#8 feature, only in >= 2020.2
            using var profScope = new ProfilingScope(ProfilingSampler.Get(URPProfileId.UniversalRenderTotal));

            using (new ContextRenderingScope(renderContext, cameras))
            {
                GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
                GraphicsSettings.lightsUseColorTemperature = true;
                SetupPerFrameShaderConstants();
                XRSystem.SetDisplayMSAASamples((MSAASamples)asset.msaaSampleCount);

#if DEVELOPMENT_BUILD || UNITY_EDITOR
                if (DebugManager.instance.isAnyDebugUIActive)
                    UniversalRenderPipelineDebugDisplaySettings.Instance.UpdateDisplayStats();

                // This is for texture streaming
                UniversalRenderPipelineDebugDisplaySettings.Instance.UpdateMaterials();
#endif

                // URP uses the camera's allowDynamicResolution flag to decide if useDynamicScale should be enabled for camera render targets.
                // However, the RTHandle system has an additional setting that controls if useDynamicScale will be set for render targets allocated via RTHandles.
                // In order to avoid issues at runtime, we must make the RTHandle system setting consistent with URP's logic. URP already synchronizes the setting
                // during initialization, but unfortunately it's possible for external code to overwrite the setting due to RTHandle state being global.
                // The best we can do to avoid errors in this situation is to ensure the state is set to the correct value every time we perform rendering.
                RTHandles.SetHardwareDynamicResolutionState(true);

                SortCameras(cameras);
#if UNITY_2021_1_OR_NEWER
                for (int i = 0; i < cameras.Count; ++i)
#else
                for (int i = 0; i < cameras.Length; ++i)
#endif
                {
                    var camera = cameras[i];
                    if (IsGameCamera(camera))
                    {
                        RenderCameraStack(renderContext, camera);
                    }
                    else
                    {
                        using (new CameraRenderingScope(renderContext, camera))
                        {
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                            // It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
                            // N.B.: We aren't expecting an XR camera at this stage
                            VFX.VFXManager.PrepareCamera(camera);
#endif
                            UpdateVolumeFramework(camera, null);

                            RenderSingleCameraInternal(renderContext, camera);
                        }
                    }
                }

                s_RenderGraph.EndFrame();
                s_RTHandlePool.PurgeUnusedResources(Time.frameCount);
            }

#if ENABLE_SHADER_DEBUG_PRINT
            ShaderDebugPrintManager.instance.EndFrame();
#endif
        }
        /// <summary>
        /// Check whether RenderRequest is supported
        /// </summary>
        /// <param name="camera"></param>
        /// <param name="data"></param>
        /// <typeparam name="RequestData"></typeparam>
        /// <returns></returns>
        protected override bool IsRenderRequestSupported<RequestData>(Camera camera, RequestData data)
        {
            if (data is StandardRequest)
                return true;
            else if (data is SingleCameraRequest)
                return true;

            return false;
        }
        /// <summary>
        /// Process a render request
        /// </summary>
        /// <param name="context"></param>
        /// <param name="camera"></param>
        /// <param name="renderRequest"></param>
        /// <typeparam name="RequestData"></typeparam>
        protected override void ProcessRenderRequests<RequestData>(ScriptableRenderContext context, Camera camera, RequestData renderRequest)
        {
            StandardRequest standardRequest = renderRequest as StandardRequest;
            SingleCameraRequest singleRequest = renderRequest as SingleCameraRequest;

            if (standardRequest != null || singleRequest != null)
            {
                RenderTexture destination = standardRequest != null ? standardRequest.destination : singleRequest.destination;

                //don't go further if no destination texture
                if (destination == null)
                {
                    Debug.LogError("RenderRequest has no destination texture, set one before sending request");
                    return;
                }

                int mipLevel = standardRequest != null ? standardRequest.mipLevel : singleRequest.mipLevel;
                int slice = standardRequest != null ? standardRequest.slice : singleRequest.slice;
                int face = standardRequest != null ? (int)standardRequest.face : (int)singleRequest.face;

                //store data that will be changed
                var originalTarget = camera.targetTexture;

                //set data
                RenderTexture temporaryRT = null;
                RenderTextureDescriptor RTDesc = destination.descriptor;
                //for cubemaps, start from a default-constructed RenderTextureDescriptor: it doesn't enable allowVerticalFlip, which matters for cubemaps
                if (destination.dimension == TextureDimension.Cube)
                    RTDesc = new RenderTextureDescriptor();
                RTDesc.colorFormat = destination.format;
                RTDesc.volumeDepth = 1;
                RTDesc.msaaSamples = destination.descriptor.msaaSamples;
                RTDesc.dimension = TextureDimension.Tex2D;
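                //note: each mip level halves both dimensions, e.g. mip 2 of a 1024x1024 destination is 256x256;
                //the Mathf.Max calls keep the size at least 1x1 for very high mip levels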
                RTDesc.width = destination.width / (int)Math.Pow(2, mipLevel);
                RTDesc.height = destination.height / (int)Math.Pow(2, mipLevel);
                RTDesc.width = Mathf.Max(1, RTDesc.width);
                RTDesc.height = Mathf.Max(1, RTDesc.height);

                //if mip is 0 and target is Texture2D we can immediately render to the requested destination
                if (destination.dimension != TextureDimension.Tex2D || mipLevel != 0)
                {
                    temporaryRT = RenderTexture.GetTemporary(RTDesc);
                }

                camera.targetTexture = temporaryRT ? temporaryRT : destination;

                if (standardRequest != null)
                {
                    Render(context, new Camera[] { camera });
                }
                else
                {
                    using (ListPool<Camera>.Get(out var tmp))
                    {
                        tmp.Add(camera);

                        using (new ContextRenderingScope(context, tmp))
                        using (new CameraRenderingScope(context, camera))
                        {
                            camera.gameObject.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData);
                            RenderSingleCameraInternal(context, camera, ref additionalCameraData);
                        }
                    }
                }

                if (temporaryRT)
                {
                    bool isCopySupported = false;

                    switch (destination.dimension)
                    {
                        case TextureDimension.Tex2D:
                            if ((SystemInfo.copyTextureSupport & CopyTextureSupport.Basic) != 0)
                            {
                                isCopySupported = true;
                                Graphics.CopyTexture(temporaryRT, 0, 0, destination, 0, mipLevel);
                            }
                            break;
                        case TextureDimension.Tex2DArray:
                            if ((SystemInfo.copyTextureSupport & CopyTextureSupport.DifferentTypes) != 0)
                            {
                                isCopySupported = true;
                                Graphics.CopyTexture(temporaryRT, 0, 0, destination, slice, mipLevel);
                            }
                            break;
                        case TextureDimension.Tex3D:
                            if ((SystemInfo.copyTextureSupport & CopyTextureSupport.DifferentTypes) != 0)
                            {
                                isCopySupported = true;
                                Graphics.CopyTexture(temporaryRT, 0, 0, destination, slice, mipLevel);
                            }
                            break;
                        case TextureDimension.Cube:
                            if ((SystemInfo.copyTextureSupport & CopyTextureSupport.DifferentTypes) != 0)
                            {
                                isCopySupported = true;
                                Graphics.CopyTexture(temporaryRT, 0, 0, destination, face, mipLevel);
                            }
                            break;
                        case TextureDimension.CubeArray:
                            if ((SystemInfo.copyTextureSupport & CopyTextureSupport.DifferentTypes) != 0)
                            {
                                isCopySupported = true;
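                                //cubemap arrays store 6 consecutive faces per cube, so the flat element
                                //index is slice * 6 + face (e.g. face +X of cube 1 is element 6)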
                                Graphics.CopyTexture(temporaryRT, 0, 0, destination, face + slice * 6, mipLevel);
                            }
                            break;
                        default:
                            break;
                    }

                    if (!isCopySupported)
                        Debug.LogError("RenderRequest cannot have a destination texture of this dimension: " + Enum.GetName(typeof(TextureDimension), destination.dimension));
                }

                //restore data
                camera.targetTexture = originalTarget;
                Graphics.SetRenderTarget(originalTarget);

                RenderTexture.ReleaseTemporary(temporaryRT);
            }
            else
            {
                Debug.LogWarning("RenderRequest type: " + typeof(RequestData).FullName + " is either invalid or unsupported by the current pipeline");
            }
        }
        /// <summary>
        /// Standalone camera rendering. Use this to render procedural cameras.
        /// This method doesn't call <c>BeginCameraRendering</c> and <c>EndCameraRendering</c> callbacks.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="camera">Camera to render.</param>
        /// <seealso cref="ScriptableRenderContext"/>
        [Obsolete("RenderSingleCamera is obsolete, please use RenderPipeline.SubmitRenderRequest with UniversalRenderer.SingleCameraRequest as RequestData type")]
        public static void RenderSingleCamera(ScriptableRenderContext context, Camera camera)
        {
            RenderSingleCameraInternal(context, camera);
        }

        internal static void RenderSingleCameraInternal(ScriptableRenderContext context, Camera camera)
        {
            UniversalAdditionalCameraData additionalCameraData = null;
            if (IsGameCamera(camera))
                camera.gameObject.TryGetComponent(out additionalCameraData);

            RenderSingleCameraInternal(context, camera, ref additionalCameraData);
        }

        internal static void RenderSingleCameraInternal(ScriptableRenderContext context, Camera camera, ref UniversalAdditionalCameraData additionalCameraData)
        {
            if (additionalCameraData != null && additionalCameraData.renderType != CameraRenderType.Base)
            {
                Debug.LogWarning("Only Base cameras can be rendered with standalone RenderSingleCamera. Camera will be skipped.");
                return;
            }

            var frameData = GetRenderer(camera, additionalCameraData).frameData;
            var cameraData = CreateCameraData(frameData, camera, additionalCameraData, true);
            InitializeAdditionalCameraData(camera, additionalCameraData, true, cameraData);
#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
            if (asset.useAdaptivePerformance)
                ApplyAdaptivePerformance(cameraData);
#endif
            RenderSingleCamera(context, cameraData);
        }
        static bool TryGetCullingParameters(UniversalCameraData cameraData, out ScriptableCullingParameters cullingParams)
        {
#if ENABLE_VR && ENABLE_XR_MODULE
            if (cameraData.xr.enabled)
            {
                cullingParams = cameraData.xr.cullingParams;

                // Sync the FOV on the camera to match the projection from the XR device
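                // (For a symmetric projection, m11 stores cot(fovY / 2), so fovY = 2 * atan(1 / m11).)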
                if (!cameraData.camera.usePhysicalProperties && !XRGraphicsAutomatedTests.enabled)
                    cameraData.camera.fieldOfView = Mathf.Rad2Deg * Mathf.Atan(1.0f / cullingParams.stereoProjectionMatrix.m11) * 2.0f;

                return true;
            }
#endif

            return cameraData.camera.TryGetCullingParameters(false, out cullingParams);
        }
        /// <summary>
        /// Renders a single camera. This method will do culling, setup and execution of the renderer.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="cameraData">Camera rendering data. This might contain data inherited from a base camera.</param>
        static void RenderSingleCamera(ScriptableRenderContext context, UniversalCameraData cameraData)
        {
            Camera camera = cameraData.camera;
            ScriptableRenderer renderer = cameraData.renderer;
            if (renderer == null)
            {
                Debug.LogWarning(string.Format("Trying to render {0} with an invalid renderer. Camera rendering will be skipped.", camera.name));
                return;
            }

            // Note: We are disposing frameData once this variable goes out of scope.
            using ContextContainer frameData = renderer.frameData;

            if (!TryGetCullingParameters(cameraData, out var cullingParameters))
                return;

            ScriptableRenderer.current = renderer;

#if RENDER_GRAPH_OLD_COMPILER
            s_RenderGraph.nativeRenderPassesEnabled = false;
            Debug.LogWarning("The native render pass compiler is disabled. Use this for debugging only. Mobile performance may be sub-optimal.");
#else
            s_RenderGraph.nativeRenderPassesEnabled = renderer.supportsNativeRenderPassRendergraphCompiler;
#endif
            bool isSceneViewCamera = cameraData.isSceneViewCamera;

            // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
            // Currently there's an issue which results in mismatched markers.
            // The named CommandBuffer will close its "profiling scope" on execution.
            // That will orphan ProfilingScope markers as the named CommandBuffer markers are their parents.
            // Resulting in following pattern:
            // exec(cmd.start, scope.start, cmd.end) and exec(cmd.start, scope.end, cmd.end)
            CommandBuffer cmd = CommandBufferPool.Get();

            // TODO: move skybox code from C++ to URP in order to remove the call to context.Submit() inside DrawSkyboxPass
            // Until then, we can't use nested profiling scopes with XR multipass
            CommandBuffer cmdScope = cameraData.xr.enabled ? null : cmd;

            var cameraMetadata = CameraMetadataCache.GetCached(camera);
            using (new ProfilingScope(cmdScope, cameraMetadata.sampler)) // Enqueues a "BeginSample" command into the CommandBuffer cmd
            {
                renderer.Clear(cameraData.renderType);

                using (new ProfilingScope(Profiling.Pipeline.Renderer.setupCullingParameters))
                {
                    var legacyCameraData = new CameraData(frameData);

                    renderer.OnPreCullRenderPasses(in legacyCameraData);
                    renderer.SetupCullingParameters(ref cullingParameters, ref legacyCameraData);
                }

                context.ExecuteCommandBuffer(cmd); // Send all the commands enqueued so far in the CommandBuffer cmd, to the ScriptableRenderContext context
                cmd.Clear();

                SetupPerCameraShaderConstants(cmd);

                bool supportProbeVolume = asset != null && asset.lightProbeSystem == LightProbeSystem.ProbeVolumes;
                ProbeReferenceVolume.instance.SetEnableStateFromSRP(supportProbeVolume);
                ProbeReferenceVolume.instance.SetVertexSamplingEnabled(asset.shEvalMode == ShEvalMode.PerVertex || asset.shEvalMode == ShEvalMode.Mixed);
                // We need to verify and flush any pending asset loading for probe volume.
                if (supportProbeVolume && ProbeReferenceVolume.instance.isInitialized)
                {
                    ProbeReferenceVolume.instance.PerformPendingOperations();
                    if (camera.cameraType != CameraType.Reflection &&
                        camera.cameraType != CameraType.Preview)
                    {
                        // TODO: Move this to one call for all cameras
                        ProbeReferenceVolume.instance.UpdateCellStreaming(cmd, camera);
                    }
                }

                // Emit scene/game view UI. The main game camera UI is always rendered, so this needs to be handled only for different camera types
                if (camera.cameraType == CameraType.Reflection || camera.cameraType == CameraType.Preview)
                    ScriptableRenderContext.EmitGeometryForCamera(camera);
#if UNITY_EDITOR
                else if (isSceneViewCamera)
                    ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
#endif

                // do AdaptiveProbeVolume stuff
                if (supportProbeVolume)
                    ProbeReferenceVolume.instance.BindAPVRuntimeResources(cmd, true);

                // Must be called before culling because it emits intermediate renderers via Graphics.DrawInstanced.
                ProbeReferenceVolume.instance.RenderDebug(camera, Texture2D.whiteTexture);

                // Update camera motion tracking (prev matrices) from cameraData.
                // Called and updated only once, as the same camera can be rendered multiple times.
                // NOTE: Tracks only the current (this) camera, not shadow views or any other offscreen views.
                // NOTE: Shared between both Execute and Render (RG) paths.
                if (camera.TryGetComponent<UniversalAdditionalCameraData>(out var additionalCameraData))
                    additionalCameraData.motionVectorsPersistentData.Update(cameraData);

                // TODO: Move into the renderer. Problem: It modifies the AdditionalCameraData which is copied into RenderingData which causes value divergence for value types.
                // Update TAA persistent data based on cameraData. Most importantly resize the history render targets.
                // NOTE: Persistent data is kept over multiple frames. Its life-time differs from typical resources.
                // NOTE: Shared between both Execute and Render (RG) paths.
                if (cameraData.taaHistory != null)
                    UpdateTemporalAATargets(cameraData);

                RTHandles.SetReferenceSize(cameraData.cameraTargetDescriptor.width, cameraData.cameraTargetDescriptor.height);

                // Do NOT use cameraData after 'InitializeRenderingData'. CameraData state may diverge otherwise.
                // RenderingData takes a copy of the CameraData.
                // UniversalRenderingData needs to be created here to avoid copying cullResults.
                var data = frameData.Create<UniversalRenderingData>();
                data.cullResults = context.Cull(ref cullingParameters);

                GPUResidentDrawer.PostCullBeginCameraRendering(new RenderRequestBatcherContext { commandBuffer = cmd });

                var isForwardPlus = cameraData.renderer is UniversalRenderer { renderingModeActual: RenderingMode.ForwardPlus };

                // Initialize all the data types required for rendering.
                UniversalLightData lightData;
                UniversalShadowData shadowData;
                using (new ProfilingScope(Profiling.Pipeline.initializeRenderingData))
                {
                    CreateUniversalResourceData(frameData);
                    lightData = CreateLightData(frameData, asset, data.cullResults.visibleLights);
                    shadowData = CreateShadowData(frameData, asset, isForwardPlus);
                    CreatePostProcessingData(frameData, asset);
                    CreateRenderingData(frameData, asset, cmd, isForwardPlus, cameraData.renderer);
                }

                RenderingData legacyRenderingData = new RenderingData(frameData);
                CheckAndApplyDebugSettings(ref legacyRenderingData);

#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
                if (asset.useAdaptivePerformance)
                    ApplyAdaptivePerformance(frameData);
#endif

                CreateShadowAtlasAndCullShadowCasters(lightData, shadowData, cameraData, ref data.cullResults, ref context);

                renderer.AddRenderPasses(ref legacyRenderingData);

                if (useRenderGraph)
                {
                    RecordAndExecuteRenderGraph(s_RenderGraph, context, renderer, cmd, cameraData.camera, cameraMetadata.name);
                    renderer.FinishRenderGraphRendering(cmd);
                }
                else
                {
                    // Disable obsolete warning for internal usage
#pragma warning disable CS0618
                    using (new ProfilingScope(Profiling.Pipeline.Renderer.setup))
                    {
                        renderer.Setup(context, ref legacyRenderingData);
                    }

                    // Timing scope inside
                    renderer.Execute(context, ref legacyRenderingData);
#pragma warning restore CS0618
                }
            } // When ProfilingSample goes out of scope, an "EndSample" command is enqueued into CommandBuffer cmd

            context.ExecuteCommandBuffer(cmd); // Sends to ScriptableRenderContext all the commands enqueued since cmd.Clear, i.e the "EndSample" command
            CommandBufferPool.Release(cmd);

            using (new ProfilingScope(Profiling.Pipeline.Context.submit))
            {
                // Render Graph will do the validation by itself, so this is redundant in that case
                if (!useRenderGraph && renderer.useRenderPassEnabled && !context.SubmitForRenderPassValidation())
                {
                    renderer.useRenderPassEnabled = false;
                    cmd.SetKeyword(ShaderGlobalKeywords.RenderPassEnabled, false);
                    Debug.LogWarning("A rendering command that is not supported inside a native RenderPass was found. Falling back to the non-RenderPass rendering path.");
                }
                context.Submit(); // Actually execute the commands that we previously sent to the ScriptableRenderContext context
            }

            ScriptableRenderer.current = null;
        }
        private static void CreateShadowAtlasAndCullShadowCasters(UniversalLightData lightData, UniversalShadowData shadowData, UniversalCameraData cameraData, ref CullingResults cullResults, ref ScriptableRenderContext context)
        {
            if (!shadowData.supportsMainLightShadows && !shadowData.supportsAdditionalLightShadows)
                return;

            if (shadowData.supportsMainLightShadows)
                InitializeMainLightShadowResolution(shadowData);

            if (shadowData.supportsAdditionalLightShadows)
                shadowData.shadowAtlasLayout = BuildAdditionalLightsShadowAtlasLayout(lightData, shadowData, cameraData);

            shadowData.visibleLightsShadowCullingInfos = ShadowCulling.CullShadowCasters(ref context, shadowData, ref shadowData.shadowAtlasLayout, ref cullResults);
        }
        /// <summary>
        /// Renders a camera stack. This method calls RenderSingleCamera for each valid camera in the stack.
        /// The last camera resolves the final target to screen.
        /// </summary>
        /// <param name="context">Render context used to record commands during execution.</param>
        /// <param name="baseCamera">Camera to render.</param>
        static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera)
        {
            using var profScope = new ProfilingScope(ProfilingSampler.Get(URPProfileId.RenderCameraStack));

            baseCamera.TryGetComponent<UniversalAdditionalCameraData>(out var baseCameraAdditionalData);

            // Overlay cameras will be rendered stacked while rendering base cameras
            if (baseCameraAdditionalData != null && baseCameraAdditionalData.renderType == CameraRenderType.Overlay)
                return;
            // Renderer contains a stack if it has additional data and the renderer supports stacking.
            // We check whether the renderer supports Base cameras, since Base is the only relevant type at this moment.
            var renderer = GetRenderer(baseCamera, baseCameraAdditionalData);
            bool supportsCameraStacking = renderer != null && renderer.SupportsCameraStackingType(CameraRenderType.Base);
            List<Camera> cameraStack = (supportsCameraStacking) ? baseCameraAdditionalData?.cameraStack : null;

            bool anyPostProcessingEnabled = baseCameraAdditionalData != null && baseCameraAdditionalData.renderPostProcessing;
            bool mainHdrDisplayOutputActive = HDROutputForMainDisplayIsActive();

            int rendererCount = asset.m_RendererDataList.Length;

            // We need to know the last active camera in the stack to be able to resolve
            // rendering to screen when rendering it. The last camera in the stack is not
            // necessarily the last active one, as users might disable it.
            int lastActiveOverlayCameraIndex = -1;
            if (cameraStack != null)
            {
                var baseCameraRendererType = renderer.GetType();
                bool shouldUpdateCameraStack = false;
                cameraStackRequiresDepthForPostprocessing = false;

                for (int i = 0; i < cameraStack.Count; ++i)
                {
                    Camera overlayCamera = cameraStack[i];
                    if (overlayCamera == null)
                    {
                        shouldUpdateCameraStack = true;
                        continue;
                    }

                    if (overlayCamera.isActiveAndEnabled)
                    {
                        overlayCamera.TryGetComponent<UniversalAdditionalCameraData>(out var data);
                        var overlayRenderer = GetRenderer(overlayCamera, data);

                        // Checking if the base and the overlay cameras are of the same renderer type.
                        var overlayRendererType = overlayRenderer.GetType();
                        if (overlayRendererType != baseCameraRendererType)
                        {
                            Debug.LogWarning("Only cameras with compatible renderer types can be stacked. " +
                                $"The camera {overlayCamera.name} is using the renderer {overlayRendererType.Name}, " +
                                $"but the base camera {baseCamera.name} is using {baseCameraRendererType.Name}. Will skip rendering.");
                            continue;
                        }

                        // Checking if they are the same renderer type but just not supporting Overlay
                        if ((overlayRenderer.SupportedCameraStackingTypes() & 1 << (int)CameraRenderType.Overlay) == 0)
                        {
                            Debug.LogWarning($"The camera {overlayCamera.name} is using a renderer of type {renderer.GetType().Name} which does not support Overlay cameras in its current state.");
                            continue;
                        }

                        if (data == null || data.renderType != CameraRenderType.Overlay)
                        {
                            Debug.LogWarning($"Stack can only contain Overlay cameras. The camera {overlayCamera.name} " +
                                $"has a type {data.renderType} that is not supported. Will skip rendering.");
                            continue;
                        }
                        cameraStackRequiresDepthForPostprocessing |= CheckPostProcessForDepth();

                        anyPostProcessingEnabled |= data.renderPostProcessing;
                        lastActiveOverlayCameraIndex = i;
                    }
                }

                if (shouldUpdateCameraStack)
                {
                    baseCameraAdditionalData.UpdateCameraStack();
                }
            }
            bool isStackedRendering = lastActiveOverlayCameraIndex != -1;

            // Prepare XR rendering
            var xrActive = false;
            var xrRendering = baseCameraAdditionalData?.allowXRRendering ?? true;
            var xrLayout = XRSystem.NewLayout();
            xrLayout.AddCamera(baseCamera, xrRendering);

            // With XR multi-pass enabled, each camera can be rendered multiple times with different parameters
            foreach ((Camera _, XRPass xrPass) in xrLayout.GetActivePasses())
            {
                var xrPassUniversal = xrPass as XRPassUniversal;
                if (xrPass.enabled)
                {
                    xrActive = true;
                    UpdateCameraStereoMatrices(baseCamera, xrPass);

                    // Apply XR display's viewport scale to URP's dynamic resolution solution
                    float xrViewportScale = XRSystem.GetRenderViewportScale();
                    ScalableBufferManager.ResizeBuffers(xrViewportScale, xrViewportScale);
                }

                bool finalOutputHDR = false;
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                VFX.VFXCameraXRSettings cameraXRSettings;
#endif
                using (new CameraRenderingScope(context, baseCamera))
                {
                    // Update the volume framework before initializing additional camera data
                    UpdateVolumeFramework(baseCamera, baseCameraAdditionalData);

                    ContextContainer frameData = renderer.frameData;
                    UniversalCameraData baseCameraData = CreateCameraData(frameData, baseCamera,
                        baseCameraAdditionalData, !isStackedRendering);

#if ENABLE_VR && ENABLE_XR_MODULE
                    if (xrPass.enabled)
                    {
                        baseCameraData.xr = xrPass;

                        // Helper function for updating cameraData with xrPass Data
                        // Need to update XRSystem using baseCameraData to handle the case where camera position is modified in BeginCameraRendering
                        UpdateCameraData(baseCameraData, xrPass);

                        // Handle the case where camera position is modified in BeginCameraRendering
                        xrLayout.ReconfigurePass(xrPass, baseCamera);

                        XRSystemUniversal.BeginLateLatching(baseCamera, xrPassUniversal);
                    }
#endif
                    // InitializeAdditionalCameraData needs to be called after the cameraTargetDescriptor is set, because it needs to know the
                    // msaa level of cameraTargetDescriptor and XR modifications.
                    InitializeAdditionalCameraData(baseCamera, baseCameraAdditionalData, !isStackedRendering,
                        baseCameraData);

#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                    // It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
                    cameraXRSettings.viewTotal = baseCameraData.xr.enabled ? 2u : 1u;
                    cameraXRSettings.viewCount = baseCameraData.xr.enabled ? (uint)baseCameraData.xr.viewCount : 1u;
                    cameraXRSettings.viewOffset = (uint)baseCameraData.xr.multipassId;
                    VFX.VFXManager.PrepareCamera(baseCamera, cameraXRSettings);
#endif
#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
                    if (asset.useAdaptivePerformance)
                        ApplyAdaptivePerformance(baseCameraData);
#endif
                    // update the base camera flag so that the scene depth is stored if needed by overlay cameras later in the frame
                    baseCameraData.postProcessingRequiresDepthTexture |= cameraStackRequiresDepthForPostprocessing;

                    // Check whether the camera stack final output is HDR
                    // This is the equivalent of UniversalCameraData.isHDROutputActive, but without requiring the base camera to be the last camera in the stack.
                    bool hdrDisplayOutputActive = mainHdrDisplayOutputActive;
#if ENABLE_VR && ENABLE_XR_MODULE
                    // If we are rendering to xr then we need to look at the XR Display rather than the main non-xr display.
                    if (xrPass.enabled)
                        hdrDisplayOutputActive = xrPass.isHDRDisplayOutputActive;
#endif
                    finalOutputHDR =
                        asset.supportsHDR && hdrDisplayOutputActive // Check whether any HDR display is active and the render pipeline asset allows HDR rendering
                        && baseCamera.targetTexture == null
                        && (baseCamera.cameraType == CameraType.Game || baseCamera.cameraType == CameraType.VR) // Check whether the stack outputs to a screen
                        && baseCameraData.allowHDROutput; // Check whether the base camera allows HDR output

                    // Update stack-related parameters
                    baseCameraData.stackAnyPostProcessingEnabled = anyPostProcessingEnabled;
                    baseCameraData.stackLastCameraOutputToHDR = finalOutputHDR;

                    RenderSingleCamera(context, baseCameraData);
                }

                // Late latching is not supported after this point
                if (xrPass.enabled)
                    XRSystemUniversal.EndLateLatching(baseCamera, xrPassUniversal);

                if (isStackedRendering)
                {
                    for (int i = 0; i < cameraStack.Count; ++i)
                    {
                        var overlayCamera = cameraStack[i];
                        if (!overlayCamera.isActiveAndEnabled)
                            continue;

                        overlayCamera.TryGetComponent<UniversalAdditionalCameraData>(out var overlayAdditionalCameraData);
                        // Camera is overlay and enabled
                        if (overlayAdditionalCameraData != null)
                        {
                            ContextContainer overlayFrameData = GetRenderer(overlayCamera, overlayAdditionalCameraData).frameData;
                            UniversalCameraData overlayCameraData = CreateCameraData(overlayFrameData, baseCamera, baseCameraAdditionalData, false);

#if ENABLE_VR && ENABLE_XR_MODULE
                            if (xrPass.enabled)
                            {
                                overlayCameraData.xr = xrPass;
                                UpdateCameraData(overlayCameraData, xrPass);
                            }
#endif

                            InitializeAdditionalCameraData(overlayCamera, overlayAdditionalCameraData, false, overlayCameraData);
                            overlayCameraData.camera = overlayCamera;
                            overlayCameraData.baseCamera = baseCamera;

                            UpdateCameraStereoMatrices(overlayAdditionalCameraData.camera, xrPass);

                            using (new CameraRenderingScope(context, overlayCamera))
                            {
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
                                // It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
                                VFX.VFXManager.PrepareCamera(overlayCamera, cameraXRSettings);
#endif
                                UpdateVolumeFramework(overlayCamera, overlayAdditionalCameraData);

                                bool lastCamera = i == lastActiveOverlayCameraIndex;
                                InitializeAdditionalCameraData(overlayCamera, overlayAdditionalCameraData, lastCamera, overlayCameraData);

                                overlayCameraData.stackAnyPostProcessingEnabled = anyPostProcessingEnabled;
                                overlayCameraData.stackLastCameraOutputToHDR = finalOutputHDR;

                                xrLayout.ReconfigurePass(overlayCameraData.xr, overlayCamera);

                                RenderSingleCamera(context, overlayCameraData);
                            }
                        }
                    }
                }
            }

            if (xrActive)
            {
                CommandBuffer cmd = CommandBufferPool.Get();
                XRSystem.RenderMirrorView(cmd, baseCamera);
                context.ExecuteCommandBuffer(cmd);
                context.Submit();
                CommandBufferPool.Release(cmd);
            }

            XRSystem.EndLayout();
        }
  946. // Used for updating URP cameraData data struct with XRPass data.
  947. static void UpdateCameraData(UniversalCameraData baseCameraData, in XRPass xr)
  948. {
  949. // Update cameraData viewport for XR
  950. Rect cameraRect = baseCameraData.camera.rect;
  951. Rect xrViewport = xr.GetViewport();
  952. baseCameraData.pixelRect = new Rect(cameraRect.x * xrViewport.width + xrViewport.x,
  953. cameraRect.y * xrViewport.height + xrViewport.y,
  954. cameraRect.width * xrViewport.width,
  955. cameraRect.height * xrViewport.height);
  956. Rect camPixelRect = baseCameraData.pixelRect;
  957. baseCameraData.pixelWidth = (int)System.Math.Round(camPixelRect.width + camPixelRect.x) - (int)System.Math.Round(camPixelRect.x);
  958. baseCameraData.pixelHeight = (int)System.Math.Round(camPixelRect.height + camPixelRect.y) - (int)System.Math.Round(camPixelRect.y);
  959. baseCameraData.aspectRatio = (float)baseCameraData.pixelWidth / (float)baseCameraData.pixelHeight;
  960. // Update cameraData cameraTargetDescriptor for XR. This descriptor is mainly used for configuring intermediate screen space textures
  961. var originalTargetDesc = baseCameraData.cameraTargetDescriptor;
  962. baseCameraData.cameraTargetDescriptor = xr.renderTargetDesc;
  963. if (baseCameraData.isHdrEnabled)
  964. {
  965. baseCameraData.cameraTargetDescriptor.graphicsFormat = originalTargetDesc.graphicsFormat;
  966. }
  967. baseCameraData.cameraTargetDescriptor.msaaSamples = originalTargetDesc.msaaSamples;
  968. if (baseCameraData.isDefaultViewport)
  969. {
  970. // When viewport is default, intermediate textures created with this descriptor will have dynamic resolution enabled.
  971. baseCameraData.cameraTargetDescriptor.useDynamicScale = true;
  972. }
  973. else
  974. {
  975. // Some effects like Vignette computes aspect ratio from width and height. We have to take viewport into consideration if it is not default viewport.
  976. baseCameraData.cameraTargetDescriptor.width = baseCameraData.pixelWidth;
  977. baseCameraData.cameraTargetDescriptor.height = baseCameraData.pixelHeight;
  978. baseCameraData.cameraTargetDescriptor.useDynamicScale = false;
  979. }
  980. }
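
        // Illustrative sketch (not part of URP): the pixel-size math above rounds the viewport
        // edges rather than the size, so adjacent XR viewports neither overlap nor leave gaps when
        // the viewport origin is fractional. A hypothetical helper showing the same math in isolation:
        static Vector2Int ExampleXrPixelSize(Rect pixelRect)
        {
            // Round min and max edges independently, then take the difference.
            int width = (int)System.Math.Round(pixelRect.width + pixelRect.x) - (int)System.Math.Round(pixelRect.x);
            int height = (int)System.Math.Round(pixelRect.height + pixelRect.y) - (int)System.Math.Round(pixelRect.y);
            // e.g. x = 0.5, width = 10.2 yields Round(10.7) - Round(0.5) = 11 - 0 = 11 pixels.
            return new Vector2Int(width, height);
        }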
        static void UpdateVolumeFramework(Camera camera, UniversalAdditionalCameraData additionalCameraData)
        {
            using var profScope = new ProfilingScope(ProfilingSampler.Get(URPProfileId.UpdateVolumeFramework));

            // We update the volume framework for:
            // * all cameras in the editor when not in play mode
            // * scene view cameras
            // * cameras with update mode set to EveryFrame
            // * cameras with update mode set to UsePipelineSettings while the URP asset is set to EveryFrame
            bool shouldUpdate = camera.cameraType == CameraType.SceneView;
            shouldUpdate |= additionalCameraData != null && additionalCameraData.requiresVolumeFrameworkUpdate;

#if UNITY_EDITOR
            shouldUpdate |= Application.isPlaying == false;
#endif

            // When per-frame volume updates are disabled...
            if (!shouldUpdate && additionalCameraData)
            {
                // If an invalid volume stack is present, destroy it.
                if (additionalCameraData.volumeStack != null && !additionalCameraData.volumeStack.isValid)
                {
                    camera.DestroyVolumeStack(additionalCameraData);
                }

                // If the volume stack is null, create a local one and cache the current state.
                if (additionalCameraData.volumeStack == null)
                {
                    camera.UpdateVolumeStack(additionalCameraData);
                }

                VolumeManager.instance.stack = additionalCameraData.volumeStack;
                return;
            }

            // When we want to update the volumes every frame...

            // We destroy the volumeStack in the additional camera data, if present, to make sure
            // it gets recreated and initialized if the update mode is later changed to ViaScripting.
            if (additionalCameraData && additionalCameraData.volumeStack != null)
            {
                camera.DestroyVolumeStack(additionalCameraData);
            }

            // Get the mask + trigger and update the stack.
            camera.GetVolumeLayerMaskAndTrigger(additionalCameraData, out LayerMask layerMask, out Transform trigger);
            VolumeManager.instance.ResetMainStack();
            VolumeManager.instance.Update(trigger, layerMask);
        }
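
        // Illustrative sketch (not part of URP): the update decision above, reduced to a pure
        // predicate. `requiresVolumeFrameworkUpdate` already folds in the UsePipelineSettings /
        // URP asset EveryFrame case, so only three inputs are needed:
        static bool ExampleShouldUpdateVolumesEveryFrame(CameraType cameraType, bool requiresVolumeFrameworkUpdate, bool isPlaying)
        {
            bool shouldUpdate = cameraType == CameraType.SceneView;
            shouldUpdate |= requiresVolumeFrameworkUpdate;
#if UNITY_EDITOR
            // In the editor, every camera updates volumes while not in play mode.
            shouldUpdate |= !isPlaying;
#endif
            return shouldUpdate;
        }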
        static bool CheckPostProcessForDepth(UniversalCameraData cameraData)
        {
            if (!cameraData.postProcessEnabled)
                return false;

            if ((cameraData.antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing || cameraData.IsTemporalAAEnabled())
                && cameraData.renderType == CameraRenderType.Base)
                return true;

            return CheckPostProcessForDepth();
        }

        static bool CheckPostProcessForDepth()
        {
            var stack = VolumeManager.instance.stack;

            if (stack.GetComponent<DepthOfField>().IsActive())
                return true;

            if (stack.GetComponent<MotionBlur>().IsActive())
                return true;

            return false;
        }
        static void SetSupportedRenderingFeatures(UniversalRenderPipelineAsset pipelineAsset)
        {
#if UNITY_EDITOR
            SupportedRenderingFeatures.active = new SupportedRenderingFeatures()
            {
                reflectionProbeModes = SupportedRenderingFeatures.ReflectionProbeModes.None,
                defaultMixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive,
                mixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive | SupportedRenderingFeatures.LightmapMixedBakeModes.IndirectOnly | SupportedRenderingFeatures.LightmapMixedBakeModes.Shadowmask,
                lightmapBakeTypes = LightmapBakeType.Baked | LightmapBakeType.Mixed | LightmapBakeType.Realtime,
                lightmapsModes = LightmapsMode.CombinedDirectional | LightmapsMode.NonDirectional,
                lightProbeProxyVolumes = false,
                motionVectors = true,
                receiveShadows = false,
                reflectionProbes = false,
                reflectionProbesBlendDistance = true,
                particleSystemInstancing = true,
                overridesEnableLODCrossFade = true
            };
            SceneViewDrawMode.SetupDrawMode();
#endif

            SupportedRenderingFeatures.active.supportsHDR = pipelineAsset.supportsHDR;
            SupportedRenderingFeatures.active.rendersUIOverlay = true;
        }

        static ScriptableRenderer GetRenderer(Camera camera, UniversalAdditionalCameraData additionalCameraData)
        {
            var renderer = additionalCameraData != null ? additionalCameraData.scriptableRenderer : null;
            if (renderer == null || camera.cameraType == CameraType.SceneView)
                renderer = asset.scriptableRenderer;

            return renderer;
        }

        static UniversalCameraData CreateCameraData(ContextContainer frameData, Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.initializeCameraData);

            var renderer = GetRenderer(camera, additionalCameraData);
            UniversalCameraData cameraData = frameData.Create<UniversalCameraData>();
            InitializeStackedCameraData(camera, additionalCameraData, cameraData);

            cameraData.camera = camera;

            // Add a reference to the writable camera history to give access to injected user render passes, which can produce history.
            cameraData.historyManager = additionalCameraData?.historyManager;

            ///////////////////////////////////////////////////////////////////
            // Descriptor settings                                            /
            ///////////////////////////////////////////////////////////////////

            bool rendererSupportsMSAA = renderer != null && renderer.supportedRenderingFeatures.msaa;
            int msaaSamples = 1;
            if (camera.allowMSAA && asset.msaaSampleCount > 1 && rendererSupportsMSAA)
                msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : asset.msaaSampleCount;

            // Use XR's MSAA if the camera is an XR camera. XR MSAA needs special handling here because it is not per camera:
            // multiple cameras could render into the same XR display, and they should share the same MSAA level.
            // However, it should still respect the sample count of the target texture the camera is rendering to.
            if (cameraData.xrRendering && rendererSupportsMSAA && camera.targetTexture == null)
                msaaSamples = (int)XRSystem.GetDisplayMSAASamples();

            bool needsAlphaChannel = Graphics.preserveFramebufferAlpha;

            cameraData.hdrColorBufferPrecision = asset ? asset.hdrColorBufferPrecision : HDRColorBufferPrecision._32Bits;
            cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(camera, cameraData,
                cameraData.isHdrEnabled, cameraData.hdrColorBufferPrecision, msaaSamples, needsAlphaChannel, cameraData.requiresOpaqueTexture);

            uint count = GraphicsFormatUtility.GetAlphaComponentCount(cameraData.cameraTargetDescriptor.graphicsFormat);
            cameraData.isAlphaOutputEnabled = GraphicsFormatUtility.HasAlphaChannel(cameraData.cameraTargetDescriptor.graphicsFormat);
            if (cameraData.camera.cameraType == CameraType.SceneView && CoreUtils.IsSceneFilteringEnabled())
                cameraData.isAlphaOutputEnabled = true;

            return cameraData;
        }

        /// <summary>
        /// Initialize camera data settings common for all cameras in the stack. Overlay cameras will inherit
        /// settings from the base camera.
        /// </summary>
        /// <param name="baseCamera">Base camera to inherit settings from.</param>
        /// <param name="baseAdditionalCameraData">Component that contains additional base camera data.</param>
        /// <param name="cameraData">Camera data to initialize settings.</param>
        static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCameraData baseAdditionalCameraData, UniversalCameraData cameraData)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.initializeStackedCameraData);

            var settings = asset;
            cameraData.targetTexture = baseCamera.targetTexture;
            cameraData.cameraType = baseCamera.cameraType;
            bool isSceneViewCamera = cameraData.isSceneViewCamera;

            ///////////////////////////////////////////////////////////////////
            // Environment and Post-processing settings                       /
            ///////////////////////////////////////////////////////////////////
            if (isSceneViewCamera)
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
                cameraData.xrRendering = false;
                cameraData.allowHDROutput = false;
            }
            else if (baseAdditionalCameraData != null)
            {
                cameraData.volumeLayerMask = baseAdditionalCameraData.volumeLayerMask;
                cameraData.volumeTrigger = baseAdditionalCameraData.volumeTrigger == null ? baseCamera.transform : baseAdditionalCameraData.volumeTrigger;
                cameraData.isStopNaNEnabled = baseAdditionalCameraData.stopNaN && SystemInfo.graphicsShaderLevel >= 35;
                cameraData.isDitheringEnabled = baseAdditionalCameraData.dithering;
                cameraData.antialiasing = baseAdditionalCameraData.antialiasing;
                cameraData.antialiasingQuality = baseAdditionalCameraData.antialiasingQuality;
                cameraData.xrRendering = baseAdditionalCameraData.allowXRRendering && XRSystem.displayActive;
                cameraData.allowHDROutput = baseAdditionalCameraData.allowHDROutput;
            }
            else
            {
                cameraData.volumeLayerMask = 1; // "Default"
                cameraData.volumeTrigger = null;
                cameraData.isStopNaNEnabled = false;
                cameraData.isDitheringEnabled = false;
                cameraData.antialiasing = AntialiasingMode.None;
                cameraData.antialiasingQuality = AntialiasingQuality.High;
                cameraData.xrRendering = XRSystem.displayActive;
                cameraData.allowHDROutput = true;
            }

            ///////////////////////////////////////////////////////////////////
            // Settings that control output of the camera                     /
            ///////////////////////////////////////////////////////////////////

            cameraData.isHdrEnabled = baseCamera.allowHDR && settings.supportsHDR;
            cameraData.allowHDROutput &= settings.supportsHDR;

            Rect cameraRect = baseCamera.rect;
            cameraData.pixelRect = baseCamera.pixelRect;
            cameraData.pixelWidth = baseCamera.pixelWidth;
            cameraData.pixelHeight = baseCamera.pixelHeight;
            cameraData.aspectRatio = (float)cameraData.pixelWidth / (float)cameraData.pixelHeight;
            cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
                Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));

            bool isScenePreviewOrReflectionCamera = cameraData.cameraType == CameraType.SceneView || cameraData.cameraType == CameraType.Preview || cameraData.cameraType == CameraType.Reflection;

            // Discard render scale variations smaller than kRenderScaleThreshold.
            // Scale is only enabled for the game view.
            const float kRenderScaleThreshold = 0.05f;
            bool disableRenderScale = ((Mathf.Abs(1.0f - settings.renderScale) < kRenderScaleThreshold) || isScenePreviewOrReflectionCamera);
            cameraData.renderScale = disableRenderScale ? 1.0f : settings.renderScale;

            bool enableRenderGraph =
                GraphicsSettings.TryGetRenderPipelineSettings<RenderGraphSettings>(out var renderGraphSettings) &&
                !renderGraphSettings.enableRenderCompatibilityMode;

            // Convert the upscaling filter selection from the pipeline asset into an image upscaling filter.
            cameraData.upscalingFilter = ResolveUpscalingFilterSelection(new Vector2(cameraData.pixelWidth, cameraData.pixelHeight), cameraData.renderScale, settings.upscalingFilter, enableRenderGraph);

            if (cameraData.renderScale > 1.0f)
            {
                cameraData.imageScalingMode = ImageScalingMode.Downscaling;
            }
            else if ((cameraData.renderScale < 1.0f) || (!isScenePreviewOrReflectionCamera && ((cameraData.upscalingFilter == ImageUpscalingFilter.FSR) || (cameraData.upscalingFilter == ImageUpscalingFilter.STP))))
            {
                // When certain upscalers are enabled, we still consider 100% render scale an upscaling operation. (This behavior is only intended for game view cameras.)
                // This allows us to run the upscaling shader passes all the time, since they improve visual quality even at 100% scale.
                cameraData.imageScalingMode = ImageScalingMode.Upscaling;

                // When STP is enabled, we force temporal anti-aliasing on since it's a prerequisite.
                if (cameraData.upscalingFilter == ImageUpscalingFilter.STP)
                {
                    cameraData.antialiasing = AntialiasingMode.TemporalAntiAliasing;
                }
            }
            else
            {
                cameraData.imageScalingMode = ImageScalingMode.None;
            }

            cameraData.fsrOverrideSharpness = settings.fsrOverrideSharpness;
            cameraData.fsrSharpness = settings.fsrSharpness;

            cameraData.xr = XRSystem.emptyPass;
            XRSystem.SetRenderScale(cameraData.renderScale);

            var commonOpaqueFlags = SortingCriteria.CommonOpaque;
            var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
            bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
            bool canSkipFrontToBackSorting = (baseCamera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || baseCamera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;

            cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
            cameraData.captureActions = Unity.RenderPipelines.Core.Runtime.Shared.CameraCaptureBridge.GetCachedCaptureActionsEnumerator(baseCamera);
        }
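
        // Illustrative sketch (not part of URP): the isDefaultViewport test above, isolated as a
        // hypothetical helper. A viewport is "default" only when it covers the full target,
        // i.e. rect == (0, 0, 1, 1) in normalized coordinates.
        static bool ExampleIsDefaultViewport(Rect normalizedRect)
        {
            return !(Math.Abs(normalizedRect.x) > 0.0f || Math.Abs(normalizedRect.y) > 0.0f ||
                Math.Abs(normalizedRect.width) < 1.0f || Math.Abs(normalizedRect.height) < 1.0f);
        }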
        /// <summary>
        /// Initialize settings that can be different for each camera in the stack.
        /// </summary>
        /// <param name="camera">Camera to initialize settings from.</param>
        /// <param name="additionalCameraData">Additional camera data component to initialize settings from.</param>
        /// <param name="resolveFinalTarget">True if this is the last camera in the stack and rendering should resolve to camera target.</param>
        /// <param name="cameraData">Settings to be initialized.</param>
        static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, UniversalCameraData cameraData)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.initializeAdditionalCameraData);

            var renderer = GetRenderer(camera, additionalCameraData);
            var settings = asset;

            bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
            cameraData.maxShadowDistance = Mathf.Min(settings.shadowDistance, camera.farClipPlane);
            cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane) ? cameraData.maxShadowDistance : 0.0f;

            bool isSceneViewCamera = cameraData.isSceneViewCamera;
            if (isSceneViewCamera)
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = CoreUtils.ArePostProcessesEnabled(camera);
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.useScreenCoordOverride = false;
                cameraData.screenSizeOverride = cameraData.pixelRect.size;
                cameraData.screenCoordScaleBias = Vector2.one;
            }
            else if (additionalCameraData != null)
            {
                cameraData.renderType = additionalCameraData.renderType;
                cameraData.clearDepth = (additionalCameraData.renderType != CameraRenderType.Base) ? additionalCameraData.clearDepth : true;
                cameraData.postProcessEnabled = additionalCameraData.renderPostProcessing;
                cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
                cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
                cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
                cameraData.useScreenCoordOverride = additionalCameraData.useScreenCoordOverride;
                cameraData.screenSizeOverride = additionalCameraData.screenSizeOverride;
                cameraData.screenCoordScaleBias = additionalCameraData.screenCoordScaleBias;
            }
            else
            {
                cameraData.renderType = CameraRenderType.Base;
                cameraData.clearDepth = true;
                cameraData.postProcessEnabled = false;
                cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
                cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
                cameraData.useScreenCoordOverride = false;
                cameraData.screenSizeOverride = cameraData.pixelRect.size;
                cameraData.screenCoordScaleBias = Vector2.one;
            }

            cameraData.renderer = renderer;
            cameraData.requiresDepthTexture |= isSceneViewCamera;
            cameraData.postProcessingRequiresDepthTexture = CheckPostProcessForDepth(cameraData);
            cameraData.resolveFinalTarget = resolveFinalTarget;

            // Enable GPU occlusion culling in game and scene views only.
            cameraData.useGPUOcclusionCulling = GPUResidentDrawer.IsInstanceOcclusionCullingEnabled()
                && renderer.supportsGPUOcclusion
                && camera.cameraType is CameraType.SceneView or CameraType.Game or CameraType.Preview;
            cameraData.requiresDepthTexture |= cameraData.useGPUOcclusionCulling;

            // Disable depth and color copy. We should add it in the renderer instead to avoid performance pitfalls
            // of camera stacking breaking render pass execution implicitly.
            bool isOverlayCamera = (cameraData.renderType == CameraRenderType.Overlay);
            if (isOverlayCamera)
            {
                cameraData.requiresOpaqueTexture = false;
            }

            // NOTE: TAA depends on XR modifications of cameraTargetDescriptor.
            if (additionalCameraData != null)
                UpdateTemporalAAData(cameraData, additionalCameraData);

            Matrix4x4 projectionMatrix = camera.projectionMatrix;

            // Overlay cameras inherit the viewport from the base camera.
            // If the viewport differs between them, we might need to patch the projection matrix to adjust
            // the aspect ratio and prevent squishing when rendering objects in overlay cameras.
            // (See the illustrative sketch after this method.)
            if (isOverlayCamera && !camera.orthographic && cameraData.pixelRect != camera.pixelRect)
            {
                // m00 = (cotangent / aspect), therefore m00 * aspect gives us the cotangent.
                float cotangent = camera.projectionMatrix.m00 * camera.aspect;

                // Get the new m00 by dividing by the base camera's aspect ratio.
                float newCotangent = cotangent / cameraData.aspectRatio;
                projectionMatrix.m00 = newCotangent;
            }

            // TAA debug settings
            // Affects the jitter set just below. Do not move.
            ApplyTaaRenderingDebugOverrides(ref cameraData.taaSettings);

            // Depends on the cameraTargetDesc, size and MSAA, and also XR modifications of those.
            TemporalAA.JitterFunc jitterFunc = cameraData.IsSTPEnabled() ? StpUtils.s_JitterFunc : TemporalAA.s_JitterFunc;
            Matrix4x4 jitterMat = TemporalAA.CalculateJitterMatrix(cameraData, jitterFunc);
            cameraData.SetViewProjectionAndJitterMatrix(camera.worldToCameraMatrix, projectionMatrix, jitterMat);

            cameraData.worldSpaceCameraPos = camera.transform.position;

            var backgroundColorSRGB = camera.backgroundColor;
            // Get the background color from preferences if this is a preview camera.
#if UNITY_EDITOR
            if (camera.cameraType == CameraType.Preview && camera.clearFlags != CameraClearFlags.SolidColor)
            {
                backgroundColorSRGB = CoreRenderPipelinePreferences.previewBackgroundColor;
            }
#endif
            cameraData.backgroundColor = CoreUtils.ConvertSRGBToActiveColorSpace(backgroundColorSRGB);

            cameraData.stackAnyPostProcessingEnabled = cameraData.postProcessEnabled;
            cameraData.stackLastCameraOutputToHDR = cameraData.isHDROutputActive;

            // Apply post-processing settings to the alpha output.
            // cameraData.isAlphaOutputEnabled is set based on target alpha channel availability on create. The target can be a RenderTexture or the back buffer.
            bool allowAlphaOutput = !cameraData.postProcessEnabled || (cameraData.postProcessEnabled && settings.allowPostProcessAlphaOutput);
            cameraData.isAlphaOutputEnabled = cameraData.isAlphaOutputEnabled && allowAlphaOutput;
        }
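
        // Illustrative sketch (not part of URP): the overlay projection patch above, isolated.
        // For a perspective projection, m00 = cot(fovY / 2) / aspect; multiplying by the camera's own
        // aspect recovers the cotangent, and dividing by the base camera's aspect rebuilds m00 for
        // the inherited viewport. A hypothetical helper under those assumptions:
        static Matrix4x4 ExamplePatchOverlayProjection(Matrix4x4 projection, float overlayAspect, float baseAspect)
        {
            // Recover cot(fovY / 2), then re-divide by the aspect the overlay will actually render with.
            float cotangent = projection.m00 * overlayAspect;
            projection.m00 = cotangent / baseAspect;
            return projection;
        }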
        static UniversalRenderingData CreateRenderingData(ContextContainer frameData, UniversalRenderPipelineAsset settings, CommandBuffer cmd, bool isForwardPlus, ScriptableRenderer renderer)
        {
            UniversalLightData universalLightData = frameData.Get<UniversalLightData>();

            UniversalRenderingData data = frameData.Get<UniversalRenderingData>();
            data.supportsDynamicBatching = settings.supportsDynamicBatching;
            data.perObjectData = GetPerObjectLightFlags(universalLightData.additionalLightsCount, isForwardPlus);

            // Render graph does not support RenderingData.commandBuffer as its execution timeline might break.
            // RenderingData.commandBuffer is available only for the old non-RG execute code path.
            if (useRenderGraph)
                data.m_CommandBuffer = null;
            else
                data.m_CommandBuffer = cmd;

            UniversalRenderer universalRenderer = renderer as UniversalRenderer;
            if (universalRenderer != null)
            {
                data.renderingMode = universalRenderer.renderingModeActual;
                data.opaqueLayerMask = universalRenderer.opaqueLayerMask;
                data.transparentLayerMask = universalRenderer.transparentLayerMask;
            }

            return data;
        }

        static UniversalShadowData CreateShadowData(ContextContainer frameData, UniversalRenderPipelineAsset urpAsset, bool isForwardPlus)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.initializeShadowData);

            // Initial setup
            // ------------------------------------------------------
            UniversalShadowData shadowData = frameData.Create<UniversalShadowData>();
            UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();
            UniversalLightData lightData = frameData.Get<UniversalLightData>();

            m_ShadowBiasData.Clear();
            m_ShadowResolutionData.Clear();

            shadowData.shadowmapDepthBufferBits = 16;

            shadowData.mainLightShadowCascadeBorder = urpAsset.cascadeBorder;
            shadowData.mainLightShadowCascadesCount = urpAsset.shadowCascadeCount;
            shadowData.mainLightShadowCascadesSplit = GetMainLightCascadeSplit(shadowData.mainLightShadowCascadesCount, urpAsset);
            shadowData.mainLightShadowmapWidth = urpAsset.mainLightShadowmapResolution;
            shadowData.mainLightShadowmapHeight = urpAsset.mainLightShadowmapResolution;

            shadowData.additionalLightsShadowmapWidth = shadowData.additionalLightsShadowmapHeight = urpAsset.additionalLightsShadowmapResolution;

            // These will be set up in AdditionalLightsShadowCasterPass.
            shadowData.isKeywordAdditionalLightShadowsEnabled = false;
            shadowData.isKeywordSoftShadowsEnabled = false;

            // These fields must be set up after ApplyAdaptivePerformance is called on RenderingData,
            // because that function can currently modify mainLightShadowmapWidth, mainLightShadowmapHeight and mainLightShadowCascadesCount.
            // All three parameters are needed to compute these fields, so their initialization is deferred to InitializeMainLightShadowResolution.
            shadowData.mainLightShadowResolution = 0;
            shadowData.mainLightRenderTargetWidth = 0;
            shadowData.mainLightRenderTargetHeight = 0;

            // These two fields must be initialized using ShadowData, which can be modified right after this function (InitializeRenderingData) by ApplyAdaptivePerformance.
            // Their initialization is thus deferred to a later point, when ShadowData is fully initialized.
            shadowData.shadowAtlasLayout = default;
            shadowData.visibleLightsShadowCullingInfos = default;

            // Setup data that requires iterating over lights
            // ------------------------------------------------------
            var mainLightIndex = lightData.mainLightIndex;
            var visibleLights = lightData.visibleLights;

            // maxShadowDistance is set to 0.0f when the Render Shadows toggle is disabled on the camera.
            bool cameraRenderShadows = cameraData.maxShadowDistance > 0.0f;

            shadowData.mainLightShadowsEnabled = urpAsset.supportsMainLightShadows && urpAsset.mainLightRenderingMode == LightRenderingMode.PerPixel;
            shadowData.supportsMainLightShadows = SystemInfo.supportsShadows && shadowData.mainLightShadowsEnabled && cameraRenderShadows;

            shadowData.additionalLightShadowsEnabled = urpAsset.supportsAdditionalLightShadows && (urpAsset.additionalLightsRenderingMode == LightRenderingMode.PerPixel || isForwardPlus);
            shadowData.supportsAdditionalLightShadows = SystemInfo.supportsShadows && shadowData.additionalLightShadowsEnabled && !lightData.shadeAdditionalLightsPerVertex && cameraRenderShadows;

            // Early out if shadows are not rendered...
            if (!shadowData.supportsMainLightShadows && !shadowData.supportsAdditionalLightShadows)
                return shadowData;

            shadowData.supportsMainLightShadows &= mainLightIndex != -1
                && visibleLights[mainLightIndex].light != null
                && visibleLights[mainLightIndex].light.shadows != LightShadows.None;

            if (shadowData.supportsAdditionalLightShadows)
            {
                // Check if there is at least one additional light casting shadows...
                bool additionalLightsCastShadows = false;
                for (int i = 0; i < visibleLights.Length; ++i)
                {
                    if (i == mainLightIndex)
                        continue;

                    ref VisibleLight vl = ref visibleLights.UnsafeElementAtMutable(i);

                    // UniversalRP doesn't support additional directional light shadows yet.
                    if (vl.lightType == LightType.Spot || vl.lightType == LightType.Point)
                    {
                        Light light = vl.light;
                        if (light == null || light.shadows == LightShadows.None)
                            continue;

                        additionalLightsCastShadows = true;
                        break;
                    }
                }
                shadowData.supportsAdditionalLightShadows &= additionalLightsCastShadows;
            }

            // Check again if it's possible to early out...
            if (!shadowData.supportsMainLightShadows && !shadowData.supportsAdditionalLightShadows)
                return shadowData;

            for (int i = 0; i < visibleLights.Length; ++i)
            {
                if (!shadowData.supportsMainLightShadows && i == mainLightIndex)
                {
                    m_ShadowBiasData.Add(Vector4.zero);
                    m_ShadowResolutionData.Add(0);
                    continue;
                }

                if (!shadowData.supportsAdditionalLightShadows && i != mainLightIndex)
                {
                    m_ShadowBiasData.Add(Vector4.zero);
                    m_ShadowResolutionData.Add(0);
                    continue;
                }

                ref VisibleLight vl = ref visibleLights.UnsafeElementAtMutable(i);
                Light light = vl.light;

                UniversalAdditionalLightData data = null;
                if (light != null)
                {
                    light.gameObject.TryGetComponent(out data);
                }

                if (data && !data.usePipelineSettings)
                    m_ShadowBiasData.Add(new Vector4(light.shadowBias, light.shadowNormalBias, 0.0f, 0.0f));
                else
                    m_ShadowBiasData.Add(new Vector4(urpAsset.shadowDepthBias, urpAsset.shadowNormalBias, 0.0f, 0.0f));

                if (data && (data.additionalLightsShadowResolutionTier == UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierCustom))
                {
                    m_ShadowResolutionData.Add((int)light.shadowResolution); // native code does not clamp light.shadowResolution between -1 and 3
                }
                else if (data && (data.additionalLightsShadowResolutionTier != UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierCustom))
                {
                    int resolutionTier = Mathf.Clamp(data.additionalLightsShadowResolutionTier, UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierLow, UniversalAdditionalLightData.AdditionalLightsShadowResolutionTierHigh);
                    m_ShadowResolutionData.Add(urpAsset.GetAdditionalLightsShadowResolution(resolutionTier));
                }
                else
                {
                    m_ShadowResolutionData.Add(urpAsset.GetAdditionalLightsShadowResolution(UniversalAdditionalLightData.AdditionalLightsShadowDefaultResolutionTier));
                }
            }

            shadowData.bias = m_ShadowBiasData;
            shadowData.resolution = m_ShadowResolutionData;
            shadowData.supportsSoftShadows = urpAsset.supportsSoftShadows && (shadowData.supportsMainLightShadows || shadowData.supportsAdditionalLightShadows);

            return shadowData;
        }

        private static Vector3 GetMainLightCascadeSplit(int mainLightShadowCascadesCount, UniversalRenderPipelineAsset urpAsset)
        {
            switch (mainLightShadowCascadesCount)
            {
                case 1: return new Vector3(1.0f, 0.0f, 0.0f);
                case 2: return new Vector3(urpAsset.cascade2Split, 1.0f, 0.0f);
                case 3: return urpAsset.cascade3Split;
                default: return urpAsset.cascade4Split;
            }
        }
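
        // Illustrative sketch (not part of URP): the cascade split vector above holds normalized
        // distances in [0, 1] relative to the maximum shadow distance; unused components are padding.
        // A hypothetical helper converting the splits to world-space distances:
        static Vector3 ExampleCascadeSplitDistances(Vector3 normalizedSplit, float maxShadowDistance)
        {
            // e.g. split (0.25, 0.75, 0) with a 100m shadow distance puts cascade boundaries at 25m and 75m.
            return normalizedSplit * maxShadowDistance;
        }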
        static void InitializeMainLightShadowResolution(UniversalShadowData shadowData)
        {
            shadowData.mainLightShadowResolution = ShadowUtils.GetMaxTileResolutionInAtlas(shadowData.mainLightShadowmapWidth, shadowData.mainLightShadowmapHeight, shadowData.mainLightShadowCascadesCount);
            shadowData.mainLightRenderTargetWidth = shadowData.mainLightShadowmapWidth;
            shadowData.mainLightRenderTargetHeight = (shadowData.mainLightShadowCascadesCount == 2) ? shadowData.mainLightShadowmapHeight >> 1 : shadowData.mainLightShadowmapHeight;
        }

        static UniversalPostProcessingData CreatePostProcessingData(ContextContainer frameData, UniversalRenderPipelineAsset settings)
        {
            UniversalPostProcessingData postProcessingData = frameData.Create<UniversalPostProcessingData>();
            UniversalCameraData cameraData = frameData.Get<UniversalCameraData>();

            postProcessingData.isEnabled = cameraData.stackAnyPostProcessingEnabled;

            postProcessingData.gradingMode = settings.supportsHDR
                ? settings.colorGradingMode
                : ColorGradingMode.LowDynamicRange;

            if (cameraData.stackLastCameraOutputToHDR)
                postProcessingData.gradingMode = ColorGradingMode.HighDynamicRange;

            postProcessingData.lutSize = settings.colorGradingLutSize;
            postProcessingData.useFastSRGBLinearConversion = settings.useFastSRGBLinearConversion;
            postProcessingData.supportScreenSpaceLensFlare = settings.supportScreenSpaceLensFlare;
            postProcessingData.supportDataDrivenLensFlare = settings.supportDataDrivenLensFlare;

            return postProcessingData;
        }

        static UniversalResourceData CreateUniversalResourceData(ContextContainer frameData)
        {
            return frameData.Create<UniversalResourceData>();
        }

        static UniversalLightData CreateLightData(ContextContainer frameData, UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.initializeLightData);

            UniversalLightData lightData = frameData.Create<UniversalLightData>();
            lightData.mainLightIndex = GetMainLightIndex(settings, visibleLights);

            if (settings.additionalLightsRenderingMode != LightRenderingMode.Disabled)
            {
                lightData.additionalLightsCount = Math.Min((lightData.mainLightIndex != -1) ? visibleLights.Length - 1 : visibleLights.Length, maxVisibleAdditionalLights);
                lightData.maxPerObjectAdditionalLightsCount = Math.Min(settings.maxAdditionalLightsCount, maxPerObjectLights);
            }
            else
            {
                lightData.additionalLightsCount = 0;
                lightData.maxPerObjectAdditionalLightsCount = 0;
            }

            lightData.supportsAdditionalLights = settings.additionalLightsRenderingMode != LightRenderingMode.Disabled;
            lightData.shadeAdditionalLightsPerVertex = settings.additionalLightsRenderingMode == LightRenderingMode.PerVertex;
            lightData.visibleLights = visibleLights;
            lightData.supportsMixedLighting = settings.supportsMixedLighting;
            lightData.reflectionProbeBlending = settings.reflectionProbeBlending;
            lightData.reflectionProbeBoxProjection = settings.reflectionProbeBoxProjection;
            lightData.supportsLightLayers = RenderingUtils.SupportsLightLayers(SystemInfo.graphicsDeviceType) && settings.useRenderingLayers;

            return lightData;
        }

        private static void ApplyTaaRenderingDebugOverrides(ref TemporalAA.Settings taaSettings)
        {
            var debugDisplaySettings = UniversalRenderPipelineDebugDisplaySettings.Instance;
            DebugDisplaySettingsRendering renderingSettings = debugDisplaySettings.renderingSettings;
            switch (renderingSettings.taaDebugMode)
            {
                case DebugDisplaySettingsRendering.TaaDebugMode.ShowClampedHistory:
                    taaSettings.m_FrameInfluence = 0;
                    break;

                case DebugDisplaySettingsRendering.TaaDebugMode.ShowRawFrame:
                    taaSettings.m_FrameInfluence = 1;
                    break;

                case DebugDisplaySettingsRendering.TaaDebugMode.ShowRawFrameNoJitter:
                    taaSettings.m_FrameInfluence = 1;
                    taaSettings.jitterScale = 0;
                    break;
            }
        }

        private static void UpdateTemporalAAData(UniversalCameraData cameraData, UniversalAdditionalCameraData additionalCameraData)
        {
            // Always request the TAA history data here in order to fit the existing URP structure.
            additionalCameraData.historyManager.RequestAccess<TaaHistory>();
            cameraData.taaHistory = additionalCameraData.historyManager.GetHistoryForWrite<TaaHistory>();

            if (cameraData.IsSTPEnabled())
            {
                additionalCameraData.historyManager.RequestAccess<StpHistory>();
                cameraData.stpHistory = additionalCameraData.historyManager.GetHistoryForWrite<StpHistory>();
            }

            // Update TAA settings
            ref var taaSettings = ref additionalCameraData.taaSettings;
            cameraData.taaSettings = taaSettings;

            // Decrease the history clear counter. Typically the clear lasts only 1 frame, but it can be many for XR multipass eyes!
            taaSettings.resetHistoryFrames -= taaSettings.resetHistoryFrames > 0 ? 1 : 0;
        }
        private static void UpdateTemporalAATargets(UniversalCameraData cameraData)
        {
            if (cameraData.IsTemporalAAEnabled())
            {
                bool xrMultipassEnabled = false;
#if ENABLE_VR && ENABLE_XR_MODULE
                xrMultipassEnabled = cameraData.xr.enabled && !cameraData.xr.singlePassEnabled;
#endif
                bool allocation;
                if (cameraData.IsSTPEnabled())
                {
                    Debug.Assert(cameraData.stpHistory != null);

                    // When STP is active, we don't require the full set of resources needed by TAA.
                    cameraData.taaHistory.Reset();

                    allocation = cameraData.stpHistory.Update(cameraData);
                }
                else
                {
                    allocation = cameraData.taaHistory.Update(ref cameraData.cameraTargetDescriptor, xrMultipassEnabled);
                }

                // Fill the new history with the current frame.
                // XR multipass renders a "frame" per eye.
                if (allocation)
                    cameraData.taaSettings.resetHistoryFrames += xrMultipassEnabled ? 2 : 1;
            }
            else
            {
                // TAA GPU resources are explicitly released if the feature is turned off. We could refactor this to rely on the type request and the "gc" only.
                cameraData.taaHistory.Reset();

                // In the case where STP is enabled but TAA gets disabled for various reasons, we should release the STP history resources.
                if (cameraData.IsSTPEnabled())
                    cameraData.stpHistory.Reset();
            }
        }

        static void UpdateCameraStereoMatrices(Camera camera, XRPass xr)
        {
#if ENABLE_VR && ENABLE_XR_MODULE
            if (xr.enabled)
            {
                if (xr.singlePassEnabled)
                {
                    for (int i = 0; i < Mathf.Min(2, xr.viewCount); i++)
                    {
                        camera.SetStereoProjectionMatrix((Camera.StereoscopicEye)i, xr.GetProjMatrix(i));
                        camera.SetStereoViewMatrix((Camera.StereoscopicEye)i, xr.GetViewMatrix(i));
                    }
                }
                else
                {
                    camera.SetStereoProjectionMatrix((Camera.StereoscopicEye)xr.multipassId, xr.GetProjMatrix(0));
                    camera.SetStereoViewMatrix((Camera.StereoscopicEye)xr.multipassId, xr.GetViewMatrix(0));
                }
            }
#endif
        }

        static PerObjectData GetPerObjectLightFlags(int additionalLightsCount, bool isForwardPlus)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.getPerObjectLightFlags);

            var configuration = PerObjectData.Lightmaps | PerObjectData.LightProbe | PerObjectData.OcclusionProbe | PerObjectData.ShadowMask;

            if (!isForwardPlus)
            {
                configuration |= PerObjectData.ReflectionProbes | PerObjectData.LightData;
            }

            if (additionalLightsCount > 0 && !isForwardPlus)
            {
                // In this case we also need per-object indices (unity_LightIndices).
                if (!RenderingUtils.useStructuredBuffer)
                    configuration |= PerObjectData.LightIndices;
            }

            return configuration;
        }
        // The main light is always a directional light.
        static int GetMainLightIndex(UniversalRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.getMainLightIndex);

            int totalVisibleLights = visibleLights.Length;

            if (totalVisibleLights == 0 || settings.mainLightRenderingMode != LightRenderingMode.PerPixel)
                return -1;

            Light sunLight = RenderSettings.sun;
            int brightestDirectionalLightIndex = -1;
            float brightestLightIntensity = 0.0f;
            for (int i = 0; i < totalVisibleLights; ++i)
            {
                ref VisibleLight currVisibleLight = ref visibleLights.UnsafeElementAtMutable(i);
                Light currLight = currVisibleLight.light;

                // Particle system lights have a null light property. We sort lights so that all particle
                // lights come last, so if the first light is a particle light, then all lights are particle lights.
                // In that case we either have no main light or have already found it.
                if (currLight == null)
                    break;

                if (currVisibleLight.lightType == LightType.Directional)
                {
                    // The sun source needs to be a directional light.
                    if (currLight == sunLight)
                        return i;

                    // In case no sun light is present, we will return the brightest directional light.
                    if (currLight.intensity > brightestLightIntensity)
                    {
                        brightestLightIntensity = currLight.intensity;
                        brightestDirectionalLightIndex = i;
                    }
                }
            }

            return brightestDirectionalLightIndex;
        }

        void SetupPerFrameShaderConstants()
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.setupPerFrameShaderConstants);

            // Required for the 2D Unlit Shadergraph master node as it doesn't currently support hidden properties.
            Shader.SetGlobalColor(ShaderPropertyId.rendererColor, Color.white);

            Texture2D ditheringTexture = null;
            switch (asset.lodCrossFadeDitheringType)
            {
                case LODCrossFadeDitheringType.BayerMatrix:
                    ditheringTexture = runtimeTextures.bayerMatrixTex;
                    break;
                case LODCrossFadeDitheringType.BlueNoise:
                    ditheringTexture = runtimeTextures.blueNoise64LTex;
                    break;
                default:
                    Debug.LogWarning($"This LOD Cross Fade Dithering Type is not supported: {asset.lodCrossFadeDitheringType}");
                    break;
            }

            if (ditheringTexture != null)
            {
                Shader.SetGlobalFloat(ShaderPropertyId.ditheringTextureInvSize, 1.0f / ditheringTexture.width);
                Shader.SetGlobalTexture(ShaderPropertyId.ditheringTexture, ditheringTexture);
            }
        }
        static void SetupPerCameraShaderConstants(CommandBuffer cmd)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.setupPerCameraShaderConstants);

            // When glossy reflections are OFF in the shader, we set a constant color to use as indirect specular.
            SphericalHarmonicsL2 ambientSH = RenderSettings.ambientProbe;
            Color linearGlossyEnvColor = new Color(ambientSH[0, 0], ambientSH[1, 0], ambientSH[2, 0]) * RenderSettings.reflectionIntensity;
            Color glossyEnvColor = CoreUtils.ConvertLinearToActiveColorSpace(linearGlossyEnvColor);
            cmd.SetGlobalVector(ShaderPropertyId.glossyEnvironmentColor, glossyEnvColor);

            // Used as a fallback cubemap for reflections.
            cmd.SetGlobalTexture(ShaderPropertyId.glossyEnvironmentCubeMap, ReflectionProbe.defaultTexture);
            cmd.SetGlobalVector(ShaderPropertyId.glossyEnvironmentCubeMapHDR, ReflectionProbe.defaultTextureHDRDecodeValues);

            // Ambient
            cmd.SetGlobalVector(ShaderPropertyId.ambientSkyColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.ambientSkyColor));
            cmd.SetGlobalVector(ShaderPropertyId.ambientEquatorColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.ambientEquatorColor));
            cmd.SetGlobalVector(ShaderPropertyId.ambientGroundColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.ambientGroundColor));

            // Used when subtractive mode is selected.
            cmd.SetGlobalVector(ShaderPropertyId.subtractiveShadowColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.subtractiveShadowColor));
        }

        static void CheckAndApplyDebugSettings(ref RenderingData renderingData)
        {
            var debugDisplaySettings = UniversalRenderPipelineDebugDisplaySettings.Instance;
            ref CameraData cameraData = ref renderingData.cameraData;

            if (debugDisplaySettings.AreAnySettingsActive && !cameraData.isPreviewCamera)
            {
                DebugDisplaySettingsRendering renderingSettings = debugDisplaySettings.renderingSettings;
                int msaaSamples = cameraData.cameraTargetDescriptor.msaaSamples;

                if (!renderingSettings.enableMsaa)
                    msaaSamples = 1;

                if (!renderingSettings.enableHDR)
                    cameraData.isHdrEnabled = false;

                if (!debugDisplaySettings.IsPostProcessingAllowed)
                    cameraData.postProcessEnabled = false;

                cameraData.hdrColorBufferPrecision = asset ? asset.hdrColorBufferPrecision : HDRColorBufferPrecision._32Bits;
                cameraData.cameraTargetDescriptor.graphicsFormat = MakeRenderTextureGraphicsFormat(cameraData.isHdrEnabled, cameraData.hdrColorBufferPrecision, true);
                cameraData.cameraTargetDescriptor.msaaSamples = msaaSamples;
            }
        }
        /// <summary>
        /// Returns the best supported image upscaling filter based on the provided upscaling filter selection.
        /// </summary>
        /// <param name="imageSize">Size of the final image.</param>
        /// <param name="renderScale">Scale being applied to the final image size.</param>
        /// <param name="selection">Upscaling filter selected by the user.</param>
        /// <param name="enableRenderGraph">True if render graph rendering is enabled; STP requires it.</param>
        /// <returns>Either the original filter provided, or the best replacement available.</returns>
        static ImageUpscalingFilter ResolveUpscalingFilterSelection(Vector2 imageSize, float renderScale, UpscalingFilterSelection selection, bool enableRenderGraph)
        {
            // By default we just use linear filtering since it's the most compatible choice.
            ImageUpscalingFilter filter = ImageUpscalingFilter.Linear;

            // Fall back to the automatic filter if the selected filter isn't supported on the current platform or rendering environment.
            if (((selection == UpscalingFilterSelection.FSR) && !FSRUtils.IsSupported())
                || ((selection == UpscalingFilterSelection.STP) && (!STP.IsSupported() || !enableRenderGraph)))
            {
                selection = UpscalingFilterSelection.Auto;
            }

            switch (selection)
            {
                case UpscalingFilterSelection.Auto:
                {
                    // The user selected "auto" for their upscaling filter, so we should attempt to choose the best filter
                    // for the current situation. When the current resolution and render scale are compatible with integer
                    // scaling, we use the point sampling filter. Otherwise we just use the default filter (linear).
                    float pixelScale = (1.0f / renderScale);
                    bool isIntegerScale = Mathf.Approximately((pixelScale - Mathf.Floor(pixelScale)), 0.0f);

                    if (isIntegerScale)
                    {
                        float widthScale = (imageSize.x / pixelScale);
                        float heightScale = (imageSize.y / pixelScale);

                        bool isImageCompatible = (Mathf.Approximately((widthScale - Mathf.Floor(widthScale)), 0.0f) &&
                                                  Mathf.Approximately((heightScale - Mathf.Floor(heightScale)), 0.0f));

                        if (isImageCompatible)
                        {
                            filter = ImageUpscalingFilter.Point;
                        }
                    }

                    break;
                }

                case UpscalingFilterSelection.Linear:
                {
                    // Do nothing since linear is already the default.
                    break;
                }

                case UpscalingFilterSelection.Point:
                {
                    filter = ImageUpscalingFilter.Point;
                    break;
                }

                case UpscalingFilterSelection.FSR:
                {
                    filter = ImageUpscalingFilter.FSR;
                    break;
                }

                case UpscalingFilterSelection.STP:
                {
                    filter = ImageUpscalingFilter.STP;
                    break;
                }
            }

            return filter;
        }
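
        // Illustrative sketch (not part of URP): worked example of the "auto" integer-scale test above.
        // With renderScale = 0.5 the pixel scale is 2.0, which is an integer, and a 1920x1080 image
        // divides evenly by 2, so point sampling is chosen; renderScale = 0.75 (pixel scale ~1.33) is
        // not an integer, so linear filtering is kept.
        static bool ExampleIsIntegerUpscale(Vector2 imageSize, float renderScale)
        {
            float pixelScale = 1.0f / renderScale;
            return Mathf.Approximately(pixelScale - Mathf.Floor(pixelScale), 0.0f) &&
                Mathf.Approximately(imageSize.x / pixelScale - Mathf.Floor(imageSize.x / pixelScale), 0.0f) &&
                Mathf.Approximately(imageSize.y / pixelScale - Mathf.Floor(imageSize.y / pixelScale), 0.0f);
        }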
        /// <summary>
        /// Checks if the hardware (main display and platform) and the render pipeline support HDR.
        /// </summary>
        /// <returns>True if the main display and platform support HDR and HDR output is enabled on the platform.</returns>
        internal static bool HDROutputForMainDisplayIsActive()
        {
            bool hdrOutputSupported = SystemInfo.hdrDisplaySupportFlags.HasFlag(HDRDisplaySupportFlags.Supported) && asset.supportsHDR;
            bool hdrOutputActive = HDROutputSettings.main.available && HDROutputSettings.main.active;
            return hdrOutputSupported && hdrOutputActive;
        }

        /// <summary>
        /// Checks if any of the display devices we can output to are HDR capable and enabled.
        /// </summary>
        /// <returns>True if any of the display devices we can output HDR to have HDR output enabled.</returns>
        internal static bool HDROutputForAnyDisplayIsActive()
        {
            bool hdrDisplayOutputActive = HDROutputForMainDisplayIsActive();
#if ENABLE_VR && ENABLE_XR_MODULE
            // If we are rendering to XR, then we need to look at the XR display rather than the main non-XR display.
            if (XRSystem.displayActive)
            {
                hdrDisplayOutputActive |= XRSystem.isHDRDisplayOutputActive;
            }
#endif
            return hdrDisplayOutputActive;
        }

        // We only want to enable HDR output for the game view once,
        // since the game itself might want to control this.
        internal bool enableHDROnce = true;

        /// <summary>
        /// Configures the render pipeline to render to HDR output or disables HDR output.
        /// </summary>
#if UNITY_2021_1_OR_NEWER
        void SetHDRState(List<Camera> cameras)
#else
        void SetHDRState(Camera[] cameras)
#endif
        {
            bool hdrOutputActive = HDROutputSettings.main.available && HDROutputSettings.main.active;

            // If the pipeline doesn't support HDR rendering, output to SDR.
            bool supportsSwitchingHDR = SystemInfo.hdrDisplaySupportFlags.HasFlag(HDRDisplaySupportFlags.RuntimeSwitchable);
            bool switchHDRToSDR = supportsSwitchingHDR && !asset.supportsHDR && hdrOutputActive;
            if (switchHDRToSDR)
            {
                HDROutputSettings.main.RequestHDRModeChange(false);
            }

#if UNITY_EDITOR
            bool requestedHDRModeChange = false;

            // Automatically switch to HDR in the editor if it's available.
            if (supportsSwitchingHDR && asset.supportsHDR && PlayerSettings.useHDRDisplay && HDROutputSettings.main.available)
            {
#if UNITY_2021_1_OR_NEWER
                int cameraCount = cameras.Count;
#else
                int cameraCount = cameras.Length;
#endif
                if (cameraCount > 0 && cameras[0].cameraType != CameraType.Game)
                {
                    requestedHDRModeChange = hdrOutputActive;
                    HDROutputSettings.main.RequestHDRModeChange(false);
                }
                else if (enableHDROnce)
                {
                    requestedHDRModeChange = !hdrOutputActive;
                    HDROutputSettings.main.RequestHDRModeChange(true);
                    enableHDROnce = false;
                }
            }

            if (requestedHDRModeChange || switchHDRToSDR)
            {
                // Repaint the scene views and game views so the HDR mode request is applied.
                UnityEditorInternal.InternalEditorUtility.RepaintAllViews();
            }
#endif

            // Make sure automatic HDR tonemapping is off if URP is handling it.
            if (hdrOutputActive)
            {
                HDROutputSettings.main.automaticHDRTonemapping = false;
            }
        }

        internal static void GetHDROutputLuminanceParameters(HDROutputUtils.HDRDisplayInformation hdrDisplayInformation, ColorGamut hdrDisplayColorGamut, Tonemapping tonemapping, out Vector4 hdrOutputParameters)
        {
            float minNits = hdrDisplayInformation.minToneMapLuminance;
            float maxNits = hdrDisplayInformation.maxToneMapLuminance;
            float paperWhite = hdrDisplayInformation.paperWhiteNits;

            if (!tonemapping.detectPaperWhite.value)
            {
                paperWhite = tonemapping.paperWhite.value;
            }
            if (!tonemapping.detectBrightnessLimits.value)
            {
                minNits = tonemapping.minNits.value;
                maxNits = tonemapping.maxNits.value;
            }

            hdrOutputParameters = new Vector4(minNits, maxNits, paperWhite, 1f / paperWhite);
        }
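
        // Illustrative sketch (not part of URP): how a consumer might unpack the vector packed
        // above. The w component pre-computes 1 / paperWhite so shaders can normalize scene
        // luminance to paper white with a multiply instead of a divide.
        static void ExampleUnpackHdrLuminanceParameters(Vector4 hdrOutputParameters,
            out float minNits, out float maxNits, out float paperWhite, out float oneOverPaperWhite)
        {
            minNits = hdrOutputParameters.x;
            maxNits = hdrOutputParameters.y;
            paperWhite = hdrOutputParameters.z;
            oneOverPaperWhite = hdrOutputParameters.w;
        }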
        internal static void GetHDROutputGradingParameters(Tonemapping tonemapping, out Vector4 hdrOutputParameters)
        {
            int eetfMode = 0;
            float hueShift = 0.0f;

            switch (tonemapping.mode.value)
            {
                case TonemappingMode.Neutral:
                    eetfMode = (int)tonemapping.neutralHDRRangeReductionMode.value;
                    hueShift = tonemapping.hueShiftAmount.value;
                    break;

                case TonemappingMode.ACES:
                    eetfMode = (int)tonemapping.acesPreset.value;
                    break;
            }

            hdrOutputParameters = new Vector4(eetfMode, hueShift, 0.0f, 0.0f);
        }

#if ADAPTIVE_PERFORMANCE_2_0_0_OR_NEWER
        static void ApplyAdaptivePerformance(UniversalCameraData cameraData)
        {
            var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
            if (AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipFrontToBackSorting)
                cameraData.defaultOpaqueSortFlags = noFrontToBackOpaqueFlags;

            var maxShadowDistanceMultiplier = AdaptivePerformance.AdaptivePerformanceRenderSettings.MaxShadowDistanceMultiplier;
            cameraData.maxShadowDistance *= maxShadowDistanceMultiplier;

            var renderScaleMultiplier = AdaptivePerformance.AdaptivePerformanceRenderSettings.RenderScaleMultiplier;
            cameraData.renderScale *= renderScaleMultiplier;

            // TODO
            if (!cameraData.xr.enabled)
            {
                cameraData.cameraTargetDescriptor.width = (int)(cameraData.camera.pixelWidth * cameraData.renderScale);
                cameraData.cameraTargetDescriptor.height = (int)(cameraData.camera.pixelHeight * cameraData.renderScale);
            }

            var antialiasingQualityIndex = (int)cameraData.antialiasingQuality - AdaptivePerformance.AdaptivePerformanceRenderSettings.AntiAliasingQualityBias;
            if (antialiasingQualityIndex < 0)
                cameraData.antialiasing = AntialiasingMode.None;
            cameraData.antialiasingQuality = (AntialiasingQuality)Mathf.Clamp(antialiasingQualityIndex, (int)AntialiasingQuality.Low, (int)AntialiasingQuality.High);
        }
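
        // Illustrative sketch (not part of URP): the anti-aliasing quality bias applied above. A
        // positive bias lowers the quality tier; when the biased index drops below the lowest tier,
        // anti-aliasing is switched off entirely before the index is clamped back into range.
        static AntialiasingQuality ExampleBiasedAntialiasingQuality(AntialiasingQuality quality, int bias, out bool disableAntialiasing)
        {
            int index = (int)quality - bias;
            disableAntialiasing = index < 0;
            return (AntialiasingQuality)Mathf.Clamp(index, (int)AntialiasingQuality.Low, (int)AntialiasingQuality.High);
        }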
        static void ApplyAdaptivePerformance(ContextContainer frameData)
        {
            UniversalRenderingData renderingData = frameData.Get<UniversalRenderingData>();
            UniversalShadowData shadowData = frameData.Get<UniversalShadowData>();
            UniversalPostProcessingData postProcessingData = frameData.Get<UniversalPostProcessingData>();

            if (AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipDynamicBatching)
                renderingData.supportsDynamicBatching = false;

            var mainLightShadowmapResolutionMultiplier = AdaptivePerformance.AdaptivePerformanceRenderSettings.MainLightShadowmapResolutionMultiplier;
            shadowData.mainLightShadowmapWidth = (int)(shadowData.mainLightShadowmapWidth * mainLightShadowmapResolutionMultiplier);
            shadowData.mainLightShadowmapHeight = (int)(shadowData.mainLightShadowmapHeight * mainLightShadowmapResolutionMultiplier);

            var mainLightShadowCascadesCountBias = AdaptivePerformance.AdaptivePerformanceRenderSettings.MainLightShadowCascadesCountBias;
            shadowData.mainLightShadowCascadesCount = Mathf.Clamp(shadowData.mainLightShadowCascadesCount - mainLightShadowCascadesCountBias, 0, 4);

            var shadowQualityIndex = AdaptivePerformance.AdaptivePerformanceRenderSettings.ShadowQualityBias;
            // Each bias step drops one shadow feature, cheapest visual loss first: soft shadows,
            // then additional light shadows, then main light shadows.
            for (int i = 0; i < shadowQualityIndex; i++)
            {
                if (shadowData.supportsSoftShadows)
                {
                    shadowData.supportsSoftShadows = false;
                    continue;
                }

                if (shadowData.supportsAdditionalLightShadows)
                {
                    shadowData.supportsAdditionalLightShadows = false;
                    continue;
                }

                if (shadowData.supportsMainLightShadows)
                {
                    shadowData.supportsMainLightShadows = false;
                    continue;
                }

                break;
            }

            if (AdaptivePerformance.AdaptivePerformanceRenderSettings.LutBias >= 1 && postProcessingData.lutSize == 32)
                postProcessingData.lutSize = 16;
        }
#endif

        /// <summary>
        /// Data structure describing the data for a specific render request.
        /// </summary>
        public class SingleCameraRequest
        {
            /// <summary>
            /// Target texture
            /// </summary>
            public RenderTexture destination = null;

            /// <summary>
            /// Target texture mip level
            /// </summary>
            public int mipLevel = 0;

            /// <summary>
            /// Target texture cubemap face
            /// </summary>
            public CubemapFace face = CubemapFace.Unknown;

            /// <summary>
            /// Target texture slice
            /// </summary>
            public int slice = 0;
        }

        static AdditionalLightsShadowAtlasLayout BuildAdditionalLightsShadowAtlasLayout(UniversalLightData lightData, UniversalShadowData shadowData, UniversalCameraData cameraData)
        {
            using var profScope = new ProfilingScope(Profiling.Pipeline.buildAdditionalLightsShadowAtlasLayout);
            return new AdditionalLightsShadowAtlasLayout(lightData, shadowData, cameraData);
        }

        /// <summary>
        /// Enforce, under specific circumstances, whether URP or the native engine triggers the UI overlay rendering.
        /// </summary>
        static void AdjustUIOverlayOwnership(int cameraCount)
        {
            // If rendering to an XR device, we don't render the screen-space UI overlay within SRP, as the overlay
            // should not be visible in HMD eyes, only when mirroring (after the SRP XR Mirror pass).
            // If there is no camera to render in URP, the SS UI overlay also has to be rendered in the engine.
            if (XRSystem.displayActive || cameraCount == 0)
            {
                SupportedRenderingFeatures.active.rendersUIOverlay = false;
            }
            else
            {
                // Otherwise we enforce SS UI overlay rendering in URP.
                // If needed, users can still request its rendering to be after URP
                // by setting rendersUIOverlay (public API) to false in a callback added to RenderPipelineManager.beginContextRendering.
                SupportedRenderingFeatures.active.rendersUIOverlay = true;
            }
        }
    }
}