Question: Editor cam stuttering

Discussion in 'Editor & General Support' started by Aviation_Simmer, Jan 18, 2022.

  1. Aviation_Simmer

    Joined:
    Aug 30, 2021
    Posts:
    110
    In this code from Unity, there is a weird error that I've never heard of, and I haven't found anything about it on the internet. The camera behaves very strangely in the Editor, but not in Play mode. This is the error, followed by the code it comes from:

    ERROR:
    IsCameraProjectionMatrixFlipped is being called outside camera rendering scope.
    UnityEngine.GUIUtility:processEvent (int,intptr,bool&)


    Code (CSharp):
    1. using System;
    2. using System.Collections.Generic;
    3. using Unity.Collections;
    4. using UnityEngine.Scripting.APIUpdating;
    5.  
    6. using UnityEngine.Experimental.GlobalIllumination;
    7. using UnityEngine.Experimental.Rendering;
    8. using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
    9.  
    10. namespace UnityEngine.Rendering.Universal
    11. {
    12.     [MovedFrom("UnityEngine.Rendering.LWRP")] public enum MixedLightingSetup
    13.     {
    14.         None,
    15.         ShadowMask,
    16.         Subtractive,
    17.     };
    18.  
    19.     [MovedFrom("UnityEngine.Rendering.LWRP")] public struct RenderingData
    20.     {
    21.         public CullingResults cullResults;
    22.         public CameraData cameraData;
    23.         public LightData lightData;
    24.         public ShadowData shadowData;
    25.         public PostProcessingData postProcessingData;
    26.         public bool supportsDynamicBatching;
    27.         public PerObjectData perObjectData;
    28.  
    29.         /// <summary>
    30.         /// True if post-processing effect is enabled while rendering the camera stack.
    31.         /// </summary>
    32.         public bool postProcessingEnabled;
    33.     }
    34.  
    35.     [MovedFrom("UnityEngine.Rendering.LWRP")] public struct LightData
    36.     {
    37.         public int mainLightIndex;
    38.         public int additionalLightsCount;
    39.         public int maxPerObjectAdditionalLightsCount;
    40.         public NativeArray<VisibleLight> visibleLights;
    41.         public bool shadeAdditionalLightsPerVertex;
    42.         public bool supportsMixedLighting;
    43.     }
    44.  
    45.     [MovedFrom("UnityEngine.Rendering.LWRP")] public struct CameraData
    46.     {
    47.         // Internal camera data as we are not yet sure how to expose View in stereo context.
    48.         // We might change this API soon.
    49.         Matrix4x4 m_ViewMatrix;
    50.         Matrix4x4 m_ProjectionMatrix;
    51.  
    52.         internal void SetViewAndProjectionMatrix(Matrix4x4 viewMatrix, Matrix4x4 projectionMatrix)
    53.         {
    54.             m_ViewMatrix = viewMatrix;
    55.             m_ProjectionMatrix = projectionMatrix;
    56.         }
    57.  
    58.         /// <summary>
    59.         /// Returns the camera view matrix.
    60.         /// </summary>
    61.         /// <returns></returns>
    62.         public Matrix4x4 GetViewMatrix(int viewIndex = 0)
    63.         {
    64. #if ENABLE_VR && ENABLE_XR_MODULE
    65.             if (xr.enabled)
    66.                 return xr.GetViewMatrix(viewIndex);
    67. #endif
    68.             return m_ViewMatrix;
    69.         }
    70.  
    71.         /// <summary>
    72.         /// Returns the camera projection matrix.
    73.         /// </summary>
    74.         /// <returns></returns>
    75.         public Matrix4x4 GetProjectionMatrix(int viewIndex = 0)
    76.         {
    77. #if ENABLE_VR && ENABLE_XR_MODULE
    78.             if (xr.enabled)
    79.                 return xr.GetProjMatrix(viewIndex);
    80. #endif
    81.             return m_ProjectionMatrix;
    82.         }
    83.  
    84.         /// <summary>
    85.         /// Returns the camera GPU projection matrix. This contains platform specific changes to handle y-flip and reverse z.
    86.         /// Similar to <c>GL.GetGPUProjectionMatrix</c> but queries URP internal state to know if the pipeline is rendering to render texture.
    87.         /// For more info on platform differences regarding camera projection check: https://docs.unity3d.com/Manual/SL-PlatformDifferences.html
    88.         /// </summary>
    89.         /// <seealso cref="GL.GetGPUProjectionMatrix(Matrix4x4, bool)"/>
    90.         /// <returns></returns>
    91.         public Matrix4x4 GetGPUProjectionMatrix(int viewIndex = 0)
    92.         {
    93.             return GL.GetGPUProjectionMatrix(GetProjectionMatrix(viewIndex), IsCameraProjectionMatrixFlipped());
    94.         }
    95.  
    96.         public Camera camera;
    97.         public CameraRenderType renderType;
    98.         public RenderTexture targetTexture;
    99.         public RenderTextureDescriptor cameraTargetDescriptor;
    100.         internal Rect pixelRect;
    101.         internal int pixelWidth;
    102.         internal int pixelHeight;
    103.         internal float aspectRatio;
    104.         public float renderScale;
    105.         public bool clearDepth;
    106.         public CameraType cameraType;
    107.         public bool isDefaultViewport;
    108.         public bool isHdrEnabled;
    109.         public bool requiresDepthTexture;
    110.         public bool requiresOpaqueTexture;
    111. #if ENABLE_VR && ENABLE_XR_MODULE
    112.         public bool xrRendering;
    113. #endif
    114.         internal bool requireSrgbConversion
    115.         {
    116.             get
    117.             {
    118. #if ENABLE_VR && ENABLE_XR_MODULE
    119.                 if (xr.enabled)
    120.                     return !xr.renderTargetDesc.sRGB && (QualitySettings.activeColorSpace == ColorSpace.Linear);
    121. #endif
    122.  
    123.                 return Display.main.requiresSrgbBlitToBackbuffer;
    124.             }
    125.         }
    126.  
    127.         /// <summary>
    128.         /// True if the camera rendering is for the scene window in the editor
    129.         /// </summary>
    130.         public bool isSceneViewCamera => cameraType == CameraType.SceneView;
    131.  
    132.         /// <summary>
    133.         /// True if the camera rendering is for the preview window in the editor
    134.         /// </summary>
    135.         public bool isPreviewCamera => cameraType == CameraType.Preview;
    136.  
    137.         /// <summary>
    138.         /// True if the camera device projection matrix is flipped. This happens when the pipeline is rendering
    139.         /// to a render texture in non OpenGL platforms. If you are doing a custom Blit pass to copy camera textures
    140.         /// (_CameraColorTexture, _CameraDepthAttachment) you need to check this flag to know if you should flip the
    141.         /// matrix when rendering with for cmd.Draw* and reading from camera textures.
    142.         /// </summary>
    143.         public bool IsCameraProjectionMatrixFlipped()
    144.         {
    145.             // Users only have access to CameraData on URP rendering scope. The current renderer should never be null.
    146.             var renderer = ScriptableRenderer.current;
    147.             Debug.Assert(renderer != null, "IsCameraProjectionMatrixFlipped is being called outside camera rendering scope.");
    148.  
    149.             if (renderer != null)
    150.             {
    151.                 bool renderingToBackBufferTarget = renderer.cameraColorTarget == BuiltinRenderTextureType.CameraTarget;
    152. #if ENABLE_VR && ENABLE_XR_MODULE
    153.                 if (xr.enabled)
    154.                     renderingToBackBufferTarget |= renderer.cameraColorTarget == xr.renderTarget && !xr.renderTargetIsRenderTexture;
    155. #endif
    156.                 bool renderingToTexture = !renderingToBackBufferTarget || targetTexture != null;
    157.                 return SystemInfo.graphicsUVStartsAtTop && renderingToTexture;
    158.             }
    159.  
    160.             return true;
    161.         }
    162.  
    163.         public SortingCriteria defaultOpaqueSortFlags;
    164.  
    165.         internal XRPass xr;
    166.  
    167.         [Obsolete("Please use xr.enabled instead.")]
    168.         public bool isStereoEnabled;
    169.  
    170.         public float maxShadowDistance;
    171.         public bool postProcessEnabled;
    172.  
    173.         public IEnumerator<Action<RenderTargetIdentifier, CommandBuffer>> captureActions;
    174.  
    175.         public LayerMask volumeLayerMask;
    176.         public Transform volumeTrigger;
    177.  
    178.         public bool isStopNaNEnabled;
    179.         public bool isDitheringEnabled;
    180.         public AntialiasingMode antialiasing;
    181.         public AntialiasingQuality antialiasingQuality;
    182.  
    183.         /// <summary>
    184.         /// Returns the current renderer used by this camera.
    185.         /// <see cref="ScriptableRenderer"/>
    186.         /// </summary>
    187.         public ScriptableRenderer renderer;
    188.  
    189.         /// <summary>
    190.         /// True if this camera is resolving rendering to the final camera render target.
    191.         /// When rendering a stack of cameras only the last camera in the stack will resolve to camera target.
    192.         /// </summary>
    193.         public bool resolveFinalTarget;
    194.     }
    195.  
    196.     [MovedFrom("UnityEngine.Rendering.LWRP")] public struct ShadowData
    197.     {
    198.         public bool supportsMainLightShadows;
    199.         [Obsolete("Obsolete, this feature was replaced by new 'ScreenSpaceShadows' renderer feature")]
    200.         public bool requiresScreenSpaceShadowResolve;
    201.         public int mainLightShadowmapWidth;
    202.         public int mainLightShadowmapHeight;
    203.         public int mainLightShadowCascadesCount;
    204.         public Vector3 mainLightShadowCascadesSplit;
    205.         public bool supportsAdditionalLightShadows;
    206.         public int additionalLightsShadowmapWidth;
    207.         public int additionalLightsShadowmapHeight;
    208.         public bool supportsSoftShadows;
    209.         public int shadowmapDepthBufferBits;
    210.         public List<Vector4> bias;
    211.         public List<int> resolution;
    212.     }
    213.  
    214.     // Precomputed tile data.
    215.     public struct PreTile
    216.     {
    217.         // Tile left, right, bottom and top plane equations in view space.
    218.         // Normals are pointing out.
    219.         public Unity.Mathematics.float4 planeLeft;
    220.         public Unity.Mathematics.float4 planeRight;
    221.         public Unity.Mathematics.float4 planeBottom;
    222.         public Unity.Mathematics.float4 planeTop;
    223.     }
    224.  
    225.     // Actual tile data passed to the deferred shaders.
    226.     public struct TileData
    227.     {
    228.         public uint tileID;         // 2x 16 bits
    229.         public uint listBitMask;    // 32 bits
    230.         public uint relLightOffset; // 16 bits is enough
    231.         public uint unused;
    232.     }
    233.  
    234.     // Actual point/spot light data passed to the deferred shaders.
    235.     public struct PunctualLightData
    236.     {
    237.         public Vector3 wsPos;
    238.         public float radius; // TODO remove? included in attenuation
    239.         public Vector4 color;
    240.         public Vector4 attenuation; // .xy are used by DistanceAttenuation - .zw are used by AngleAttenuation (for SpotLights)
    241.         public Vector3 spotDirection;   // for spotLights
    242.         public int lightIndex;
    243.         public Vector4 occlusionProbeInfo;
    244.     }
    245.  
    246.     internal static class ShaderPropertyId
    247.     {
    248.         public static readonly int glossyEnvironmentColor = Shader.PropertyToID("_GlossyEnvironmentColor");
    249.         public static readonly int subtractiveShadowColor = Shader.PropertyToID("_SubtractiveShadowColor");
    250.  
    251.         public static readonly int ambientSkyColor = Shader.PropertyToID("unity_AmbientSky");
    252.         public static readonly int ambientEquatorColor = Shader.PropertyToID("unity_AmbientEquator");
    253.         public static readonly int ambientGroundColor = Shader.PropertyToID("unity_AmbientGround");
    254.  
    255.         public static readonly int time = Shader.PropertyToID("_Time");
    256.         public static readonly int sinTime = Shader.PropertyToID("_SinTime");
    257.         public static readonly int cosTime = Shader.PropertyToID("_CosTime");
    258.         public static readonly int deltaTime = Shader.PropertyToID("unity_DeltaTime");
    259.         public static readonly int timeParameters = Shader.PropertyToID("_TimeParameters");
    260.  
    261.         public static readonly int scaledScreenParams = Shader.PropertyToID("_ScaledScreenParams");
    262.         public static readonly int worldSpaceCameraPos = Shader.PropertyToID("_WorldSpaceCameraPos");
    263.         public static readonly int screenParams = Shader.PropertyToID("_ScreenParams");
    264.         public static readonly int projectionParams = Shader.PropertyToID("_ProjectionParams");
    265.         public static readonly int zBufferParams = Shader.PropertyToID("_ZBufferParams");
    266.         public static readonly int orthoParams = Shader.PropertyToID("unity_OrthoParams");
    267.  
    268.         public static readonly int viewMatrix = Shader.PropertyToID("unity_MatrixV");
    269.         public static readonly int projectionMatrix = Shader.PropertyToID("glstate_matrix_projection");
    270.         public static readonly int viewAndProjectionMatrix = Shader.PropertyToID("unity_MatrixVP");
    271.  
    272.         public static readonly int inverseViewMatrix = Shader.PropertyToID("unity_MatrixInvV");
    273.         public static readonly int inverseProjectionMatrix = Shader.PropertyToID("unity_MatrixInvP");
    274.         public static readonly int inverseViewAndProjectionMatrix = Shader.PropertyToID("unity_MatrixInvVP");
    275.  
    276.         public static readonly int cameraProjectionMatrix = Shader.PropertyToID("unity_CameraProjection");
    277.         public static readonly int inverseCameraProjectionMatrix = Shader.PropertyToID("unity_CameraInvProjection");
    278.         public static readonly int worldToCameraMatrix = Shader.PropertyToID("unity_WorldToCamera");
    279.         public static readonly int cameraToWorldMatrix = Shader.PropertyToID("unity_CameraToWorld");
    280.  
    281.         public static readonly int sourceTex = Shader.PropertyToID("_SourceTex");
    282.         public static readonly int scaleBias = Shader.PropertyToID("_ScaleBias");
    283.         public static readonly int scaleBiasRt = Shader.PropertyToID("_ScaleBiasRt");
    284.  
    285.         // Required for 2D Unlit Shadergraph master node as it doesn't currently support hidden properties.
    286.         public static readonly int rendererColor = Shader.PropertyToID("_RendererColor");
    287.     }
    288.  
    289.     public struct PostProcessingData
    290.     {
    291.         public ColorGradingMode gradingMode;
    292.         public int lutSize;
    293.         /// <summary>
    294.         /// True if fast approximation functions are used when converting between the sRGB and Linear color spaces, false otherwise.
    295.         /// </summary>
    296.         public bool useFastSRGBLinearConversion;
    297.     }
    298.  
    299.     public static class ShaderKeywordStrings
    300.     {
    301.         public static readonly string MainLightShadows = "_MAIN_LIGHT_SHADOWS";
    302.         public static readonly string MainLightShadowCascades = "_MAIN_LIGHT_SHADOWS_CASCADE";
    303.         public static readonly string MainLightShadowScreen = "_MAIN_LIGHT_SHADOWS_SCREEN";
    304.         public static readonly string CastingPunctualLightShadow = "_CASTING_PUNCTUAL_LIGHT_SHADOW"; // This is used during shadow map generation to differentiate between directional and punctual light shadows, as they use different formulas to apply Normal Bias
    305.         public static readonly string AdditionalLightsVertex = "_ADDITIONAL_LIGHTS_VERTEX";
    306.         public static readonly string AdditionalLightsPixel = "_ADDITIONAL_LIGHTS";
    307.         public static readonly string AdditionalLightShadows = "_ADDITIONAL_LIGHT_SHADOWS";
    308.         public static readonly string SoftShadows = "_SHADOWS_SOFT";
    309.         public static readonly string MixedLightingSubtractive = "_MIXED_LIGHTING_SUBTRACTIVE"; // Backward compatibility
    310.         public static readonly string LightmapShadowMixing = "LIGHTMAP_SHADOW_MIXING";
    311.         public static readonly string ShadowsShadowMask = "SHADOWS_SHADOWMASK";
    312.  
    313.         public static readonly string DepthNoMsaa = "_DEPTH_NO_MSAA";
    314.         public static readonly string DepthMsaa2 = "_DEPTH_MSAA_2";
    315.         public static readonly string DepthMsaa4 = "_DEPTH_MSAA_4";
    316.         public static readonly string DepthMsaa8 = "_DEPTH_MSAA_8";
    317.  
    318.         public static readonly string LinearToSRGBConversion = "_LINEAR_TO_SRGB_CONVERSION";
    319.         internal static readonly string UseFastSRGBLinearConversion = "_USE_FAST_SRGB_LINEAR_CONVERSION";
    320.  
    321.         public static readonly string SmaaLow = "_SMAA_PRESET_LOW";
    322.         public static readonly string SmaaMedium = "_SMAA_PRESET_MEDIUM";
    323.         public static readonly string SmaaHigh = "_SMAA_PRESET_HIGH";
    324.         public static readonly string PaniniGeneric = "_GENERIC";
    325.         public static readonly string PaniniUnitDistance = "_UNIT_DISTANCE";
    326.         public static readonly string BloomLQ = "_BLOOM_LQ";
    327.         public static readonly string BloomHQ = "_BLOOM_HQ";
    328.         public static readonly string BloomLQDirt = "_BLOOM_LQ_DIRT";
    329.         public static readonly string BloomHQDirt = "_BLOOM_HQ_DIRT";
    330.         public static readonly string UseRGBM = "_USE_RGBM";
    331.         public static readonly string Distortion = "_DISTORTION";
    332.         public static readonly string ChromaticAberration = "_CHROMATIC_ABERRATION";
    333.         public static readonly string HDRGrading = "_HDR_GRADING";
    334.         public static readonly string TonemapACES = "_TONEMAP_ACES";
    335.         public static readonly string TonemapNeutral = "_TONEMAP_NEUTRAL";
    336.         public static readonly string FilmGrain = "_FILM_GRAIN";
    337.         public static readonly string Fxaa = "_FXAA";
    338.         public static readonly string Dithering = "_DITHERING";
    339.         public static readonly string ScreenSpaceOcclusion = "_SCREEN_SPACE_OCCLUSION";
    340.  
    341.         public static readonly string HighQualitySampling = "_HIGH_QUALITY_SAMPLING";
    342.  
    343.         public static readonly string DOWNSAMPLING_SIZE_2 = "DOWNSAMPLING_SIZE_2";
    344.         public static readonly string DOWNSAMPLING_SIZE_4 = "DOWNSAMPLING_SIZE_4";
    345.         public static readonly string DOWNSAMPLING_SIZE_8 = "DOWNSAMPLING_SIZE_8";
    346.         public static readonly string DOWNSAMPLING_SIZE_16 = "DOWNSAMPLING_SIZE_16";
    347.         public static readonly string _SPOT = "_SPOT";
    348.         public static readonly string _DIRECTIONAL = "_DIRECTIONAL";
    349.         public static readonly string _POINT = "_POINT";
    350.         public static readonly string _DEFERRED_ADDITIONAL_LIGHT_SHADOWS = "_DEFERRED_ADDITIONAL_LIGHT_SHADOWS";
    351.         public static readonly string _GBUFFER_NORMALS_OCT = "_GBUFFER_NORMALS_OCT";
    352.         public static readonly string _DEFERRED_MIXED_LIGHTING = "_DEFERRED_MIXED_LIGHTING";
    353.         public static readonly string LIGHTMAP_ON = "LIGHTMAP_ON";
    354.         public static readonly string _ALPHATEST_ON = "_ALPHATEST_ON";
    355.         public static readonly string DIRLIGHTMAP_COMBINED = "DIRLIGHTMAP_COMBINED";
    356.         public static readonly string _DETAIL_MULX2 = "_DETAIL_MULX2";
    357.         public static readonly string _DETAIL_SCALED = "_DETAIL_SCALED";
    358.         public static readonly string _CLEARCOAT = "_CLEARCOAT";
    359.         public static readonly string _CLEARCOATMAP = "_CLEARCOATMAP";
    360.  
    361.         // XR
    362.         public static readonly string UseDrawProcedural = "_USE_DRAW_PROCEDURAL";
    363.     }
    364.  
    365.     public sealed partial class UniversalRenderPipeline
    366.     {
    367.         // Holds light direction for directional lights or position for punctual lights.
    368.         // When w is set to 1.0, it means it's a punctual light.
    369.         static Vector4 k_DefaultLightPosition = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
    370.         static Vector4 k_DefaultLightColor = Color.black;
    371.  
    372.         // Default light attenuation is setup in a particular way that it causes
    373.         // directional lights to return 1.0 for both distance and angle attenuation
    374.         static Vector4 k_DefaultLightAttenuation = new Vector4(0.0f, 1.0f, 0.0f, 1.0f);
    375.         static Vector4 k_DefaultLightSpotDirection = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
    376.         static Vector4 k_DefaultLightsProbeChannel = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);
    377.  
    378.         static List<Vector4> m_ShadowBiasData = new List<Vector4>();
    379.         static List<int> m_ShadowResolutionData = new List<int>();
    380.  
    381.         /// <summary>
    382.         /// Checks if a camera is a game camera.
    383.         /// </summary>
    384.         /// <param name="camera">Camera to check state from.</param>
    385.         /// <returns>true if given camera is a game camera, false otherwise.</returns>
    386.         public static bool IsGameCamera(Camera camera)
    387.         {
    388.             if (camera == null)
    389.                 throw new ArgumentNullException("camera");
    390.  
    391.             return camera.cameraType == CameraType.Game || camera.cameraType == CameraType.VR;
    392.         }
    393.  
    394.         /// <summary>
    395.         /// Checks if a camera is rendering in stereo mode.
    396.         /// </summary>
    397.         /// <param name="camera">Camera to check state from.</param>
    398.         /// <returns>Returns true if the given camera is rendering in stereo mode, false otherwise.</returns>
    399.         [Obsolete("Please use CameraData.xr.enabled instead.")]
    400.         public static bool IsStereoEnabled(Camera camera)
    401.         {
    402.             if (camera == null)
    403.                 throw new ArgumentNullException("camera");
    404.  
    405.             return IsGameCamera(camera) && (camera.stereoTargetEye == StereoTargetEyeMask.Both);
    406.         }
    407.  
    408.         /// <summary>
    409.         /// Returns the current render pipeline asset for the current quality setting.
    410.         /// If no render pipeline asset is assigned in QualitySettings, then returns the one assigned in GraphicsSettings.
    411.         /// </summary>
    412.         public static UniversalRenderPipelineAsset asset
    413.         {
    414.             get => GraphicsSettings.currentRenderPipeline as UniversalRenderPipelineAsset;
    415.         }
    416.  
    417.         /// <summary>
    418.         /// Checks if a camera is rendering in MultiPass stereo mode.
    419.         /// </summary>
    420.         /// <param name="camera">Camera to check state from.</param>
    421.         /// <returns>Returns true if the given camera is rendering in multi pass stereo mode, false otherwise.</returns>
    422.         [Obsolete("Please use CameraData.xr.singlePassEnabled instead.")]
    423.         static bool IsMultiPassStereoEnabled(Camera camera)
    424.         {
    425.             if (camera == null)
    426.                 throw new ArgumentNullException("camera");
    427.  
    428.             return false;
    429.         }
    430.  
    431. #if ENABLE_VR && ENABLE_VR_MODULE
    432.         static List<XR.XRDisplaySubsystem> displaySubsystemList = new List<XR.XRDisplaySubsystem>();
    433.         static XR.XRDisplaySubsystem GetFirstXRDisplaySubsystem()
    434.         {
    435.             XR.XRDisplaySubsystem display = null;
    436.             SubsystemManager.GetInstances(displaySubsystemList);
    437.  
    438.             if (displaySubsystemList.Count > 0)
    439.                 display = displaySubsystemList[0];
    440.  
    441.             return display;
    442.         }
    443.  
    444.         // NB: This method is required for a hotfix in Hololens to prevent creating a render texture when using a renderer
    445.         // with custom render pass.
    446.         // TODO: Remove this method and usages when we have proper dependency tracking in the pipeline to know
    447.         // when a render pass requires camera color as input.
    448.         internal static bool IsRunningHololens(CameraData cameraData)
    449.         {
    450. #if PLATFORM_WINRT
    451.             if (cameraData.xr.enabled)
    452.             {
    453.                 var platform = Application.platform;
    454.                 if (platform == RuntimePlatform.WSAPlayerX86 || platform == RuntimePlatform.WSAPlayerARM || platform == RuntimePlatform.WSAPlayerX64)
    455.                 {
    456.                     var displaySubsystem = GetFirstXRDisplaySubsystem();
    457.  
    458.                     if (displaySubsystem != null && !displaySubsystem.displayOpaque)
    459.                         return true;
    460.                 }
    461.             }
    462. #endif
    463.             return false;
    464.         }
    465.  
    466. #endif
    467.  
    468.         Comparison<Camera> cameraComparison = (camera1, camera2) => { return (int)camera1.depth - (int)camera2.depth; };
    469. #if UNITY_2021_1_OR_NEWER
    470.         void SortCameras(List<Camera> cameras)
    471.         {
    472.             if (cameras.Count > 1)
    473.                 cameras.Sort(cameraComparison);
    474.         }
    475.  
    476. #else
    477.         void SortCameras(Camera[] cameras)
    478.         {
    479.             if (cameras.Length > 1)
    480.                 Array.Sort(cameras, cameraComparison);
    481.         }
    482.  
    483. #endif
    484.  
    485.         static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera camera, float renderScale,
    486.             bool isHdrEnabled, int msaaSamples, bool needsAlpha, bool requiresOpaqueTexture)
    487.         {
    488.             RenderTextureDescriptor desc;
    489.             GraphicsFormat renderTextureFormatDefault = SystemInfo.GetGraphicsFormat(DefaultFormat.LDR);
    490.  
    491.             if (camera.targetTexture == null)
    492.             {
    493.                 desc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
    494.                 desc.width = (int)((float)desc.width * renderScale);
    495.                 desc.height = (int)((float)desc.height * renderScale);
    496.  
    497.  
    498.                 GraphicsFormat hdrFormat;
    499.                 if (!needsAlpha && RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.B10G11R11_UFloatPack32, FormatUsage.Linear | FormatUsage.Render))
    500.                     hdrFormat = GraphicsFormat.B10G11R11_UFloatPack32;
    501.                 else if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat, FormatUsage.Linear | FormatUsage.Render))
    502.                     hdrFormat = GraphicsFormat.R16G16B16A16_SFloat;
    503.                 else
    504.                     hdrFormat = SystemInfo.GetGraphicsFormat(DefaultFormat.HDR); // This might actually be a LDR format on old devices.
    505.  
    506.                 desc.graphicsFormat = isHdrEnabled ? hdrFormat : renderTextureFormatDefault;
    507.                 desc.depthBufferBits = 32;
    508.                 desc.msaaSamples = msaaSamples;
    509.                 desc.sRGB = (QualitySettings.activeColorSpace == ColorSpace.Linear);
    510.             }
    511.             else
    512.             {
    513.                 desc = camera.targetTexture.descriptor;
    514.                 desc.width = camera.pixelWidth;
    515.                 desc.height = camera.pixelHeight;
    516.                 if (camera.cameraType == CameraType.SceneView  && !isHdrEnabled)
    517.                 {
    518.                     desc.graphicsFormat = renderTextureFormatDefault;
    519.                 }
    520.                 // SystemInfo.SupportsRenderTextureFormat(camera.targetTexture.descriptor.colorFormat)
    521.                 // will assert on R8_SINT since it isn't a valid value of RenderTextureFormat.
    522.                 // If this is fixed then we can implement debug statement to the user explaining why some
    523.                 // RenderTextureFormats available resolves in a black render texture when no warning or error
    524.                 // is given.
    525.             }
    526.  
    527.             desc.enableRandomWrite = false;
    528.             desc.bindMS = false;
    529.             desc.useDynamicScale = camera.allowDynamicResolution;
    530.  
    531.             // check that the requested MSAA samples count is supported by the current platform. If it's not supported,
    532.             // replace the requested desc.msaaSamples value with the actual value the engine falls back to
    533.             desc.msaaSamples = SystemInfo.GetRenderTextureSupportedMSAASampleCount(desc);
    534.  
    535.             // if the target platform doesn't support storing multisampled RTs and we are doing a separate opaque pass, using a Load load action on the subsequent passes
    536.             // will result in loading Resolved data, which on some platforms is discarded, resulting in losing the results of the previous passes.
    537.             // As a workaround we disable MSAA to make sure that the results of previous passes are stored. (fix for Case 1247423).
    538.             if (!SystemInfo.supportsStoreAndResolveAction && requiresOpaqueTexture)
    539.                 desc.msaaSamples = 1;
    540.  
    541.             return desc;
    542.         }
    543.  
    544.         static Lightmapping.RequestLightsDelegate lightsDelegate = (Light[] requests, NativeArray<LightDataGI> lightsOutput) =>
    545.         {
    546.             // Editor only.
    547. #if UNITY_EDITOR
    548.             LightDataGI lightData = new LightDataGI();
    549.  
    550.             for (int i = 0; i < requests.Length; i++)
    551.             {
    552.                 Light light = requests[i];
    553.                 switch (light.type)
    554.                 {
    555.                     case LightType.Directional:
    556.                         DirectionalLight directionalLight = new DirectionalLight();
    557.                         LightmapperUtils.Extract(light, ref directionalLight);
    558.                         lightData.Init(ref directionalLight);
    559.                         break;
    560.                     case LightType.Point:
    561.                         PointLight pointLight = new PointLight();
    562.                         LightmapperUtils.Extract(light, ref pointLight);
    563.                         lightData.Init(ref pointLight);
    564.                         break;
    565.                     case LightType.Spot:
    566.                         SpotLight spotLight = new SpotLight();
    567.                         LightmapperUtils.Extract(light, ref spotLight);
    568.                         spotLight.innerConeAngle = light.innerSpotAngle * Mathf.Deg2Rad;
    569.                         spotLight.angularFalloff = AngularFalloffType.AnalyticAndInnerAngle;
    570.                         lightData.Init(ref spotLight);
    571.                         break;
    572.                     case LightType.Area:
    573.                         RectangleLight rectangleLight = new RectangleLight();
    574.                         LightmapperUtils.Extract(light, ref rectangleLight);
    575.                         rectangleLight.mode = LightMode.Baked;
    576.                         lightData.Init(ref rectangleLight);
    577.                         break;
    578.                     case LightType.Disc:
    579.                         DiscLight discLight = new DiscLight();
    580.                         LightmapperUtils.Extract(light, ref discLight);
    581.                         discLight.mode = LightMode.Baked;
    582.                         lightData.Init(ref discLight);
    583.                         break;
    584.                     default:
    585.                         lightData.InitNoBake(light.GetInstanceID());
    586.                         break;
    587.                 }
    588.  
    589.                 lightData.falloff = FalloffType.InverseSquared;
    590.                 lightsOutput[i] = lightData;
    591.             }
    592. #else
    593.             LightDataGI lightData = new LightDataGI();
    594.  
    595.             for (int i = 0; i < requests.Length; i++)
    596.             {
    597.                 Light light = requests[i];
    598.                 lightData.InitNoBake(light.GetInstanceID());
    599.                 lightsOutput[i] = lightData;
    600.             }
    601. #endif
    602.         };
    603.  
    604.         // called from DeferredLights.cs too
    605.         public static void GetLightAttenuationAndSpotDirection(
    606.             LightType lightType, float lightRange, Matrix4x4 lightLocalToWorldMatrix,
    607.             float spotAngle, float? innerSpotAngle,
    608.             out Vector4 lightAttenuation, out Vector4 lightSpotDir)
    609.         {
    610.             lightAttenuation = k_DefaultLightAttenuation;
    611.             lightSpotDir = k_DefaultLightSpotDirection;
    612.  
    613.             // Directional Light attenuation is initialize so distance attenuation always be 1.0
    614.             if (lightType != LightType.Directional)
    615.             {
    616.                 // Light attenuation in universal matches the unity vanilla one.
    617.                 // attenuation = 1.0 / distanceToLightSqr
    618.                 // We offer two different smoothing factors.
    619.                 // The smoothing factors make sure that the light intensity is zero at the light range limit.
    620.                 // The first smoothing factor is a linear fade starting at 80 % of the light range.
    621.                 // smoothFactor = (lightRangeSqr - distanceToLightSqr) / (lightRangeSqr - fadeStartDistanceSqr)
    622.                 // We rewrite smoothFactor to be able to pre compute the constant terms below and apply the smooth factor
    623.                 // with one MAD instruction
    624.                 // smoothFactor =  distanceSqr * (1.0 / (fadeDistanceSqr - lightRangeSqr)) + (-lightRangeSqr / (fadeDistanceSqr - lightRangeSqr)
    625.                 //                 distanceSqr *           oneOverFadeRangeSqr             +              lightRangeSqrOverFadeRangeSqr
    626.  
    627.                 // The other smoothing factor matches the one used in the Unity lightmapper but is slower than the linear one.
    628.                 // smoothFactor = (1.0 - saturate((distanceSqr * 1.0 / lightrangeSqr)^2))^2
    629.                 float lightRangeSqr = lightRange * lightRange;
    630.                 float fadeStartDistanceSqr = 0.8f * 0.8f * lightRangeSqr;
    631.                 float fadeRangeSqr = (fadeStartDistanceSqr - lightRangeSqr);
    632.                 float oneOverFadeRangeSqr = 1.0f / fadeRangeSqr;
    633.                 float lightRangeSqrOverFadeRangeSqr = -lightRangeSqr / fadeRangeSqr;
    634.                 float oneOverLightRangeSqr = 1.0f / Mathf.Max(0.0001f, lightRange * lightRange);
    635.  
    636.                 // On mobile and Nintendo Switch: Use the faster linear smoothing factor (SHADER_HINT_NICE_QUALITY).
    637.                 // On other devices: Use the smoothing factor that matches the GI.
    638.                 lightAttenuation.x = Application.isMobilePlatform || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Switch ? oneOverFadeRangeSqr : oneOverLightRangeSqr;
    639.                 lightAttenuation.y = lightRangeSqrOverFadeRangeSqr;
    640.             }
    641.  
    642.             if (lightType == LightType.Spot)
    643.             {
    644.                 Vector4 dir = lightLocalToWorldMatrix.GetColumn(2);
    645.                 lightSpotDir = new Vector4(-dir.x, -dir.y, -dir.z, 0.0f);
    646.  
    647.                 // Spot Attenuation with a linear falloff can be defined as
    648.                 // (SdotL - cosOuterAngle) / (cosInnerAngle - cosOuterAngle)
    649.                 // This can be rewritten as
    650.                 // invAngleRange = 1.0 / (cosInnerAngle - cosOuterAngle)
    651.                 // SdotL * invAngleRange + (-cosOuterAngle * invAngleRange)
    652.                 // If we precompute the terms in a MAD instruction
    653.                 float cosOuterAngle = Mathf.Cos(Mathf.Deg2Rad * spotAngle * 0.5f);
    654.                 // We neeed to do a null check for particle lights
    655.                 // This should be changed in the future
    656.                 // Particle lights will use an inline function
    657.                 float cosInnerAngle;
    658.                 if (innerSpotAngle.HasValue)
    659.                     cosInnerAngle = Mathf.Cos(innerSpotAngle.Value * Mathf.Deg2Rad * 0.5f);
    660.                 else
    661.                     cosInnerAngle = Mathf.Cos((2.0f * Mathf.Atan(Mathf.Tan(spotAngle * 0.5f * Mathf.Deg2Rad) * (64.0f - 18.0f) / 64.0f)) * 0.5f);
    662.                 float smoothAngleRange = Mathf.Max(0.001f, cosInnerAngle - cosOuterAngle);
    663.                 float invAngleRange = 1.0f / smoothAngleRange;
    664.                 float add = -cosOuterAngle * invAngleRange;
    665.                 lightAttenuation.z = invAngleRange;
    666.                 lightAttenuation.w = add;
    667.             }
    668.         }
    669.  
    670.         public static void InitializeLightConstants_Common(NativeArray<VisibleLight> lights, int lightIndex, out Vector4 lightPos, out Vector4 lightColor, out Vector4 lightAttenuation, out Vector4 lightSpotDir, out Vector4 lightOcclusionProbeChannel)
    671.         {
    672.             lightPos = k_DefaultLightPosition;
    673.             lightColor = k_DefaultLightColor;
    674.             lightOcclusionProbeChannel = k_DefaultLightsProbeChannel;
    675.             lightAttenuation = k_DefaultLightAttenuation;
    676.             lightSpotDir = k_DefaultLightSpotDirection;
    677.  
    678.             // When no lights are visible, main light will be set to -1.
    679.             // In this case we initialize it to default values and return
    680.             if (lightIndex < 0)
    681.                 return;
    682.  
    683.             VisibleLight lightData = lights[lightIndex];
    684.             if (lightData.lightType == LightType.Directional)
    685.             {
    686.                 Vector4 dir = -lightData.localToWorldMatrix.GetColumn(2);
    687.                 lightPos = new Vector4(dir.x, dir.y, dir.z, 0.0f);
    688.             }
    689.             else
    690.             {
    691.                 Vector4 pos = lightData.localToWorldMatrix.GetColumn(3);
    692.                 lightPos = new Vector4(pos.x, pos.y, pos.z, 1.0f);
    693.             }
    694.  
    695.             // VisibleLight.finalColor already returns color in active color space
    696.             lightColor = lightData.finalColor;
    697.  
    698.             GetLightAttenuationAndSpotDirection(
    699.                 lightData.lightType, lightData.range, lightData.localToWorldMatrix,
    700.                 lightData.spotAngle, lightData.light?.innerSpotAngle,
    701.                 out lightAttenuation, out lightSpotDir);
    702.  
    703.             Light light = lightData.light;
    704.  
    705.             if (light != null && light.bakingOutput.lightmapBakeType == LightmapBakeType.Mixed &&
    706.                 0 <= light.bakingOutput.occlusionMaskChannel &&
    707.                 light.bakingOutput.occlusionMaskChannel < 4)
    708.             {
    709.                 lightOcclusionProbeChannel[light.bakingOutput.occlusionMaskChannel] = 1.0f;
    710.             }
    711.         }
    712.     }
    713.  
    714.     internal enum URPProfileId
    715.     {
    716.         // CPU
    717.         UniversalRenderTotal,
    718.         UpdateVolumeFramework,
    719.         RenderCameraStack,
    720.  
    721.         // GPU
    722.         AdditionalLightsShadow,
    723.         ColorGradingLUT,
    724.         CopyColor,
    725.         CopyDepth,
    726.         DepthNormalPrepass,
    727.         DepthPrepass,
    728.  
    729.         // DrawObjectsPass
    730.         DrawOpaqueObjects,
    731.         DrawTransparentObjects,
    732.  
    733.         // RenderObjectsPass
    734.         //RenderObjects,
    735.  
    736.         MainLightShadow,
    737.         ResolveShadows,
    738.         SSAO,
    739.  
    740.         // PostProcessPass
    741.         StopNaNs,
    742.         SMAA,
    743.         GaussianDepthOfField,
    744.         BokehDepthOfField,
    745.         MotionBlur,
    746.         PaniniProjection,
    747.         UberPostProcess,
    748.         Bloom,
    749.  
    750.         FinalBlit
    751.     }
    752. }
    753.  
    The pictures keep loading and stacking up...
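    From what I can tell, the warning is the Debug.Assert inside CameraData.IsCameraProjectionMatrixFlipped() in the file above: it fires whenever ScriptableRenderer.current is null, i.e. when the method is called outside URP's camera rendering scope (which seems to be what happens from the editor GUI path in the stack trace). For reference, here is a minimal sketch of where I'd expect a call like that to be valid: inside a custom ScriptableRenderPass, while URP is actually rendering the camera. The pass name is just an illustration, not something from my project.

    Code (CSharp):
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Rendering.Universal;

    // Illustrative pass only: shows a context where CameraData is meant to be used.
    class ProjectionFlipCheckPass : ScriptableRenderPass
    {
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            // Execute runs inside URP's rendering scope, so ScriptableRenderer.current is set
            // and IsCameraProjectionMatrixFlipped() should not trigger the assertion here.
            bool flipped = renderingData.cameraData.IsCameraProjectionMatrixFlipped();
            Debug.Log($"Camera projection flipped: {flipped}");
        }
    }

    (The pass would still need to be enqueued from a ScriptableRendererFeature to actually run; I left that part out to keep the sketch short.)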