
Resolved AROcclusion depth map without occlusion

Discussion in 'AR' started by JeromeGodboutAmotus, Aug 31, 2021.

  1. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Hi,
    I managed to extract the depth map and do what I needed with it, but I do not want the environment occlusion that comes with it. Is there a way to have the occlusion manager extract the best environment depth without performing any actual occlusion?

    I want the depth texture but no depth occlusion (it just causes ugly problems in my application).
    Any idea how to achieve that? Can any layer be excluded from the environment depth occlusion?

    It's so annoying that the depth is this hard to access without the planes and the occlusion. We need more freedom with the depth map, please.
     
  2. KyryloKuzyk

    KyryloKuzyk

    Joined:
    Nov 4, 2013
    Posts:
    1,128
    You're right, there is no built-in way to receive the environment depth texture from the AROcclusionManager without enabling 'Environment Depth Mode'.

    What you can do is write a custom shader for tracked planes to prevent flickering.

    Or, to disable the occlusion completely, you can write a custom camera background shader that doesn't write to the depth buffer. For example, here is how to do it for iOS:
    1. Copy the ARKitBackground.shader and rename it.
    2. Enable 'Use Custom Material' on the ARCameraBackground and point the reference to a material that uses the copied shader (see the sketch after this list).
    3. Remove the depth semantics from the shader.
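    If you prefer to do step 2 from a script, here is a minimal sketch. The useCustomMaterial and customMaterial properties exist on ARCameraBackground; the shader name is only an assumption following step 1:
    Code (CSharp):
    using UnityEngine;
    using UnityEngine.XR.ARFoundation;

    // Attach to the AR Camera (the object that has the ARCameraBackground).
    public class UseNoOcclusionBackground : MonoBehaviour
    {
        void Start()
        {
            // Assumption: the copied shader was named "Unlit/ARKitBackgroundNoOcclusion".
            var background = GetComponent<ARCameraBackground>();
            background.useCustomMaterial = true;
            background.customMaterial = new Material(Shader.Find("Unlit/ARKitBackgroundNoOcclusion"));
        }
    }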
    Here is the resulting shader. You can also delete the depthValue calculations to improve performance.
    Code (CSharp):
    Shader "Unlit/ARKitBackgroundNoOcclusion"
    {
        Properties
        {
            _textureY ("TextureY", 2D) = "white" {}
            _textureCbCr ("TextureCbCr", 2D) = "black" {}
            _HumanStencil ("HumanStencil", 2D) = "black" {}
            _HumanDepth ("HumanDepth", 2D) = "black" {}
            _EnvironmentDepth ("EnvironmentDepth", 2D) = "black" {}
        }
        SubShader
        {
            Tags
            {
                "Queue" = "Background"
                "RenderType" = "Background"
                "ForceNoShadowCasting" = "True"
            }

            Pass
            {
                Cull Off
                ZTest Always
                ZWrite On
                Lighting Off
                LOD 100
                Tags
                {
                    "LightMode" = "Always"
                }

                HLSLPROGRAM

                #pragma vertex vert
                #pragma fragment frag

                #pragma multi_compile_local __ ARKIT_BACKGROUND_URP ARKIT_BACKGROUND_LWRP
                #pragma multi_compile_local __ ARKIT_HUMAN_SEGMENTATION_ENABLED ARKIT_ENVIRONMENT_DEPTH_ENABLED

    #if ARKIT_BACKGROUND_URP

                #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
                #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"

                #define ARKIT_TEXTURE2D_HALF(texture) TEXTURE2D(texture)
                #define ARKIT_SAMPLER_HALF(sampler) SAMPLER(sampler)
                #define ARKIT_TEXTURE2D_FLOAT(texture) TEXTURE2D(texture)
                #define ARKIT_SAMPLER_FLOAT(sampler) SAMPLER(sampler)
                #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) SAMPLE_TEXTURE2D(texture,sampler,texcoord)

    #elif ARKIT_BACKGROUND_LWRP

                #include "Packages/com.unity.render-pipelines.lightweight/ShaderLibrary/Core.hlsl"
                #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"

                #define ARKIT_TEXTURE2D_HALF(texture) TEXTURE2D(texture)
                #define ARKIT_SAMPLER_HALF(sampler) SAMPLER(sampler)
                #define ARKIT_TEXTURE2D_FLOAT(texture) TEXTURE2D(texture)
                #define ARKIT_SAMPLER_FLOAT(sampler) SAMPLER(sampler)
                #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) SAMPLE_TEXTURE2D(texture,sampler,texcoord)

    #else // Legacy RP

                #include "UnityCG.cginc"

                #define real4 half4
                #define real4x4 half4x4
                #define TransformObjectToHClip UnityObjectToClipPos
                #define FastSRGBToLinear GammaToLinearSpace

                #define ARKIT_TEXTURE2D_HALF(texture) UNITY_DECLARE_TEX2D_HALF(texture)
                #define ARKIT_SAMPLER_HALF(sampler)
                #define ARKIT_TEXTURE2D_FLOAT(texture) UNITY_DECLARE_TEX2D_FLOAT(texture)
                #define ARKIT_SAMPLER_FLOAT(sampler)
                #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) UNITY_SAMPLE_TEX2D(texture,texcoord)

    #endif

                struct appdata
                {
                    float3 position : POSITION;
                    float2 texcoord : TEXCOORD0;
                };

                struct v2f
                {
                    float4 position : SV_POSITION;
                    float2 texcoord : TEXCOORD0;
                };

                struct fragment_output
                {
                    real4 color : SV_Target;
                    // float depth : SV_Depth; // removed so the background never writes depth
                };

                CBUFFER_START(UnityARFoundationPerFrame)
                // Device display transform is provided by the AR Foundation camera background renderer.
                float4x4 _UnityDisplayTransform;
                float _UnityCameraForwardScale;
                CBUFFER_END

                v2f vert (appdata v)
                {
                    // Transform the position from object space to clip space.
                    float4 position = TransformObjectToHClip(v.position);

                    // Remap the texture coordinates based on the device rotation.
                    float2 texcoord = mul(float3(v.texcoord, 1.0f), _UnityDisplayTransform).xy;

                    v2f o;
                    o.position = position;
                    o.texcoord = texcoord;
                    return o;
                }

                CBUFFER_START(ARKitColorTransformations)
                static const real4x4 s_YCbCrToSRGB = real4x4(
                    real4(1.0h,  0.0000h,  1.4020h, -0.7010h),
                    real4(1.0h, -0.3441h, -0.7141h,  0.5291h),
                    real4(1.0h,  1.7720h,  0.0000h, -0.8860h),
                    real4(0.0h,  0.0000h,  0.0000h,  1.0000h)
                );
                CBUFFER_END

                inline float ConvertDistanceToDepth(float d)
                {
                    // Account for scale
                    d = _UnityCameraForwardScale > 0.0 ? _UnityCameraForwardScale * d : d;

                    // Clip any distances smaller than the near clip plane, and compute the depth value from the distance.
                    return (d < _ProjectionParams.y) ? 0.0f : ((1.0f / _ZBufferParams.z) * ((1.0f / d) - _ZBufferParams.w));
                }

                ARKIT_TEXTURE2D_HALF(_textureY);
                ARKIT_SAMPLER_HALF(sampler_textureY);
                ARKIT_TEXTURE2D_HALF(_textureCbCr);
                ARKIT_SAMPLER_HALF(sampler_textureCbCr);
    #if ARKIT_ENVIRONMENT_DEPTH_ENABLED
                ARKIT_TEXTURE2D_FLOAT(_EnvironmentDepth);
                ARKIT_SAMPLER_FLOAT(sampler_EnvironmentDepth);
    #elif ARKIT_HUMAN_SEGMENTATION_ENABLED
                ARKIT_TEXTURE2D_HALF(_HumanStencil);
                ARKIT_SAMPLER_HALF(sampler_HumanStencil);
                ARKIT_TEXTURE2D_FLOAT(_HumanDepth);
                ARKIT_SAMPLER_FLOAT(sampler_HumanDepth);
    #endif // ARKIT_HUMAN_SEGMENTATION_ENABLED

                fragment_output frag (v2f i)
                {
                    // Sample the video textures (in YCbCr).
                    real4 ycbcr = real4(ARKIT_SAMPLE_TEXTURE2D(_textureY, sampler_textureY, i.texcoord).r,
                                        ARKIT_SAMPLE_TEXTURE2D(_textureCbCr, sampler_textureCbCr, i.texcoord).rg,
                                        1.0h);

                    // Convert from YCbCr to sRGB.
                    real4 videoColor = mul(s_YCbCrToSRGB, ycbcr);

    #if !UNITY_COLORSPACE_GAMMA
                    // If rendering in linear color space, convert from sRGB to RGB.
                    videoColor.xyz = FastSRGBToLinear(videoColor.xyz);
    #endif // !UNITY_COLORSPACE_GAMMA

                    // Assume the background depth is the back of the depth clipping volume.
                    float depthValue = 0.0f;

    #if ARKIT_ENVIRONMENT_DEPTH_ENABLED
                    // Sample the environment depth (in meters).
                    float envDistance = ARKIT_SAMPLE_TEXTURE2D(_EnvironmentDepth, sampler_EnvironmentDepth, i.texcoord).r;

                    // Convert the distance to depth.
                    depthValue = ConvertDistanceToDepth(envDistance);
    #elif ARKIT_HUMAN_SEGMENTATION_ENABLED
                    // Check the human stencil, and skip non-human pixels.
                    if (ARKIT_SAMPLE_TEXTURE2D(_HumanStencil, sampler_HumanStencil, i.texcoord).r > 0.5h)
                    {
                        // Sample the human depth (in meters).
                        float humanDistance = ARKIT_SAMPLE_TEXTURE2D(_HumanDepth, sampler_HumanDepth, i.texcoord).r;

                        // Convert the distance to depth.
                        depthValue = ConvertDistanceToDepth(humanDistance);
                    }
    #endif // ARKIT_HUMAN_SEGMENTATION_ENABLED

                    fragment_output o;
                    o.color = videoColor;
                    // o.depth = depthValue; // removed together with the SV_Depth output above
                    return o;
                }

                ENDHLSL
            }
        }
    }
     
    Slaktus and JeromeGodboutAmotus like this.
  3. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Thanks, I will do that. I don't even track planes; the whole AR assumption of planes and occlusion is a pain. It's a single use case that doesn't fit every application. Some of us just want the depth map and to be done with it. I think I will open a ticket for that: let us have the depth map and get rid of the plane detection and depth occlusion processing waste.

    But thanks again, I'm fairly new to shaders in Unity, but I will try that.
     
  4. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Sorry about all these questions, but this is for ARKit; how do I set it up for both ARKit and ARCore? I found the source shaders in the packages:
    Packages/ARKit XR Plugin/Assets/Shaders/ARKitBackground.shader
    or
    Packages/ARCore XR Plugin/Assets/Shaders/ARCoreBackground.shader

    I copied those to make sure I have the versions matching my package versions, and commented out the depth assignment in both.
    Now I have:
    Assets/Shaders/ARKitBackgroundNoOcclusion.shader
    Assets/Shaders/ARCoreBackgroundNoOcclusion.shader

    I wonder how to make a material that can use either one based on the current platform. Should I make two materials and assign the camera material at runtime based on the current platform?
     
  6. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    In case it helps people in the future: the modified shaders (one for ARKit and one for ARCore) did not compile properly when building for the other platform, and I did not find any way to select a given shader per platform. I had to add the following to the shader scripts and to the build preprocessor:

    Editor/ShaderPreProcessing.cs
    Code (CSharp):
    using System.Collections.Generic;
    using UnityEditor.Build;
    using UnityEditor.Rendering;
    using UnityEngine;
    using UnityEngine.Rendering;

    // Strips shader variants whose "disable" keyword matches the current build target.
    // Note: this must implement IPreprocessShaders (not IPreprocessComputeShaders),
    // since the background shaders are regular graphics shaders.
    public class ShaderPreProcessing : IPreprocessShaders
    {
        ShaderKeyword PlatformKeyword;

        public ShaderPreProcessing()
        {
            PlatformKeyword = new ShaderKeyword(
    #if UNITY_IOS
                "AMOTUS_PLATFORM_DISABLE_IOS"
    #elif UNITY_ANDROID
                "AMOTUS_PLATFORM_DISABLE_ANDROID"
    #else
                "AMOTUS_PLATFORM_UNSUPPORTED"
    #endif
            );
        }

        public int callbackOrder { get => 0; }

        public void OnProcessShader(Shader shader, ShaderSnippetData snippet, IList<ShaderCompilerData> data)
        {
            Debug.LogWarning($"OnProcessShader called: {shader.name} ({snippet.passName})");
            for (int i = data.Count - 1; i >= 0; --i)
            {
                if (data[i].shaderKeywordSet.IsEnabled(PlatformKeyword))
                {
                    Debug.LogWarning("Remove shader platform variant");
                    data.RemoveAt(i);
                }
            }
        }
    }
    This removes any shader variant that has the given keyword enabled. Next, we guard the shader code based on the current platform (the check is negated to avoid modifying or removing other shaders that don't use these keywords):

    ARKitBackgroundNoOcclusion.shader
    Code (CSharp):
    Pass
    {
    ...
    #pragma shader_feature_local AMOTUS_PLATFORM_DISABLE_ANDROID
    #if !defined(AMOTUS_PLATFORM_DISABLE_ANDROID)
    // ... shader code here
    #endif
    and do the opposite in the ARCoreBackgroundNoOcclusion.shader:
    Code (CSharp):
    Pass
    {
    ...
    #pragma shader_feature_local AMOTUS_PLATFORM_DISABLE_IOS
    #if !defined(AMOTUS_PLATFORM_DISABLE_IOS)
    // ... shader code here
    #endif
    This would get pretty bad as the number of platforms increases, but I only have two at the moment, so it's not that bad. Now the compilation passes on both platforms without manual intervention, and the CI can still build for both.

    I can now set the proper material with the proper shader:

    PlatformMaterialSelect.cs
    Code (CSharp):
    using UnityEngine;
    using UnityEngine.XR.ARFoundation;

    namespace UIComponent
    {
        public class PlatformMaterialSelect : MonoBehaviour
        {
            #region Unity Editor
            public Material MaterialIOS;
            public Material MaterialAndroid;
            public ARCameraBackground CameraBackground;
            #endregion

            private void Update()
            {
                Material mat =
    #if UNITY_ANDROID
                    MaterialAndroid;
    #elif UNITY_IOS
                    MaterialIOS;
    #else
                    null;
    #endif
                if (CameraBackground.customMaterial != mat)
                {
                    CameraBackground.customMaterial = mat;
                }
            }
        }
    }
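    A side note on this design: the #if platform symbols are resolved at build time, so the selected material cannot actually change from frame to frame. A minimal sketch of the same selection done once at startup (assuming the same public fields as above):
    Code (CSharp):
    // Variation: assign once in Start() instead of checking every frame,
    // since the platform #if symbols are fixed per build.
    private void Start()
    {
    #if UNITY_ANDROID
        CameraBackground.customMaterial = MaterialAndroid;
    #elif UNITY_IOS
        CameraBackground.customMaterial = MaterialIOS;
    #endif
    }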
     
  7. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Oh, by the way, I almost forgot: removing the depth write did the inverse and everything got clipped out, so I forced the fragment depth to 1.0 for ARCore and 0 for ARKit to avoid all the occlusion. Big thanks to @KyryloKuzyk for helping me in the first place and pointing me in the right direction to get started.
     
    Last edited: Sep 1, 2021
  8. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Maybe this is just a bug in Unity 2021.1.17f1, but it seems the compilation problems only occur the first time after modifying the script; on the second and subsequent build requests, the build passes. I'm not sure I need the whole preprocessing above, since building twice solves the issue, but that is going to be pretty bad for the CI build system. I have to figure out why it generates those weird errors where many symbols or functions are not found. For example, real4 is not defined?!?
     
  9. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Thanks to @TimMowrer on Slack, it turns out that if you put the AROcclusionManager on an empty object (not the AR Camera), you can still extract the depth info, but the AR Camera will no longer use the occlusion for rendering.

    So I removed all the shaders and materials (less code to support and maintain) and just created an empty object named ARDepth at the root of the scene, which now holds the AROcclusionManager component. That just works!

    I wish this behaviour were documented somewhere:
    https://docs.unity3d.com/Packages/c...ngine.XR.ARFoundation.AROcclusionManager.html
    doesn't say much.
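    For future readers, a minimal sketch of that setup (the ARDepth name is just my choice here, and requestedEnvironmentDepthMode assumes AR Foundation 4.1+):
    Code (CSharp):
    using UnityEngine;
    using UnityEngine.XR.ARFoundation;
    using UnityEngine.XR.ARSubsystems;

    public class DepthOnlySetup : MonoBehaviour
    {
        void Start()
        {
            // Hold the AROcclusionManager on an empty object, not the AR Camera,
            // so depth images are produced but never used for occlusion rendering.
            var depthHolder = new GameObject("ARDepth");
            var occlusion = depthHolder.AddComponent<AROcclusionManager>();
            occlusion.requestedEnvironmentDepthMode = EnvironmentDepthMode.Best;
        }
    }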
     
  10. KnewK

    KnewK

    Joined:
    Sep 2, 2012
    Posts:
    19
    I put the AROcclusionManager on the AR Session Origin object instead of the AR Camera. The occlusion is not rendered, but you have access to the depth images once you have a reference to the AROcclusionManager component.
     
  11. KyryloKuzyk

    KyryloKuzyk

    Joined:
    Nov 4, 2013
    Posts:
    1,128
    There is a new feature in AR Foundation 5.0.0-pre.9: you can now set the AROcclusionManager's occlusion preference mode to OcclusionPreferenceMode.NoOcclusion, and the video background shaders will not write to the depth buffer.
    Please keep in mind that it's a preview version, and it's not recommended for production use.
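    In code, that looks roughly like the sketch below (requestedOcclusionPreferenceMode is the request property on AROcclusionManager; the NoOcclusion value assumes 5.0.0-pre.9 or later):
    Code (CSharp):
    using UnityEngine;
    using UnityEngine.XR.ARFoundation;
    using UnityEngine.XR.ARSubsystems;

    public class DisableOcclusionRendering : MonoBehaviour
    {
        void Start()
        {
            // Depth textures stay available, but the camera background stops writing depth.
            var occlusion = GetComponent<AROcclusionManager>();
            occlusion.requestedOcclusionPreferenceMode = OcclusionPreferenceMode.NoOcclusion;
        }
    }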
     
    JeromeGodboutAmotus likes this.
  12. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    Great news! It will be more straightforward to understand what is going on with an explicit property like that. :)
     
    KyryloKuzyk likes this.
  13. Ryhter

    Ryhter

    Joined:
    Jun 3, 2017
    Posts:
    6
    @JeromeGodboutAmotus regarding the DepthMap, is there a way to get numerical values instead of a color visualization that represents the depth?
     
  14. JeromeGodboutAmotus

    JeromeGodboutAmotus

    Joined:
    Apr 24, 2020
    Posts:
    40
    You can acquire the depth image without rendering it by using the AROcclusionManager:
    Code (CSharp):
    OcclusionManager.TryAcquireEnvironmentDepthCpuImage(out XRCpuImage cpuImage)
    Take care to keep the occlusion manager on an object the camera does not use for rendering (see the posts above).

    You can then apply a conversion based on your needs:
    Code (CSharp):
    XRCpuImage.ConversionParams conversionParams = new XRCpuImage.ConversionParams(cpuImage, XRCpuImageFormatExtensions.AsTextureFormat(cpuImage.format), XRCpuImage.Transformation.MirrorY);
    Convert the depth data into a texture you can sample:
    Code (CSharp):
    using (NativeArray<byte> rawData = new NativeArray<byte>(cpuImage.GetConvertedDataSize(conversionParams), Allocator.Temp))
    {
        cpuImage.Convert(conversionParams, rawData);
        DepthTexture = new Texture2D(conversionParams.outputDimensions.x, conversionParams.outputDimensions.y, conversionParams.outputFormat, false, false);
        DepthTexture.LoadRawTextureData(rawData);
        DepthTexture.Apply();
    }
    You can either raycast or sample the texture to get the depth at a given coordinate.
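    For example, a small sampling helper (a sketch; it assumes the converted depth texture uses a single-channel float format such as RFloat, with the distance in meters in the red channel):
    Code (CSharp):
    using UnityEngine;

    public static class DepthSampling
    {
        // Sample the depth texture at normalized UV coordinates (0..1).
        public static float DepthAtUV(Texture2D depthTexture, Vector2 uv)
        {
            int x = Mathf.Clamp((int)(uv.x * depthTexture.width), 0, depthTexture.width - 1);
            int y = Mathf.Clamp((int)(uv.y * depthTexture.height), 0, depthTexture.height - 1);
            return depthTexture.GetPixel(x, y).r; // distance in meters
        }
    }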
     
    andyb-unity and Ryhter like this.
  15. Ryhter

    Ryhter

    Joined:
    Jun 3, 2017
    Posts:
    6
    Thanks for the help, I'll try it out and post some feedback.