XYZ From Depth Data in Meters (ARKit Data)

Discussion in 'AR/VR (XR) Discussion' started by raeldor, Jun 30, 2019.

  1. raeldor

    Joined:
    Jul 8, 2013
    Posts:
    55
    Hi,

    I pulled depth (in meters) and color information from ARKit and am trying to reconstruct it into world space. If I use the ARKit intrinsics matrix and reference size in my calculations, half of my point cloud seems to be missing, the model is on its side (ARKit seems to return image and depth information sideways for some reason, not sure if that's related), and it seems warped, like this...

    https://imgur.com/wNpwCl5
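    If the buffers really do come back rotated 90 degrees (landscape-native sensor data displayed in portrait), one way to sanity-check that is to remap the depth indices before building the point cloud. A minimal sketch of such a remap, assuming the raw buffer is row-major in sensor (landscape) orientation and needs a 90-degree clockwise rotation for portrait (the helper name and the rotation direction are assumptions, not something ARKit documents this way):

    Code (CSharp):
    public static class DepthOrientation
    {
        // Read a depth sample as if the buffer had been rotated 90 degrees clockwise,
        // so a portrait-space (uPortrait, vPortrait) maps back into the landscape-native
        // layout. The rotated (portrait) image is depthHeight wide and depthWidth tall.
        public static float SampleRotated90CW(float[] depth, int depthWidth, int depthHeight,
                                              int uPortrait, int vPortrait)
        {
            int uSensor = vPortrait;                      // portrait row index -> sensor column
            int vSensor = depthHeight - 1 - uPortrait;    // portrait column index -> flipped sensor row
            return depth[vSensor * depthWidth + uSensor];
        }
    }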

    Is there a way to reconstruct this to get the correct X and Y from the depth in meters using the Unity camera information? So far my code looks like this...

    Code (CSharp):
    void FrameUpdate(UnityARCamera cam)
    {
        // get size of camera texture
        UnityARSessionNativeInterface m_session = UnityARSessionNativeInterface.GetARSessionNativeInterface();

        // get size of canvas
        Canvas uiCanvas = FindObjectOfType<Canvas>();
        int panelWidth = (int)uiCanvas.pixelRect.width;
        int panelHeight = (int)uiCanvas.pixelRect.height;

        // if session first frame
        if (!sessionStarted)
        {
            DepthPlugin.GetCurrentFrameCameraTextureSize(m_session.GetNativeSessionPtr(), out textureWidth, out textureHeight);
            Debug.Log("Get camera texture size returned " + textureWidth.ToString() + "," + textureHeight.ToString());

            // create 2d texture for camera texture
            cameraTexture = new Texture2D(textureWidth, textureHeight, TextureFormat.RGBA32, false);
//            GetComponent<RawImage>().texture = cameraTexture;

            // allocate some memory for a raw texture buffer
            rawBuffer = Marshal.AllocHGlobal(textureWidth * textureHeight * 4);

            // flag as started
            sessionStarted = true;
        }
//        UnityARSessionNativeInterface.ARFrameUpdatedEvent -= FirstFrameUpdate;

        // get texture from camera
        DepthPlugin.GetCurrentFrameCameraTextureIntoBufferAsARGB(m_session.GetNativeSessionPtr(), rawBuffer, textureWidth, textureHeight);
        cameraTexture.LoadRawTextureData(rawBuffer, textureWidth * textureHeight * 4);
        cameraTexture.Apply();

        // if we got a depth buffer
        DepthPlugin.GetCurrentFrameDepthBufferSize(m_session.GetNativeSessionPtr(), out depthWidth, out depthHeight, out depthBytesPerPixel);
        Debug.Log("Get depth buffer size returned " + depthWidth.ToString() + "," + depthHeight.ToString() + " bytes per pixel " + depthBytesPerPixel.ToString());
        Matrix4x4 intrinsicMatrix;
        Vector2 intrinsicSize;
        DepthPlugin.GetCurrentFrameIntrinsics(m_session.GetNativeSessionPtr(), out intrinsicMatrix, out intrinsicSize);
        Debug.Log("matrix 2,0 is " + intrinsicMatrix.m02.ToString() + " 2,1 is " + intrinsicMatrix.m12.ToString() + " 0,0 is " + intrinsicMatrix.m00.ToString() + " 1,1 is " + intrinsicMatrix.m11.ToString());
        if (depthWidth > 0 && intrinsicSize.x > 0)
        {
            // allocate buffer
            if (!depthStarted)
            {
                // create 2d texture for depth texture
                depthTexture = new Texture2D(depthWidth, depthHeight, TextureFormat.RGBA32, false);
                GetComponent<RawImage>().texture = depthTexture;

                // allocate some memory for a raw depth buffer
                rawDepthBuffer = Marshal.AllocHGlobal(depthWidth * depthHeight * depthBytesPerPixel);
            }
            else
                return;

            // get updated buffer
            DepthPlugin.GetCurrentFrameDepthBufferIntoBuffer(m_session.GetNativeSessionPtr(), rawDepthBuffer, depthWidth, depthHeight, depthBytesPerPixel);

            // update texture from buffer
            Color[] pixels = depthTexture.GetPixels();
            unsafe
            {
                float* bufferPointer = (float*)rawDepthBuffer.ToPointer();
                for (int i = 0; i < pixels.Length; i++)
                {
                    pixels[i] = new Vector4(bufferPointer[i], bufferPointer[i], bufferPointer[i], 1.0f);
                }
            }
            depthTexture.SetPixels(pixels);
            depthTexture.Apply();

            // create point cloud first time around
            float minDepth = float.PositiveInfinity;
            float maxDepth = float.NegativeInfinity;
            float minX = float.PositiveInfinity;
            float maxX = float.NegativeInfinity;
            float minY = float.PositiveInfinity;
            float maxY = float.NegativeInfinity;
            // get intrinsic matrix
            Color[] texturePixels = cameraTexture.GetPixels();
            unsafe
            {
                // build points
                pointCloud = new List<Vector3>();
                colorCloud = new List<Color>();
                float* bufferPointer = (float*)rawDepthBuffer.ToPointer();
                for (int v = 0; v < depthHeight; v++)
                {
                    for (int u = 0; u < depthWidth; u++)
                    {
                        // calculate x, y, z
                        float z = bufferPointer[v * depthWidth + u];
                        if (z < minDepth)
                            minDepth = z;
                        if (z > maxDepth)
                            maxDepth = z;
                        if (z > 0.01f && z < 1.0f)
                        {
                            // use ar kit camera intrinsics
                            float principleX = intrinsicMatrix.m20;
                            float principleY = intrinsicMatrix.m21;
                            float focalX = intrinsicMatrix.m00;
                            float focalY = intrinsicMatrix.m11;

                            // add point
                            float U = (float)u / (float)depthWidth;
                            float V = (float)v / (float)depthHeight;
                            float x = ((float)U * intrinsicSize.x - principleX) * z / focalX;
                            float y = ((float)V * intrinsicSize.y - principleY) * z / focalY;

                            // add point to cloud
                            pointCloud.Add(new Vector3(x, y, z));

                            // find color for this UV
                            int textureX = (int)((float)textureWidth * U);
                            int textureY = (int)((float)textureHeight * V);
                            Color thisColor = texturePixels[(textureY * textureWidth) + textureX];
                            colorCloud.Add(new Color(thisColor.r, thisColor.g, thisColor.b));

                            if (x < minX)
                                minX = x;
                            if (x > maxX)
                                maxX = x;
                            if (y < minY)
                                minY = y;
                            if (y > maxY)
                                maxY = y;
                        }
                    }
                }
            }
            Debug.Log("Unity min depth is " + minDepth.ToString() + " max depth is " + maxDepth.ToString() + " min x is " + minX.ToString() + " max x is " + maxX.ToString() + " min y is " + minY.ToString() + " max y is " + maxY.ToString());
            pointCloudChanged = true;

            // started getting depth info
            depthStarted = true;
        }
    }
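    For reference, the inner loop above is attempting the standard pinhole back-projection. A minimal standalone sketch of that math, assuming the intrinsics have the focal lengths on the diagonal and the principal point in the third column (cx = m02, cy = m12; if the plugin transposes the matrix they could land in m20/m21 instead, which is worth logging to check), and assuming they were captured at the intrinsicSize resolution so depth-buffer coordinates have to be scaled up to that resolution first:

    Code (CSharp):
    using UnityEngine;

    public static class PinholeUnprojection
    {
        // Back-project one depth sample at depth-buffer pixel (u, v) with depth z (meters)
        // into a camera-space point, using a 3x3 camera matrix packed into a Matrix4x4.
        public static Vector3 Unproject(int u, int v, float z,
                                        Matrix4x4 intrinsics, Vector2 intrinsicSize,
                                        int depthWidth, int depthHeight)
        {
            // Scale depth-buffer coordinates up to the resolution the intrinsics refer to.
            float px = (u + 0.5f) * intrinsicSize.x / depthWidth;
            float py = (v + 0.5f) * intrinsicSize.y / depthHeight;

            // Assumed layout: fx = m00, fy = m11, cx = m02, cy = m12.
            float fx = intrinsics.m00;
            float fy = intrinsics.m11;
            float cx = intrinsics.m02;
            float cy = intrinsics.m12;

            // Standard pinhole model: x = (px - cx) * z / fx, y = (py - cy) * z / fy.
            float x = (px - cx) * z / fx;
            float y = (py - cy) * z / fy;
            return new Vector3(x, y, z);
        }
    }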
     
  2. raeldor

    Joined:
    Jul 8, 2013
    Posts:
    55
    So, I think the missing half of the point cloud is just the mesh vertex limit. But when zooming and panning around the object, it still seems warped. If the Z is the distance from the camera, does that mean points that are further from being directly in front of the camera will be warped because of their angle relative to the camera? Is there any way to undo that kind of warping?
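    If the depth value really is the straight-line (radial) distance from the camera rather than the distance along the optical axis, that fan-shaped warping could be undone by converting each sample to a planar Z before the pinhole back-projection. A minimal sketch, assuming radial distance and the same intrinsics layout as above (whether ARKit's buffer actually stores radial distance is an assumption to verify):

    Code (CSharp):
    using UnityEngine;

    public static class DepthConversion
    {
        // Convert a radial distance (camera center to surface) into a planar depth
        // (distance along the optical axis) for the pixel at (px, py). Only apply this
        // if the buffer stores radial distance; if it already stores planar Z, skip it.
        public static float RadialToPlanarDepth(float radialDistance, float px, float py,
                                                float fx, float fy, float cx, float cy)
        {
            // Normalized ray direction components for this pixel.
            float xn = (px - cx) / fx;
            float yn = (py - cy) / fy;

            // A ray through this pixel has length sqrt(xn^2 + yn^2 + 1) per unit of Z,
            // so dividing the radial distance by that length recovers the Z component.
            return radialDistance / Mathf.Sqrt(xn * xn + yn * yn + 1.0f);
        }
    }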

    These images show what I am trying to express. In the first image I try to align the view to minimize the gaps, which I think should then match the camera angle when the picture was taken, since the gaps are normally the parts the camera couldn't see. But when I do that alignment it ends up skewed...



    When I try to get a front-on view like when I took the photo I end up with MORE gaps...



    I am wondering if this is caused by the depth sensor being on one side of the center of the phone and the camera on the other side. If that is true, though, how come the camera image does not have the same issue? Is there a transform somewhere that is used to correct it?
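    If there is such an offset between the two sensors, correcting for it would normally mean applying a rigid (rotation plus translation) extrinsic transform to each depth point before sampling the color image. A minimal sketch of that idea, where depthToColor is a hypothetical calibration transform (not something this plugin is shown to expose):

    Code (CSharp):
    using UnityEngine;

    public static class DepthToColorAlignment
    {
        // Re-express a point from the depth sensor's frame in the color camera's frame
        // via a hypothetical extrinsic transform, then project it into the color image
        // with the color camera's intrinsics (fx, fy, cx, cy) to find the matching pixel.
        public static Vector2 ProjectDepthPointIntoColor(Vector3 depthPoint,
                                                         Matrix4x4 depthToColor,
                                                         float fx, float fy,
                                                         float cx, float cy)
        {
            // Rigid transform: rotation plus the small translation between the sensors.
            Vector3 p = depthToColor.MultiplyPoint3x4(depthPoint);

            // Standard pinhole projection into the color image.
            float u = fx * p.x / p.z + cx;
            float v = fy * p.y / p.z + cy;
            return new Vector2(u, v);
        }
    }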
     
    Last edited: Jun 30, 2019