
Question 2D texture to mat type issue

Discussion in 'Scripting' started by ECE191, Mar 12, 2023.

  1. ECE191

    ECE191

    Joined:
    Mar 12, 2023
    Posts:
    2
    I'm using an example provided by Qualcomm (Snapdragon Spaces) in conjunction with their A3 smart glasses to get camera frame access. I'm having an issue converting from a Texture2D to a Mat: the image appears white when I run the sample on an Android phone through the glasses. The code below is untouched except for the commented-out bit inside UpdateCameraTexture(). I need to convert the Texture2D to a Mat to do the necessary image processing (Canny edge detection through OpenCVForUnity). I've put a rough sketch of the conversion I'm aiming for after the full sample code. Any pointers in the right direction are greatly appreciated! :)

    Doc for OpenCVforUnity: https://enoxsoftware.github.io/OpenCVForUnity/3.0.0/doc/html/annotated.html

    Doc for A3 camera frame access sample: https://docs.spaces.qualcomm.com/unity/samples/CameraFrameAccessSample.html

    SDK: https://spaces.qualcomm.com/sdk/

    Code (CSharp):
    /******************************************************************************
    * File: CameraFrameAccessSampleController.cs
    * Copyright (c) 2023 Qualcomm Technologies, Inc. and/or its subsidiaries. All rights reserved.
    *
    ******************************************************************************/

    using OpenCVForUnity;
    using OpenCVForUnity.UnityUtils;
    using OpenCVForUnity.ImgprocModule;

    using OpenCVForUnity.CoreModule;
    using System;
    using Unity.Collections;
    using Unity.Collections.LowLevel.Unsafe;
    using UnityEngine;
    using UnityEngine.UI;
    using UnityEngine.XR.ARFoundation;
    using UnityEngine.XR.ARSubsystems;

    namespace Qualcomm.Snapdragon.Spaces.Samples
    {
        public class CameraFrameAccessSampleController : SampleController
        {
            public RawImage CameraRawImage;
            public Text[] ResolutionTexts;
            public Text[] FocalLengthTexts;
            public Text[] PrincipalPointTexts;
            public Text DeviceNotSupportedText;

            private ARCameraManager _cameraManager;
            private NativeArray<XRCameraConfiguration> _cameraConfigs;
            private float _targetFPS;
            private float _frameTime;
            private float _currentFrameTime;
            private bool _feedPaused;
            private bool _deviceSupported;

            private XRCameraIntrinsics _intrinsics;
            private XRCpuImage _lastCpuImage;
            private Texture2D _cameraTexture;

            public void Awake() {
                _cameraManager = FindObjectOfType<ARCameraManager>();
            }

            public override void Start() {
                base.Start();

                _deviceSupported = CheckDeviceSupported();
                if (!_deviceSupported) {
                    OnDeviceNotSupported();
                    return;
                }

                if (!SubsystemChecksPassed) {
                    return;
                }

                _deviceSupported = FindSupportedConfiguration();
                if (!_deviceSupported) {
                    OnDeviceNotSupported();
                    return;
                }

                _targetFPS = (int) _cameraConfigs[0].framerate;
                _frameTime = 1 / _targetFPS;
                _currentFrameTime = _frameTime;
                UpdateCameraIntrinsics();
            }

            public override void Update() {
                base.Update();

                if (!SubsystemChecksPassed || !_deviceSupported) {
                    return;
                }
                if (_feedPaused || _targetFPS <= 0) {
                    return;
                }

                _currentFrameTime -= Time.deltaTime;
                if (_currentFrameTime <= 0) {
                    _currentFrameTime = _frameTime;
                    _lastCpuImage = new XRCpuImage();
                    if (!_cameraManager.TryAcquireLatestCpuImage(out _lastCpuImage)) {
                        Debug.Log("Failed to acquire latest cpu image.");
                        return;
                    }

                    UpdateCameraTexture(_lastCpuImage);
                }
            }

            private unsafe void UpdateCameraTexture(XRCpuImage image) {
                var format = TextureFormat.RGBA32;

                if (_cameraTexture == null || _cameraTexture.width != image.width || _cameraTexture.height != image.height)
                {
                    _cameraTexture = new Texture2D(image.width, image.height, format, false);
                }

                var conversionParams = new XRCpuImage.ConversionParams(image, format);

                var rawTextureData = _cameraTexture.GetRawTextureData<byte>();

                image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);

                image.Dispose();

                /*
                Mat MatImage = new Mat (image.width, image.height, CvType.CV_8UC4);

                Mat proccImage = new Mat (image.width, image.height, CvType.CV_8UC4);

                Utils.texture2DToMat(_cameraTexture, MatImage);

                Imgproc.Canny(MatImage, proccImage, 150, 200);

                Utils.matToTexture2D(proccImage, _cameraTexture);
                */
                _cameraTexture.Apply();

                CameraRawImage.texture = _cameraTexture;
            }

            private void UpdateCameraIntrinsics() {
                if (!_cameraManager.TryGetIntrinsics(out _intrinsics)) {
                    Debug.Log("Failed to acquire camera intrinsics.");
                    return;
                }

                ResolutionTexts[0].text = _intrinsics.resolution.x.ToString();
                ResolutionTexts[1].text = _intrinsics.resolution.y.ToString();
                FocalLengthTexts[0].text = _intrinsics.focalLength.x.ToString("#0.00");
                FocalLengthTexts[1].text = _intrinsics.focalLength.y.ToString("#0.00");
                PrincipalPointTexts[0].text = _intrinsics.principalPoint.x.ToString("#0.00");
                PrincipalPointTexts[1].text = _intrinsics.principalPoint.y.ToString("#0.00");
            }

            private bool FindSupportedConfiguration() {
                _cameraConfigs = _cameraManager.GetConfigurations(Allocator.Persistent);
                return _cameraConfigs.Length > 0;
            }

            private bool CheckDeviceSupported() {
                /* Currently support only Motorola Rogue */
                bool deviceSupported = SystemInfo.deviceModel.ToLower().Contains("motorola edge");
                return deviceSupported;
            }

            private void OnDeviceNotSupported() {
                foreach (var content in ContentOnPassed) {
                    content.SetActive(false);
                }
                foreach (var content in ContentOnFailed) {
                    content.SetActive(true);
                }

                DeviceNotSupportedText.text = "This feature is not currently supported on this device.";
            }

            public void OnPausePress() {
                _feedPaused = true;
            }

            public void OnResumePress() {
                _feedPaused = false;
                _currentFrameTime = _frameTime;
            }

            protected override bool CheckSubsystem() {
                return _cameraManager.subsystem?.running ?? false;
            }
        }
    }
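    For reference, this is roughly what I think the conversion step should look like once the texture data has been filled (a sketch only, not working code, and the details may be exactly where my problem is). As far as I can tell the OpenCVForUnity Mat constructor takes rows then cols, so height comes first, and Canny wants a single-channel input, so I convert to grayscale before running it:

    Code (CSharp):
    // Sketch of the conversion step, intended to run after the texture data has been filled.
    // Assumes the Mat constructor is (rows, cols, type), so height comes first, and that
    // Utils.texture2DToMat expects an RGBA32 texture and a CV_8UC4 Mat of the same size.
    Mat rgbaMat = new Mat(_cameraTexture.height, _cameraTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(_cameraTexture, rgbaMat);

    // Canny expects a single-channel 8-bit image, so convert the RGBA Mat to grayscale first.
    Mat grayMat = new Mat(_cameraTexture.height, _cameraTexture.width, CvType.CV_8UC1);
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    // Edge detection; the output is also single channel (CV_8UC1).
    Mat edgesMat = new Mat(_cameraTexture.height, _cameraTexture.width, CvType.CV_8UC1);
    Imgproc.Canny(grayMat, edgesMat, 150, 200);

    // Write the result back into the texture for display
    // (assuming matToTexture2D handles a single-channel Mat; otherwise convert back to RGBA first).
    Utils.matToTexture2D(edgesMat, _cameraTexture);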
     
  2. Kurt-Dekker

    Kurt-Dekker

    Joined:
    Mar 16, 2013
    Posts:
    38,697
  3. dyuchenhe

    dyuchenhe

    Joined:
    Mar 15, 2023
    Posts:
    1
    Code (C++):
    cv_bridge::CvImagePtr img_ptr = cv_bridge::toCvCopy(msg, msg.encoding);
    // std::cout << "msg encoding: " << msg.encoding << std::endl; // bgr8
    // std::cout << "img encoding: " << img_ptr->encoding << std::endl;

    cv::Mat frame = img_ptr->image;
    // std::cout << frame.rows << std::endl;
    // std::cout << frame.cols << std::endl;
    // std::cout << frame.channels() << std::endl;
    // cv::imwrite("/home/jetson/dev_ws/image.jpg", frame);

    // set single channel and binary image
    cv::Mat img_0_1;
    frame.convertTo(img_0_1, CV_32FC3, 1.f / 255);

    cv::Mat bgr[3]; // splitting channels from original image
    cv::Mat single_channel_image(img_height, img_width, CV_32FC1, cv::Scalar(0));
    // cv::Mat binary_image(img_height, img_width, CV_8UC1, cv::Scalar(0));

    cv::split(img_0_1, bgr);
    single_channel_image = -2954.6507578 * bgr[0] * 255 +
                           -1753.88636375 * bgr[1] * 255 +
                           3325.96402434 * bgr[2] * 255;
    cv::threshold(single_channel_image, single_channel_image, 190000.0, 255.0, 0);
    single_channel_image.convertTo(single_channel_image, CV_8UC1);

    uint8_t* single_channel_pixel_ptr = (uint8_t*)single_channel_image.data;
    int pos_pixels_count = 0;
    for (int i = 0; i < frame.rows; i++) {
        for (int j = 0; j < frame.cols; j++) {
            if (single_channel_pixel_ptr[i * frame.cols + j] == 255) {
                pos_pixels_count += 1;
            }
        }
    }
    // std::cout << "pos_pixels_count: " << pos_pixels_count << std::endl;
    // lower 650 upper 20500
    auto message = std_msgs::msg::Bool();
    if (650 < pos_pixels_count && pos_pixels_count < 20500) {
        message.data = true;
    }
    else {
        message.data = false;
    }
    this->pos_pixels_over_thresh_pub_->publish(message);

    /*
    uint8_t* original_pixel_ptr = (uint8_t*)frame.data;
    uint8_t* single_channel_pixel_ptr = (uint8_t*)single_channel_image.data;
    int cn = frame.channels(); // original img channel number
    cv::Scalar_<uint8_t> original_bgr_pixel;

    for (int i = 0; i < frame.rows; i++) {
        for (int j = 0; j < frame.cols; j++) {
            original_bgr_pixel.val[0] = original_pixel_ptr[i * frame.cols * cn + j * cn + 0]; // B
            original_bgr_pixel.val[1] = original_pixel_ptr[i * frame.cols * cn + j * cn + 1]; // G
            original_bgr_pixel.val[2] = original_pixel_ptr[i * frame.cols * cn + j * cn + 2]; // R
            // do something with BGR values...
            single_channel_pixel_ptr[i * frame.cols + j] =
                0.299 * original_bgr_pixel.val[2] +
                0.587 * original_bgr_pixel.val[1] +
                0.114 * original_bgr_pixel.val[0];
        }
    }
    */
    Here is a basic example of accessing and writing pixels in a cv::Mat. In short, I receive a 3-channel image, do cone detection by multiplying each pixel's channels by learned coefficients, threshold the result, and write the classification onto a single-channel mask image. Note the last commented-out section, which shows how to access each individual channel value of a 3-channel image with a double loop over rows and columns.
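    On the Unity/C# side of this thread, the same double-loop pattern could be written roughly like this with OpenCVForUnity (a sketch only; it uses the Java-style Mat.get/Mat.put accessors and assumes an RGBA CV_8UC4 input such as the one Utils.texture2DToMat produces; per-pixel get/put is slow, so for full camera frames a bulk copy into a byte array is usually preferable):

    Code (CSharp):
    using OpenCVForUnity.CoreModule;

    // Sketch: manual per-pixel access on an OpenCVForUnity Mat, mirroring the double loop above.
    // Mat.get(row, col) returns the pixel's channel values as a double[].
    static Mat ToGrayManually(Mat rgbaMat) {
        Mat grayMat = new Mat(rgbaMat.rows(), rgbaMat.cols(), CvType.CV_8UC1);
        for (int row = 0; row < rgbaMat.rows(); row++) {
            for (int col = 0; col < rgbaMat.cols(); col++) {
                double[] px = rgbaMat.get(row, col);              // { R, G, B, A } for an RGBA Mat
                double gray = 0.299 * px[0] + 0.587 * px[1] + 0.114 * px[2];
                grayMat.put(row, col, gray);                      // write into the single-channel mask
            }
        }
        return grayMat;
    }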