Search Unity

[RELEASED] OpenCV for Unity

Discussion in 'Assets and Asset Store' started by EnoxSoftware, Oct 30, 2014.

  1. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    http://forum.unity3d.com/threads/released-opencv-for-unity.277080/page-16#post-2709724
    Did you fail to generate the video file when you tested this sample?
     
  2. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Since opencv2.framework has been added bitcode, framework size become large.
    http://forum.unity3d.com/threads/unity-5-3-x-build-size-increase-faq.383533/

    For now, I do not plan to publish a smaller-sized version (with reduced module support).
     
  3. LAFI

    LAFI

    Joined:
    Sep 5, 2014
    Posts:
    47
    Hello
    please can you reply at least?
     
  4. LAFI

    LAFI

    Joined:
    Sep 5, 2014
    Posts:
    47
    I need to get the center of the rectangle in the CamShift scene; it seems that it isn't the same as in the simple face detection scene. I need your help.
     
  5. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Sorry for my late reply.
    Code (CSharp):
    using UnityEngine;
    using System.Collections;

    #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    using UnityEngine.SceneManagement;
    #endif
    using OpenCVForUnity;

    using System.Collections.Generic;

    namespace OpenCVForUnitySample
    {
        /// <summary>
        /// CamShift sample.
        /// Lets the user select a region of interest by touching/clicking 4 points, then
        /// tracks that region across webcam frames with the CamShift algorithm and moves
        /// a 3D marker object to the world-space position of the tracked rectangle's center.
        /// Referring to http://www.computervisiononline.com/blog/tutorial-using-camshift-track-objects-video.
        /// </summary>
        [RequireComponent (typeof(WebCamTextureToMatHelper))]
        public class CamShiftSample : MonoBehaviour
        {
            /// <summary>
            /// Pixel buffer reused every frame when copying the Mat into the texture.
            /// </summary>
            Color32[] colors;

            /// <summary>
            /// The texture the processed camera frame is rendered into.
            /// </summary>
            Texture2D texture;

            /// <summary>
            /// The points (in image coordinates) selected so far; tracking starts once there are 4.
            /// </summary>
            List<Point> roiPointList;

            /// <summary>
            /// The tracked region of interest; updated by CamShift every frame.
            /// </summary>
            OpenCVForUnity.Rect roiRect;

            /// <summary>
            /// The current frame converted to HSV color space.
            /// </summary>
            Mat hsvMat;

            /// <summary>
            /// Histogram of the selected ROI (hue channel), used for back projection.
            /// </summary>
            Mat roiHistMat;

            /// <summary>
            /// Termination criteria for the CamShift iteration.
            /// </summary>
            TermCriteria termination;

            /// <summary>
            /// The web cam texture to mat helper.
            /// </summary>
            WebCamTextureToMatHelper webCamTextureToMatHelper;

            /// <summary>
            /// Scene object moved to the 3D position corresponding to the tracked rectangle's center.
            /// </summary>
            public GameObject point3D;

            // Use this for initialization
            void Start ()
            {
                roiPointList = new List<Point> ();
                // Stop iterating after 10 steps or when the window moves less than 1 pixel.
                termination = new TermCriteria (TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

                webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
                webCamTextureToMatHelper.Init ();
            }

            /// <summary>
            /// Raises the web cam texture to mat helper inited event.
            /// Allocates per-frame buffers sized to the camera resolution and fits the
            /// display quad/orthographic camera to the screen.
            /// </summary>
            public void OnWebCamTextureToMatHelperInited ()
            {
                Debug.Log ("OnWebCamTextureToMatHelperInited");

                Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

                colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
                texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

                hsvMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC3);

                // Scale the quad so one world unit corresponds to one camera pixel.
                gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);

                Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

                float width = gameObject.transform.localScale.x;
                float height = gameObject.transform.localScale.y;

                // Choose the orthographic size so the quad fills the screen without cropping.
                float widthScale = (float)Screen.width / width;
                float heightScale = (float)Screen.height / height;
                if (widthScale < heightScale) {
                    Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                } else {
                    Camera.main.orthographicSize = height / 2;
                }

                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
            }

            /// <summary>
            /// Raises the web cam texture to mat helper disposed event.
            /// Releases native Mat resources and resets the ROI selection.
            /// </summary>
            public void OnWebCamTextureToMatHelperDisposed ()
            {
                Debug.Log ("OnWebCamTextureToMatHelperDisposed");

                // Guard against double-dispose: this event may fire before Init completed
                // or more than once when the camera is re-initialized.
                if (hsvMat != null) {
                    hsvMat.Dispose ();
                    hsvMat = null;
                }
                if (roiHistMat != null) {
                    roiHistMat.Dispose ();
                    roiHistMat = null;
                }
                roiPointList.Clear ();
            }

            // Update is called once per frame
            void Update ()
            {
                if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {

                    Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

                    // RGBA -> RGB -> HSV (CamShift back projection works on the hue channel).
                    Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                    Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                    Point[] points = roiPointList.ToArray ();

                    if (roiPointList.Count == 4) {
                        // Tracking mode: back-project the ROI histogram and run CamShift.
                        using (Mat backProj = new Mat ()) {
                            Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                            RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                            // Overwrite the drawn points with the corners of the tracked rotated rect.
                            r.points (points);
                        }

                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch: a completed tap clears the selection and stops tracking.
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {
                                if(Input.GetTouch(0).phase == TouchPhase.Ended){
                                    roiPointList.Clear ();
                                }
                            }
                        #else
                        if (Input.GetMouseButtonUp (0)) {
                            roiPointList.Clear ();
                        }
                        #endif
                    }

                    if (roiPointList.Count < 4) {
                        // Selection mode: collect up to 4 points from touch/mouse input.
                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {
                                Touch t = Input.GetTouch(0);
                                if(t.phase == TouchPhase.Ended){
                                    roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
                //                    Debug.Log ("touch X " + t.position.x);
                //                    Debug.Log ("touch Y " + t.position.y);

                                    // Discard points that fall outside the camera image.
                                    if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                    }
                                }
                            }
                        #else
                        //Mouse
                        if (Input.GetMouseButtonUp (0)) {

                            roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                            //                                                Debug.Log ("mouse X " + Input.mousePosition.x);
                            //                                                Debug.Log ("mouse Y " + Input.mousePosition.y);

                            // Discard points that fall outside the camera image.
                            if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                roiPointList.RemoveAt (roiPointList.Count - 1);
                            }
                        }
                        #endif

                        if (roiPointList.Count == 4) {
                            // All 4 points chosen: build the initial search window and ROI histogram.
                            using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                                roiRect = Imgproc.boundingRect (roiPointMat);
                            }

                            if (roiHistMat != null) {
                                roiHistMat.Dispose ();
                                roiHistMat = null;
                            }
                            roiHistMat = new Mat ();

                            using (Mat roiHSVMat = new Mat (hsvMat, roiRect))
                            using (Mat maskMat = new Mat ()) {
                                // 16-bin hue histogram over [0, 180), normalized to [0, 255].
                                Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                                Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                                //                                                        Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                        }
                    }

                    if (points.Length < 4) {
                        // Still selecting: draw the chosen points.
                        for (int i = 0; i < points.Length; i++) {
                            Imgproc.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                        }
                    } else {
                        // Tracking: draw the rotated tracking quad and the axis-aligned search window.
                        for (int i = 0; i < 4; i++) {
                            Imgproc.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                        }

                        Imgproc.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);

                        GameObject quad = gameObject;

                        //center point of detected face
                        Point point2D = new Point (roiRect.x + roiRect.width / 2, roiRect.y + roiRect.height / 2);

                        float textureWidth = GetComponent<Renderer> ().material.mainTexture.width;
                        float textureHeight = GetComponent<Renderer> ().material.mainTexture.height;

                        // Translate the 2D texture point so the texture center is the origin
                        // (flipping Y, since texture rows grow downward).
                        Matrix4x4 transCenterM =
                            Matrix4x4.TRS (new Vector3 (((float)point2D.x) - textureWidth / 2, (textureHeight - (float)point2D.y) - textureHeight / 2, 0), Quaternion.identity, new Vector3 (1, 1, 1));

                        Vector3 translation = new Vector3 (quad.transform.localPosition.x, quad.transform.localPosition.y, quad.transform.localPosition.z);

                        Quaternion rotation =
                            Quaternion.Euler (quad.transform.localEulerAngles.x, quad.transform.localEulerAngles.y, quad.transform.localEulerAngles.z);

                        Vector3 scale = new Vector3 (quad.transform.localScale.x / textureWidth, quad.transform.localScale.y / textureHeight, 1);

                        // Map texture-space coordinates into the quad's local 3D space.
                        Matrix4x4 trans2Dto3DM =
                            Matrix4x4.TRS (translation, rotation, scale);

                        Matrix4x4 resultM = trans2Dto3DM * transCenterM;

                        Vector3 point3DVec = new Vector3 (0, 0, 0);
                        point3DVec = resultM.MultiplyPoint3x4 (point3DVec);

                        point3D.transform.position = point3DVec;
                        point3D.transform.eulerAngles = gameObject.transform.eulerAngles;
                    }

                    Imgproc.putText (rgbaMat, "PLEASE TOUCH 4 POINTS", new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                    Utils.matToTexture2D (rgbaMat, texture, colors);
                }
            }

            /// <summary>
            /// Raises the disable event.
            /// </summary>
            void OnDisable ()
            {
                webCamTextureToMatHelper.Dispose ();
            }

            /// <summary>
            /// Raises the back button event.
            /// </summary>
            public void OnBackButton ()
            {
                #if UNITY_5_3 || UNITY_5_3_OR_NEWER
                SceneManager.LoadScene ("OpenCVForUnitySample");
                #else
                Application.LoadLevel ("OpenCVForUnitySample");
                #endif
            }

            /// <summary>
            /// Raises the play button event.
            /// </summary>
            public void OnPlayButton ()
            {
                webCamTextureToMatHelper.Play ();
            }

            /// <summary>
            /// Raises the pause button event.
            /// </summary>
            public void OnPauseButton ()
            {
                webCamTextureToMatHelper.Pause ();
            }

            /// <summary>
            /// Raises the stop button event.
            /// </summary>
            public void OnStopButton ()
            {
                webCamTextureToMatHelper.Stop ();
            }

            /// <summary>
            /// Raises the change camera button event. Re-initializes the helper with the
            /// opposite camera facing at the same requested resolution.
            /// </summary>
            public void OnChangeCameraButton ()
            {
                webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
            }

            /// <summary>
            /// Converts a screen-space point to the quad's texture coordinate space by
            /// computing a perspective transform from the quad's projected corners.
            /// </summary>
            /// <returns>The point in texture coordinates.</returns>
            /// <param name="screenPoint">Screen point.</param>
            /// <param name="quad">Quad displaying the camera texture.</param>
            /// <param name="cam">Camera used to project the quad's corners.</param>
            static Point convertScreenPoint (Point screenPoint, GameObject quad, Camera cam)
            {
                // Screen-space positions of the quad's four corners (top-left, top-right,
                // bottom-right, bottom-left), assuming the quad is axis-aligned in local space.
                Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));

                // Dispose all temporary native Mats deterministically (the original leaked them
                // until the finalizer ran).
                using (Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2))
                using (Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2)) {

                    srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                    dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

                    using (Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat))
                    using (MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint))
                    using (MatOfPoint2f dstPointMat = new MatOfPoint2f ()) {

//                        Debug.Log ("srcRectMat " + srcRectMat.dump ());
//                        Debug.Log ("dstRectMat " + dstRectMat.dump ());
//                        Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

                        Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

//                        Debug.Log ("srcPointMat " + srcPointMat.dump ());
//                        Debug.Log ("dstPointMat " + dstPointMat.dump ());

                        return dstPointMat.toArray () [0];
                    }
                }
            }
        }
    }
    camshift.PNG
     
  6. wirelessdreamer

    wirelessdreamer

    Joined:
    Apr 13, 2016
    Posts:
    134
    The Markerbased AR Sample is no longer working under unity 5.4. I just created a fresh project, and imported opencv and the marker based ar example. I get these errors now:

    Could not start graph
    UnityEngine.WebCamTexture:play()
    MarkerBasedARSample.<init>c__Iterator0:MoveNext() (at Assets/MarkerBasedARSample/Scripts/WebCamTextureToMatHelper.cs:186)
    UnityEngine.MonoBehaviour:StartCoroutine(IEnumerator)
    MarkerBasedARSample.WebCamTextureToMatHelper:Init() (at Assets/MarkerBasedARSample/Scripts/WebCamTextureToMatHelper.cs:115)
    MarkerBasedARSample.WebCamTextureMarkerBasedARSample:Start() (at Assets/MarkerBasedARSample/Scripts/WebCamTextureMarkerBasedARSample.cs:92)

    Could not pause pControl
    UnityEngine.WebCamTexture:play()
    MarkerBasedARSample.<init>c__Iterator0:MoveNext() (at Assets/MarkerBasedARSample/Scripts/WebCamTextureToMatHelper.cs:186)
    UnityEngine.MonoBehaviour:StartCoroutine(IEnumerator)
    MarkerBasedARSample.WebCamTextureToMatHelper:Init() (at Assets/MarkerBasedARSample/Scripts/WebCamTextureToMatHelper.cs:115)
    MarkerBasedARSample.WebCamTextureMarkerBasedARSample:Start() (at Assets/MarkerBasedARSample/Scripts/WebCamTextureMarkerBasedARSample.cs:92)
     
  7. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    This error seems to be caused by WebCam device.
    https://developer.vuforia.com/forum...scussion/critical-error-could-not-start-graph
     
  8. Dotby

    Dotby

    Joined:
    Nov 12, 2013
    Posts:
    32

    Code (CSharp):
    1.  
    2.  
    3. //_tex = inited Camera texture
    4.  
    5. ========================
    6.  
    7. _tmpMat = new Mat(
    8.             _tex.height,
    9.             _tex.width,
    10.             CvType.CV_8UC4
    11.         );
    12.  
    13. ========================
    14.  
    15. Utils.textureToMat(_camTex, _tmpMat);
    16.  
    17. Imgproc.putText (_tmpMat, "Hello!", new Point ((double)5, (double)_tmpMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
    18.  
    19. Texture2D texture = new Texture2D (_tmpMat.cols (), _tmpMat.rows (), TextureFormat.RGBA32, false);
    20. Utils.matToTexture2D (_tmpMat, texture);
    21.  
    22. _outRen.material.mainTexture = (Texture)texture;

    Where is my mistake?
    I just want to get texture from camera, print "Hello!" and set as Texture of right Rect.

    [Build and run from android]


    UPD!
    In release build mode all is good.
     
    Last edited: Aug 11, 2016
  9. Dotby

    Dotby

    Joined:
    Nov 12, 2013
    Posts:
    32
    Last edited: Aug 11, 2016
  10. shawww

    shawww

    Joined:
    Sep 30, 2014
    Posts:
    43
    Hey ya'll,

    Question: I'm using the WebCamTextureAR sample, and it works great. The problem is that on my mobile device, the aspect ratio is 4:3 and I would really love if it would fill up the screen (16:9). I can get the quad to resize but I can't figure out how to get the AR objects to update accordingly. If anyone has any insight on how to either scale up the quad and match the AR objects to the larger image, or to scale the camera to reflect this change, that would be super super super helpful!
     
  11. Dotby

    Dotby

    Joined:
    Nov 12, 2013
    Posts:
    32
    Upload a screenshot please. Visually it will be possible to better understand your question.
     
  12. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
  13. Dragonic8926

    Dragonic8926

    Joined:
    Mar 10, 2014
    Posts:
    34
    Thank you, that was perfect !
    IPA created without error !
     
  14. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,085
  15. Dotby

    Dotby

    Joined:
    Nov 12, 2013
    Posts:
    32
    Could you test the functionality of the boot options?
    I'm working on a version of Unity 5.4.0.
    Launch BlobDetektor and nothing happens.
    Remove _detector.read (Utils.getFilePath ("blobparams.yml")); (it turns out that the detector takes the default settings in the code)
    Only then the detector finds objects.

    Your YAML file is different from the design with the
    https://docs.unity3d.com/Manual/YAMLSceneExample.html
     
  16. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
  17. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Result screenshot on my environment.
    Unity 5.0.4f3
    OpenCVforUnity 2.0.5

    simpleblob.PNG

    if remove blobDetector.read (Utils.getFilePath ("blobparams.yml"));
    simpleblobwithoutparam.PNG

    Also, Format of yaml file Please refer to the information of OpenCV Java.
     
    Dotby likes this.
  18. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,085
    Tesseract works on the EmguCV plugin for Unity — your competitor — so there should be a way.
     
  19. ShaBax

    ShaBax

    Joined:
    Oct 18, 2015
    Posts:
    1
  20. ImasconoArt

    ImasconoArt

    Joined:
    Aug 18, 2015
    Posts:
    2
    Hi,

    I'm trying to build an iOS project in Unity Cloud, everything appears to be ok, but when I have to use your plugin the screen goes black.

    I have no problem building in xCode, the plugin works fine.

    Can you help me?

    Thanks in advance.
     
  21. yamertyourgod

    yamertyourgod

    Joined:
    Apr 20, 2015
    Posts:
    1
    Hi there, I have wrote the code for detection of rectangular objects corners from webcam, and obtained 4 points each frame, but the points are not really stable ( shakes), so I need some method to reduce those shakes and I found out that Kalman filter is great for this. Unfortunately I couldn't translate cpp examples properly. I use the opencv for unity wrapper. Please, help to deal with it. I need some example of implementing of Kalman filter based on this wrapper or c#(without opencv) or java code. Any help will appreciated.
     
  22. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    When I build OpenCVforUnitySample in Unity Cloud Build before v2.0.6 release, such problems did not occur.
    Could you tell me the ios setting of UnityCloudBuild?
    Unity version
    Xcode version
     
  23. shawww

    shawww

    Joined:
    Sep 30, 2014
    Posts:
    43
    This is what the default FaceLandmarkDetector example looks like– basically, I'm trying to get rid of the blue space. The iPhone shoots at 4:3 aspect ratio, so there's space above and below. I can hardcode scale the Main Camera and AR Camera but I'd like to do it dynamically, based on the screen dimensions of the phone.
     

    Attached Files:

  24. Dotby

    Dotby

    Joined:
    Nov 12, 2013
    Posts:
    32
    Do you want the stream to accommodate different screen sizes?
    If so, I do it through code, stretching the height relative to the width to fit the height of the screen.
     
  25. Streamfall

    Streamfall

    Joined:
    Aug 8, 2011
    Posts:
    43
    Hi Enox & all,

    I'm attempting to use grabcut using a Mask, as shown here : http://docs.opencv.org/3.1.0/d8/d83/tutorial_py_grabcut.html

    I am using this as a model :
    http://stackoverflow.com/questions/14111716/how-to-set-a-mask-image-for-grabcut-in-opencv

    I had success with grabcut by initializing with a rect, but the INIT_WITH_MASK version does not seem to be working.

    The top image shows INIT_WITH_RECT working.



    Below is an attempt at GrabCut INIT_WITH_MASK. It is possible that I am not understanding how this is used, but this is the result after passing the resulting image from version one as well as a mask that I drew manually (see the bottom square in this image). To me, this looks like it is just masking out the pixels, not reapplying the algorithm:





    Without digging too deep, here is the code for the second iteration of the algorithm. Any advice?


    Code (CSharp):
    1. public void Grab()
    2.     {
    3.         Texture2D sourceImage = Resources.Load (sourceImg0) as Texture2D;
    4.         one.material.mainTexture = sourceImage;
    5.         Mat image = new Mat(sourceImage.height, sourceImage.width, CvType.CV_8UC3);
    6.         Utils.texture2DToMat(sourceImage, image);
    7.      
    8.         //http://stackoverflow.com/questions/30872353/optimizing-performance-of-grabcut-in-opencv-java
    9.         //OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(image.width () / 4, image.height () / 4, 3 * image.width () / 4, 3 * image.height () / 4);
    10.         OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(0, 0, 9 * image.width () / 10, 9 * image.height () / 10);
    11.         Mat gCutResult = new Mat ();
    12.         Mat bgdModel = new Mat ();
    13.         Mat fgModel = new Mat ();
    14.         Mat source = new Mat (1, 1, CvType.CV_8U, new Scalar (3));
    15.         Imgproc.grabCut (image, gCutResult, rect, bgdModel, fgModel, 2, Imgproc.GC_INIT_WITH_RECT);
    16.  
    17.         maskRend0.material.mainTexture = ReturnTextureFromMat (gCutResult);
    18.  
    19.         Core.compare (gCutResult, source, gCutResult, Core.CMP_EQ);
    20.         Mat foreground = new Mat (image.size (), CvType.CV_8UC3, new Scalar (255, 255, 255));
    21.         image.copyTo (foreground, gCutResult);
    22.      
    23.         //do we need this step?
    24.         Imgproc.cvtColor(foreground, foreground, Imgproc.COLOR_BGR2BGRA);
    25.      
    26.         mergeResult.material.mainTexture = ConvertMatToAlphaTexture (foreground);
    27.         //foreground = ConvertMatToAlphaMat (foreground);
    28.  
    29.         firstResult = foreground;
    30.  
    31.         //try some different texture formats..
    32.         Texture2D resultTexture = new Texture2D(firstResult.cols(), firstResult.rows(), TextureFormat.RGBA32, false);
    33.         Utils.matToTexture2D (firstResult, resultTexture);
    34.         maskRend.material.mainTexture = resultTexture;
    35.      
    36.     }
    37.  
    38.     /// <summary>
    39.     /// This version is to be used after initial grabcut
    40.     /// </summary>
    41.     /// <param name="image">Image.</param>
    42.     public void GrabMaskFromButton2()
    43.     {
    44.         Mat image = new Mat ();
    45.  
    46.         if (string.IsNullOrEmpty (sourceImg1)) {
    47.                      image = firstResult;
    48.                     one.material.mainTexture = ReturnTextureFromMat (image);
    49.         } else {
    50.  
    51.             Texture2D sourceImage = Resources.Load (sourceImg1) as Texture2D;
    52.             one.material.mainTexture = sourceImage;
    53.              image = new Mat (sourceImage.height, sourceImage.width, CvType.CV_8UC3);
    54.             Utils.texture2DToMat (sourceImage, image);
    55.         }
    56.  
    57.  
    58.  
    59.  
    60.         Texture2D maskImage = Resources.Load (maskImg0) as Texture2D;
    61.         Mat maskMat = new Mat(maskImage.width, maskImage.height, CvType.CV_8U,new Scalar( 0)) ; //CV_8UC3  //CV_16S //CV_8UC1
    62.         Utils.texture2DToMat(maskImage, maskMat);
    63.         maskRend0.material.mainTexture = ReturnTextureFromMat (maskMat);
    64.         ConvertToOpencvValues (maskMat);
    65.         //PrintMatProperties (maskMat);
    66.  
    67.         //http://stackoverflow.com/questions/30872353/optimizing-performance-of-grabcut-in-opencv-java
    68.         //OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(image.width () / 4, image.height () / 4, 3 * image.width () / 4, 3 * image.height () / 4);
    69.         OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(0, 0, 9 * image.width () / 10, 9 * image.height () / 10);
    70.         //Mat gCutResult = new Mat ();
    71.         Mat bgdModel = new Mat ();
    72.         Mat fgModel = new Mat ();
    73.  
    74.         Imgproc.grabCut (image, maskMat, rect, bgdModel, fgModel, 1, Imgproc.GC_INIT_WITH_MASK); //GC_INIT_WITH_MASK
    75.  
    76.         maskRend.material.mainTexture = ReturnTextureFromMat (maskMat);
    77.  
    78.         mergeResult.material.mainTexture = null;
    79.  
    80.         convertToHumanValues (maskMat);
    81.         //Imgproc.threshold (maskMat, maskMat, 128, 255, Imgproc.THRESH_TOZERO);
    82.  
    83.         Mat foreground = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0, 0, 0));
    84.         image.copyTo (foreground, maskMat);
    85.  
    86.         two.material.mainTexture = ReturnTextureFromMat (foreground);
    87.  
    88.     }
    89.  
    90.     private static void convertToHumanValues(Mat mask) {
    91.         byte[] buffer = new byte[3];
    92.         for (int x = 0; x < mask.rows(); x++) {
    93.             for (int y = 0; y < mask.cols(); y++) {
    94.                 mask.get(x, y, buffer);
    95.                 int value = buffer[0];
    96.                 if (value == Imgproc.GC_BGD) {
    97.                     buffer[0] = 0; // for sure background
    98.                 } else if (value == Imgproc.GC_PR_BGD) {
    99.                     buffer[0] = 85; // probably background
    100.                 } else if (value == Imgproc.GC_PR_FGD) {
    101.                     buffer[0] = (byte) 170; // probably foreground
    102.                 } else {
    103.                     buffer[0] = (byte) 255; // for sure foreground
    104.  
    105.                 }
    106.                 mask.put(x, y, buffer);
    107.             }
    108.         }
    109.     }
    110.  
    111.  
    112.     private static void ConvertToOpencvValues(Mat mask) {
    113.         byte[] buffer = new byte[3];
    114.         for (int x = 0; x < mask.rows(); x++) {
    115.             for (int y = 0; y < mask.cols(); y++) {
    116.                 mask.get(x, y, buffer);
    117.                 int value = buffer[0];
    118.                 //if 0, do nothing...
    119. //                if (value == 0) {
    120. //                    print ("Doing nothing");
    121.                 //                }else
    122.                 if (value > 0 && value < 64) {
    123.                     buffer[0] = Imgproc.GC_BGD; // for sure background
    124.                     //print("BGD");
    125.                 } else if (value >= 64 && value < 128) {
    126.                     buffer[0] = Imgproc.GC_PR_BGD; // probably background
    127.                     //print("PBGD");
    128.                 } else if (value >= 128 && value < 192) {
    129.                     buffer[0] = Imgproc.GC_PR_FGD; // probably foreground
    130.                     //print("PFGD");
    131.                 } else {
    132.                     buffer[0] = Imgproc.GC_FGD; // for sure foreground
    133.                     //print("FGD");
    134.                 }
    135.                 mask.put(x, y, buffer);
    136.             }
    137.         }
    138.     }
     
    Last edited: Aug 20, 2016
  26. nykwil

    nykwil

    Joined:
    Feb 28, 2015
    Posts:
    49
    I got it working pretty easily. There's a lot of code there to go through to see any problems. It's weird that the mask that gets returned is 0 == background, 1 == foreground, 2 == background, 3 == foreground. So I can't think of a nice way to create an alpha mask without doing it manually. Advice would be great. Is there an even-value compare?

    Code (csharp):
    1.    
    2. public Mat GrabCut(Mat image, Mat mask) {
    3.         Rect rectangle = new Rect(0, 0, image.cols(), image.rows());
    4.  
    5.         Imgproc.grabCut(image, mask, rectangle, new Mat(), new Mat(), 8, Imgproc.GC_INIT_WITH_MASK);
    6.         Mat result = new Mat(mask.rows(), mask.cols(), CvType.CV_8UC4);
    7.         copyMaskIntoResult(mask, image, result);
    8.         return result;
    9. }
    10.  
    11. // copies img into result where the mask is foreground otherwise copies 0,0,0,0 pixel
    12. static void copyMaskIntoResult(Mat mask, Mat img, Mat result) {
    13.         UnityEngine.Assertions.Assert.IsTrue(mask.rows() == img.rows() && mask.cols() == img.cols());
    14.         byte[] buffer = new byte[3];
    15.         byte[] impix = new byte[4];
    16.         byte[] bgpix = new byte[4] { 0, 0, 0, 0 };
    17.  
    18.         for (int r = 0; r < mask.rows(); r++) {
    19.             for (int c = 0; c < mask.cols(); c++) {
    20.                 mask.get(r, c, buffer);
    21.                 int value = buffer[0];
    22.                 if (value == Imgproc.GC_BGD || value == Imgproc.GC_PR_BGD) {
    23.                     result.put(r, c, bgpix);
    24.                 }
    25.                 else {
    26.                     img.get(r, c, buffer);
    27.                     impix[0] = buffer[0];
    28.                     impix[1] = buffer[1];
    29.                     impix[2] = buffer[2];
    30.                     impix[3] = 255;
    31.                     result.put(r, c, impix);
    32.                 }
    33.             }
    34.         }
    35.     }
    36.  
     
  27. Streamfall

    Streamfall

    Joined:
    Aug 8, 2011
    Posts:
    43
    nykwil - did you get a result that looks like the grabcut algorithm is working? Can you post your result images?
     
  28. ImasconoArt

    ImasconoArt

    Joined:
    Aug 18, 2015
    Posts:
    2
    here it is

    upload_2016-8-22_8-51-31.png
     
  29. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Please try the following code.(changed to scale Quad to the entire screen.)
    Code (CSharp):
    1. /// <summary>
    2.         /// Raises the web cam texture to mat helper inited event.
    3.         /// </summary>
    4.         public void OnWebCamTextureToMatHelperInited ()
    5.         {
    6.             Debug.Log ("OnWebCamTextureToMatHelperInited");
    7.            
    8.             Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
    9.            
    10.             colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
    11.             texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
    12.  
    13.             gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    14.             Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
    15.  
    16.             float width = gameObject.transform.localScale.x;
    17.             float height = gameObject.transform.localScale.y;
    18.  
    19.             float imageScale = 1.0f;
    20.             float widthScale = (float)Screen.width / width;
    21.             float heightScale = (float)Screen.height / height;
    22.             /*
    23.             if (widthScale < heightScale)
    24.             {
    25.                 Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    26.                 imageScale = (float)Screen.height / (float)Screen.width;
    27.             }
    28.             else
    29.             {
    30.                 Camera.main.orthographicSize = height / 2;
    31.             }
    32.             */
    33.  
    34.             if (widthScale < heightScale) {
    35.                 Camera.main.orthographicSize = height / 2;
    36.                 imageScale = 0.5f;
    37.             } else {
    38.                 Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    39.                 imageScale = (float)Screen.height / (float)Screen.width / 0.5f;
    40.             }
    41.            
    42.  
    43.  
    44.             gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    45.                                    
    46.                                    
    47.             int max_d = Mathf.Max (webCamTextureMat.rows (), webCamTextureMat.cols ());
    48.             camMatrix = new Mat (3, 3, CvType.CV_64FC1);
    49.             camMatrix.put (0, 0, max_d);
    50.             camMatrix.put (0, 1, 0);
    51.             camMatrix.put (0, 2, webCamTextureMat.cols () / 2.0f);
    52.             camMatrix.put (1, 0, 0);
    53.             camMatrix.put (1, 1, max_d);
    54.             camMatrix.put (1, 2, webCamTextureMat.rows () / 2.0f);
    55.             camMatrix.put (2, 0, 0);
    56.             camMatrix.put (2, 1, 0);
    57.             camMatrix.put (2, 2, 1.0f);
    58.                                    
    59.             Size imageSize = new Size (webCamTextureMat.cols () * imageScale, webCamTextureMat.rows () * imageScale);
    60.             double apertureWidth = 0;
    61.             double apertureHeight = 0;
    62.             double[] fovx = new double[1];
    63.             double[] fovy = new double[1];
    64.             double[] focalLength = new double[1];
    65.             Point principalPoint = new Point ();
    66.             double[] aspectratio = new double[1];
    67.                                    
    68.                                    
    69.                                    
    70.                                    
    71.             Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    72.                                    
    73.             Debug.Log ("imageSize " + imageSize.ToString ());
    74.             Debug.Log ("apertureWidth " + apertureWidth);
    75.             Debug.Log ("apertureHeight " + apertureHeight);
    76.             Debug.Log ("fovx " + fovx [0]);
    77.             Debug.Log ("fovy " + fovy [0]);
    78.             Debug.Log ("focalLength " + focalLength [0]);
    79.             Debug.Log ("principalPoint " + principalPoint.ToString ());
    80.             Debug.Log ("aspectratio " + aspectratio [0]);
    81.                                    
    82.                                    
    83.             if (widthScale < heightScale) {
    84.                 ARCamera.fieldOfView = (float)fovx [0];
    85.             } else {
    86.                 ARCamera.fieldOfView = (float)fovy [0];
    87.             }
    88.  
    89.                                    
    90.             Debug.Log ("camMatrix " + camMatrix.dump ());
    91.                                    
    92.                                    
    93.             distCoeffs = new MatOfDouble (0, 0, 0, 0);
    94.             Debug.Log ("distCoeffs " + distCoeffs.dump ());
    95.                                    
    96.                                    
    97.                                    
    98.             invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
    99.             Debug.Log ("invertYM " + invertYM.ToString ());
    100.            
    101.             invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
    102.             Debug.Log ("invertZM " + invertZM.ToString ());
    103.            
    104.            
    105.             axes.SetActive (false);
    106.             head.SetActive (false);
    107.             rightEye.SetActive (false);
    108.             leftEye.SetActive (false);
    109.             mouth.SetActive (false);
    110.  
    111.  
    112.             mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem> (true);
    113.  
    114.         }
     
  30. Streamfall

    Streamfall

    Joined:
    Aug 8, 2011
    Posts:
    43
    Hi Nykwil,

    I used your code but the result is the same as my previous attempt. Using your code directly, I received this error :
    CvException: Provided data element number (3) should be multiple of the Mat channels count (4)
    OpenCVForUnity.Mat.get (Int32 row, Int32 col, System.Byte[] data) (at Assets/OpenCVForUnity/org/opencv/core/Mat.cs:3071)
    InkmageGrab.copyMaskIntoResult (OpenCVForUnity.Mat mask, OpenCVForUnity.Mat img, OpenCVForUnity.Mat result) (at Assets/InkmageGrab.cs:163)

    Which I fixed by changing: byte[] buffer = new byte[3]; to byte[] buffer = new byte[4];

    Does that mean my mask is not initialized correctly? How was your mask initialized and what is the result?

    Thanks for looking into it,
    Bryan
     
    Last edited: Aug 22, 2016
  31. Streamfall

    Streamfall

    Joined:
    Aug 8, 2011
    Posts:
    43
    By the way,
    The way I read this algorithm, the second pass — initializing with a mask — is not supposed to just 'mask out' the image. It is supposed to help in removing or adding pieces that the algorithm previously missed. Right now, this code is merely masking things out.

    Please let me know if I am misunderstanding?
    Bryan
     
  32. ab_salazar

    ab_salazar

    Joined:
    Aug 23, 2016
    Posts:
    7
    Hello, I bought your asset today and I was wondering if I can have an x amount of AR Objects display at the same time depending on the number of faces tracked using the FaceTracker AR Sample, and if so, could you tell me what to change in the original code? Thanks in advance :)
     
  33. Sayugo

    Sayugo

    Joined:
    Aug 23, 2016
    Posts:
    6
    Hello, I just bought your asset and I can not find a method findHomography. Can you explain or give any other alternative? I do not even find a homography example or tutorial on your assets. Thanks in advance
     
  34. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    These settings might be the cause of the problem.
    Build Asset Bundles = No
    Copy to Streaming Assets = No
    The other my settings are the same as your settings.
     
  35. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Code (CSharp):
    1. using UnityEngine;
    2. using System.Collections;
    3.  
    4. #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    5. using UnityEngine.SceneManagement;
    6. #endif
    7. using OpenCVForUnity;
    8.  
    9. namespace OpenCVForUnitySample
    10. {
    11.     /// <summary>
    12.     /// Texture2D to mat sample.
    13.     /// </summary>
    14.     public class GrabCutSample : MonoBehaviour
    15.     {
    16.  
    17.         // Use this for initialization
    18.         void Start ()
    19.         {
    20.  
    21.             Texture2D imageTexture = Resources.Load ("lena_grabcut") as Texture2D;
    22.  
    23.             Mat image = new Mat (imageTexture.height, imageTexture.width, CvType.CV_8UC3);
    24.  
    25.             Utils.texture2DToMat (imageTexture, image);
    26.             Debug.Log ("image.ToString " + image.ToString ());
    27.  
    28.             Texture2D maskTexture = Resources.Load ("lena_grabcut_mask") as Texture2D;
    29.  
    30.             Mat mask = new Mat (imageTexture.height, imageTexture.width, CvType.CV_8UC1);
    31.  
    32.             Utils.texture2DToMat (maskTexture, mask);
    33.             Debug.Log ("mask.ToString " + mask.ToString ());
    34.  
    35.  
    36.  
    37.             OpenCVForUnity.Rect rectangle = new OpenCVForUnity.Rect (10, 10, image.cols () - 20, image.rows () - 20);
    38.  
    39.             Mat bgdModel = new Mat (); // extracted features for background
    40.             Mat fgdModel = new Mat (); // extracted features for foreground
    41.             Mat source = new Mat (1, 1, CvType.CV_8U, new Scalar (0));
    42.  
    43.             convertToOpencvValues (mask); // from human readable values to OpenCV values
    44.  
    45.             int iterCount = 1;
    46. //            Imgproc.grabCut (image, mask, rectangle, bgdModel, fgdModel, iterCount, Imgproc.GC_INIT_WITH_RECT);
    47.             Imgproc.grabCut (image, mask, rectangle, bgdModel, fgdModel, iterCount, Imgproc.GC_INIT_WITH_MASK);
    48.  
    49.  
    50.             convertToHumanValues (mask); // back to human readable values
    51.             Imgproc.threshold (mask, mask, 128, 255, Imgproc.THRESH_TOZERO);
    52.  
    53.             Mat foreground = new Mat (image.size (), CvType.CV_8UC1, new Scalar (0, 0, 0));
    54.             image.copyTo (foreground, mask);
    55.  
    56.  
    57.             Texture2D texture = new Texture2D (image.cols (), image.rows (), TextureFormat.RGBA32, false);
    58.  
    59.             Utils.matToTexture2D (foreground, texture);
    60.  
    61.             gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    62.  
    63.         }
    64.  
    65.         private static void convertToHumanValues (Mat mask)
    66.         {
    67.             byte[] buffer = new byte[3];
    68.             for (int x = 0; x < mask.rows (); x++) {
    69.                 for (int y = 0; y < mask.cols (); y++) {
    70.                     mask.get (x, y, buffer);
    71.                     int value = buffer [0];
    72.                     if (value == Imgproc.GC_BGD) {
    73.                         buffer [0] = 0; // for sure background
    74.                     } else if (value == Imgproc.GC_PR_BGD) {
    75.                         buffer [0] = 85; // probably background
    76.                     } else if (value == Imgproc.GC_PR_FGD) {
    77.                         buffer [0] = (byte)170; // probably foreground
    78.                     } else {
    79.                         buffer [0] = (byte)255; // for sure foreground
    80.  
    81.                     }
    82.                     mask.put (x, y, buffer);
    83.                 }
    84.             }
    85.         }
    86.  
    87.         /**
    88.      * Converts level of grayscale into OpenCV values. White - foreground, Black
    89.      * - background.
    90.      *
    91.      * @param mask
    92.      */
    93.         private static void convertToOpencvValues (Mat mask)
    94.         {
    95.             byte[] buffer = new byte[3];
    96.             for (int x = 0; x < mask.rows (); x++) {
    97.                 for (int y = 0; y < mask.cols (); y++) {
    98.                     mask.get (x, y, buffer);
    99.                     int value = buffer [0];
    100.                     if (value >= 0 && value < 64) {
    101.                         buffer [0] = Imgproc.GC_BGD; // for sure background
    102.                     } else if (value >= 64 && value < 128) {
    103.                         buffer [0] = Imgproc.GC_PR_BGD; // probably background
    104.                     } else if (value >= 128 && value < 192) {
    105.                         buffer [0] = Imgproc.GC_PR_FGD; // probably foreground
    106.                     } else {
    107.                         buffer [0] = Imgproc.GC_FGD; // for sure foreground
    108.  
    109.                     }
    110.                     mask.put (x, y, buffer);
    111.                 }
    112.             }
    113.  
    114.         }
    115.  
    116.    
    117.         // Update is called once per frame
    118.         void Update ()
    119.         {
    120.    
    121.         }
    122.  
    123.         public void OnBackButton ()
    124.         {
    125.             #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    126.             SceneManager.LoadScene ("OpenCVForUnitySample");
    127.             #else
    128.                         Application.LoadLevel ("OpenCVForUnitySample");
    129.             #endif
    130.         }
    131.  
    132.     }
    133. }
    Imgproc.GC_INIT_WITH_RECT
    grabcut_rect.PNG
    Imgproc.GC_INIT_WITH_MASK
    grabcut_mask.PNG
     
  36. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,085
    Checking on the status of OCR Tesseract for OpenCV for Unity package
     
  37. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    findHomography() has been implemented in OpenCV for Unity.
    http://enoxsoftware.github.io/OpenC...alib3d.html#a4fffd0de09e28dacbf1c7e65d8b3544b
    Please refer to the usage example of OpenCV Java.
     
  38. Streamfall

    Streamfall

    Joined:
    Aug 8, 2011
    Posts:
    43

    Hi Enox,

    If you notice - this example actually just masks the image. It doesn't do a grabcut. Not the expected result.

    If you take a look at the OpenCV docs here, near the bottom of the page... : http://docs.opencv.org/3.1.0/d8/d83/tutorial_py_grabcut.html

    You can see that the user drew a white line across a small area of the hair (not the whole hair, precisely - for that would be a mask). A grey line around the grass. He did not draw a line across all of the grass, just a line. Drawing around the whole grass would be a mask. This algorithm isn't supposed to simply mask things.
     
    Last edited: Aug 26, 2016
  39. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    It seems you need to set "Imgproc.GC_PR_BGD" on the background area.
    Code (CSharp):
    1. using UnityEngine;
    2. using System.Collections;
    3.  
    4. #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    5. using UnityEngine.SceneManagement;
    6. #endif
    7. using OpenCVForUnity;
    8.  
    9. namespace OpenCVForUnitySample
    10. {
    11.     /// <summary>
    12.     /// Texture2D to mat sample.
    13.     /// </summary>
    14.     public class GrabCutSample : MonoBehaviour
    15.     {
    16.  
    17.         // Use this for initialization
    18.         void Start ()
    19.         {
    20.  
    21. //            Texture2D imageTexture = Resources.Load ("lena_grabcut") as Texture2D;
    22.             Texture2D imageTexture = Resources.Load ("messi5") as Texture2D;
    23.  
    24.             Mat image = new Mat (imageTexture.height, imageTexture.width, CvType.CV_8UC3);
    25.  
    26.             Utils.texture2DToMat (imageTexture, image);
    27.             Debug.Log ("image.ToString " + image.ToString ());
    28.  
    29. //            Texture2D maskTexture = Resources.Load ("lena_grabcut_mask") as Texture2D;
    30.             Texture2D maskTexture = Resources.Load ("messi5_mask") as Texture2D;
    31.  
    32.             Mat mask = new Mat (imageTexture.height, imageTexture.width, CvType.CV_8UC1);
    33.  
    34.             Utils.texture2DToMat (maskTexture, mask);
    35.             Debug.Log ("mask.ToString " + mask.ToString ());
    36.  
    37.  
    38.  
    39.             OpenCVForUnity.Rect rectangle = new OpenCVForUnity.Rect (10, 10, image.cols () - 20, image.rows () - 20);
    40.  
    41.             Mat bgdModel = new Mat (); // extracted features for background
    42.             Mat fgdModel = new Mat (); // extracted features for foreground
    43.             Mat source = new Mat (1, 1, CvType.CV_8U, new Scalar (0));
    44.  
    45.             convertToOpencvValues (mask); // from human readable values to OpenCV values
    46.  
    47.             int iterCount = 5;
    48. //            Imgproc.grabCut (image, mask, rectangle, bgdModel, fgdModel, iterCount, Imgproc.GC_INIT_WITH_RECT);
    49.             Imgproc.grabCut (image, mask, rectangle, bgdModel, fgdModel, iterCount, Imgproc.GC_INIT_WITH_MASK);
    50.  
    51.  
    52.  
    53.             convertToHumanValues (mask); // back to human readable values
    54.             Imgproc.threshold (mask, mask, 128, 255, Imgproc.THRESH_TOZERO);
    55.  
    56.             Mat foreground = new Mat (image.size (), CvType.CV_8UC3, new Scalar (0, 0, 0));
    57.             image.copyTo (foreground, mask);
    58.  
    59.  
    60.             Texture2D texture = new Texture2D (image.cols (), image.rows (), TextureFormat.RGBA32, false);
    61.  
    62.             Utils.matToTexture2D (foreground, texture);
    63.  
    64.             gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    65.  
    66.         }
    67.  
    68.         private static void convertToHumanValues (Mat mask)
    69.         {
    70.             byte[] buffer = new byte[1];
    71.             for (int x = 0; x < mask.rows (); x++) {
    72.                 for (int y = 0; y < mask.cols (); y++) {
    73.                     mask.get (x, y, buffer);
    74.                     int value = buffer [0];
    75.                     if (value == Imgproc.GC_BGD) {
    76.                         buffer [0] = 0; // for sure background
    77.                     } else if (value == Imgproc.GC_PR_BGD) {
    78.                         buffer [0] = 85; // probably background
    79.                     } else if (value == Imgproc.GC_PR_FGD) {
    80.                         buffer [0] = (byte)170; // probably foreground
    81.                     } else {
    82.                         buffer [0] = (byte)255; // for sure foreground
    83.  
    84.                     }
    85.                     mask.put (x, y, buffer);
    86.                 }
    87.             }
    88.         }
    89.  
    90.         /**
    91.      * Converts level of grayscale into OpenCV values. White - foreground, Black
    92.      * - background.
    93.      *
    94.      * @param mask
    95.      */
    96.         private static void convertToOpencvValues (Mat mask)
    97.         {
    98.             byte[] buffer = new byte[1];
    99.             for (int x = 0; x < mask.rows (); x++) {
    100.                 for (int y = 0; y < mask.cols (); y++) {
    101.                     mask.get (x, y, buffer);
    102.                     int value = buffer [0];
    103.                     if (value >= 0 && value < 64) {
    104.                         buffer [0] = Imgproc.GC_BGD; // for sure background
    105.                     } else if (value >= 64 && value < 128) {
    106.                         buffer [0] = Imgproc.GC_PR_BGD; // probably background
    107.                     } else if (value >= 128 && value < 192) {
    108.                         buffer [0] = Imgproc.GC_PR_FGD; // probably foreground
    109.                     } else {
    110.                         buffer [0] = Imgproc.GC_FGD; // for sure foreground
    111.  
    112.                     }
    113.                     mask.put (x, y, buffer);
    114.                 }
    115.             }
    116.  
    117.         }
    118.  
    119.    
    120.         // Update is called once per frame
    121.         void Update ()
    122.         {
    123.    
    124.         }
    125.  
    126.         public void OnBackButton ()
    127.         {
    128.             #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    129.             SceneManager.LoadScene ("OpenCVForUnitySample");
    130.             #else
    131.                         Application.LoadLevel ("OpenCVForUnitySample");
    132.             #endif
    133.         }
    134.  
    135.     }
    136. }
    137.  
    messi5.png messi5_mask.png
    grabcut_mask_messi.PNG
     
  40. Sayugo

    Sayugo

    Joined:
    Aug 23, 2016
    Posts:
    6
    Thanks Enox, my mistake. I am able to use findHomography() now. I still have another problem.
    1. Is it possible to use OpenCV to create an NFT AR application without a third-party SDK like Vuforia? If so, could you give me a clue?
    2. I downloaded your sample of OpenCV with Vuforia from GitHub and I get a few missing prefabs. Can you fix it?

    Thanks for your support
     
  41. Kelekkis

    Kelekkis

    Joined:
    Sep 8, 2014
    Posts:
    2
    Hello,

    Been having problems when trying to transform rect.center to Vector3.

    Code (CSharp):
    1. Imgproc.circle(rgbaMat, rect.center, 6, new Scalar(0, 255, 0, 255), -1);
    This code draws circle in the right spot but when I am trying to move Unity object to same position, for example with this code:

    Code (CSharp):
    1. Vector3 newPos = Camera.main.ScreenToWorldPoint(new Vector3((float)rect.center.x, camHeight - (float)rect.center.y, Camera.main.nearClipPlane + 10f));
    2. moveableObject.GetComponent<Transform>().position = newPos;
    The Vector3 position is always wrong by some offset.
     
  42. NeedNap

    NeedNap

    Joined:
    Nov 1, 2012
    Posts:
    22
    Hello,
    I just bought OpenCV from the Asset Store because I have to create a desktop application that uses facial recognition to entertain the kids. It should be like Masquerade (http://msqrd.me) but much simpler.

    I also downloaded FaceTracker (https://www.assetstore.unity3d.com/en/#!/content/35284) and, using the sample scene as a test, I see that the facial recognition is not always very accurate, especially if I rotate my face by 3/4.
    Are there any special settings to use to have less flickering of the 3D objects placed over the face?

    Any help is appreciated because the project has a very tight deadline.
    Thanks
     
  43. Streamfall

    Streamfall

    Joined:
    Aug 8, 2011
    Posts:
    43
    Great, thank you Enox - I'll try and implement this and get back to you. It should be helpful!
     
  44. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    1. For now, there is no NFT AR sample, but I think it is feasible.
    2. I've updated the setup procedure a few days ago.
     
  45. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    http://forum.unity3d.com/threads/released-opencv-for-unity.277080/page-16#post-2704977
     
  46. shawww

    shawww

    Joined:
    Sep 30, 2014
    Posts:
    43
    Has anyone been successful in integrating NatCam with the Facetracker example? I've been having a heck of a time with it.
     
  47. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    There is no such sample for now.
     
  48. shawww

    shawww

    Joined:
    Sep 30, 2014
    Posts:
    43
    Actually, the maker of NatCam sent me some scripts!

    NatCamTextureARSample.cs

    Code (CSharp):
    1. #define OPENCV
    2. #define NATCAM_MODE //Uncomment this to use NatCamToMatHelper and NatCam instead of WebCamTexture
    3.  
    4. #if !NATCAM_MODE
    5.     #define WEBCAMTEXTURE_MODE
    6. #endif
    7.  
    8. #if OPENCV
    9.  
    10. using UnityEngine;
    11. using System.Collections.Generic;
    12.  
    13. #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    14. using UnityEngine.SceneManagement;
    15. #endif
    16. using OpenCVForUnity;
    17. using DlibFaceLandmarkDetector;
    18.  
    19. #if NATCAM_MODE
    20. using CameraTextureToMatHelper = NatCamU.Extensions.NatCamToMatHelper;
    21. #elif WEBCAMTEXTURE_MODE
    22. using CameraTextureToMatHelper = OpenCVForUnitySample.WebCamTextureToMatHelper;
    23. #endif
    24.  
    25. namespace DlibFaceLandmarkDetectorSample {
    26.     /// <summary>
    27.     /// Face tracker AR from WebCamTexture Sample.
    28.     /// This sample was referring to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
    29.     /// and use effect asset from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
    30.     /// </summary>
    31.     [RequireComponent(typeof(CameraTextureToMatHelper))]
    32.     public class NatcamTextureARSample : MonoBehaviour {
    33.         /// <summary>
    34.         /// The should draw face points.
    35.         /// </summary>
    36.         public bool shouldDrawFacePoints;
    37.  
    38.         /// <summary>
    39.         /// The should draw axes.
    40.         /// </summary>
    41.         public bool shouldDrawAxes;
    42.  
    43.         /// <summary>
    44.         /// The should draw head.
    45.         /// </summary>
    46.         public bool shouldDrawHead;
    47.  
    48.         /// <summary>
    49.         /// The should draw effects.
    50.         /// </summary>
    51.         public bool shouldDrawEffects;
    52.        
    53.         /// <summary>
    54.         /// The axes.
    55.         /// </summary>
    56.         public GameObject axes;
    57.        
    58.         /// <summary>
    59.         /// The head.
    60.         /// </summary>
    61.         public GameObject head;
    62.        
    63.         /// <summary>
    64.         /// The right eye.
    65.         /// </summary>
    66.         public GameObject rightEye;
    67.        
    68.         /// <summary>
    69.         /// The left eye.
    70.         /// </summary>
    71.         public GameObject leftEye;
    72.        
    73.         /// <summary>
    74.         /// The mouth.
    75.         /// </summary>
    76.         public GameObject mouth;
    77.  
    78.         /// <summary>
    79.         /// The mouth particle system.
    80.         /// </summary>
    81.         ParticleSystem[] mouthParticleSystem;
    82.        
    83.         /// <summary>
    84.         /// The colors.
    85.         /// </summary>
    86.         Color32[] colors;
    87.        
    88.         /// <summary>
    89.         /// The texture.
    90.         /// </summary>
    91.         Texture2D texture;
    92.  
    93.         /// <summary>
    94.         /// The face landmark detector.
    95.         /// </summary>
    96.         FaceLandmarkDetector faceLandmarkDetector;
    97.        
    98.         /// <summary>
    99.         /// The AR camera.
    100.         /// </summary>
    101.         public Camera ARCamera;
    102.        
    103.         /// <summary>
    104.         /// The cam matrix.
    105.         /// </summary>
    106.         Mat camMatrix;
    107.        
    108.         /// <summary>
    109.         /// The dist coeffs.
    110.         /// </summary>
    111.         MatOfDouble distCoeffs;
    112.        
    113.         /// <summary>
    114.         /// The invert Y.
    115.         /// </summary>
    116.         Matrix4x4 invertYM;
    117.        
    118.         /// <summary>
    119.         /// The transformation m.
    120.         /// </summary>
    121.         Matrix4x4 transformationM = new Matrix4x4 ();
    122.        
    123.         /// <summary>
    124.         /// The invert Z.
    125.         /// </summary>
    126.         Matrix4x4 invertZM;
    127.        
    128.         /// <summary>
    129.         /// The ar m.
    130.         /// </summary>
    131.         Matrix4x4 ARM;
    132.  
    133.         /// <summary>
    134.         /// The ar game object.
    135.         /// </summary>
    136.         public GameObject ARGameObject;
    137.  
    138.         /// <summary>
    139.         /// The should move AR camera.
    140.         /// </summary>
    141.         public bool shouldMoveARCamera;
    142.        
    143.         /// <summary>
    144.         /// The 3d face object points.
    145.         /// </summary>
    146.         MatOfPoint3f objectPoints;
    147.        
    148.         /// <summary>
    149.         /// The image points.
    150.         /// </summary>
    151.         MatOfPoint2f imagePoints;
    152.        
    153.         /// <summary>
    154.         /// The rvec.
    155.         /// </summary>
    156.         Mat rvec;
    157.        
    158.         /// <summary>
    159.         /// The tvec.
    160.         /// </summary>
    161.         Mat tvec;
    162.        
    163.         /// <summary>
    164.         /// The rot m.
    165.         /// </summary>
    166.         Mat rotM;
    167.  
    168.         /// <summary>
    169.         /// The web cam texture to mat helper.
    170.         /// </summary>
    171.         CameraTextureToMatHelper webCamTextureToMatHelper;
    172.        
    173.         // Use this for initialization
    174.         void Start ()
    175.         {
    176.             //set 3d face object points.
    177.             objectPoints = new MatOfPoint3f (
    178.                 new Point3 (-31, 72, 86),//l eye
    179.                 new Point3 (31, 72, 86),//r eye
    180.                 new Point3 (0, 40, 114),//nose
    181.                 new Point3 (-20, 15, 90),//l mouse
    182.                 new Point3 (20, 15, 90),//r mouse
    183.                 new Point3 (-69, 76, -2),//l ear
    184.                 new Point3 (69, 76, -2)//r ear
    185.             );
    186.             imagePoints = new MatOfPoint2f ();
    187.             rvec = new Mat ();
    188.             tvec = new Mat ();
    189.             rotM = new Mat (3, 3, CvType.CV_64FC1);
    190.  
    191.             faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
    192.  
    193.             webCamTextureToMatHelper = gameObject.GetComponent<CameraTextureToMatHelper> ();
    194.             webCamTextureToMatHelper.Init ();
    195.         }
    196.  
    197.         /// <summary>
    198.         /// Raises the web cam texture to mat helper inited event.
    199.         /// </summary>
    200.         public void OnWebCamTextureToMatHelperInited ()
    201.         {
    202.             Debug.Log ("OnWebCamTextureToMatHelperInited");
    203.            
    204.             Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
    205.            
    206.             colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
    207.             texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
    208.  
    209.             gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    210.             Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
    211.  
    212.             float width = gameObject.transform.localScale.x;
    213.             float height = gameObject.transform.localScale.y;
    214.  
    215.             float imageScale = 1.0f;
    216.             float widthScale = (float)Screen.width / width;
    217.             float heightScale = (float)Screen.height / height;
    218.             if (widthScale < heightScale) {
    219.                 Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    220.                 imageScale = (float)Screen.height / (float)Screen.width;
    221.             } else {
    222.                 Camera.main.orthographicSize = height / 2;
    223.             }
    224.            
    225.             gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    226.                                    
    227.                                    
    228.             int max_d = Mathf.Max (webCamTextureMat.rows (), webCamTextureMat.cols ());
    229.             camMatrix = new Mat (3, 3, CvType.CV_64FC1);
    230.             camMatrix.put (0, 0, max_d);
    231.             camMatrix.put (0, 1, 0);
    232.             camMatrix.put (0, 2, webCamTextureMat.cols () / 2.0f);
    233.             camMatrix.put (1, 0, 0);
    234.             camMatrix.put (1, 1, max_d);
    235.             camMatrix.put (1, 2, webCamTextureMat.rows () / 2.0f);
    236.             camMatrix.put (2, 0, 0);
    237.             camMatrix.put (2, 1, 0);
    238.             camMatrix.put (2, 2, 1.0f);
    239.                                    
    240.             Size imageSize = new Size (webCamTextureMat.cols () * imageScale, webCamTextureMat.rows () * imageScale);
    241.             double apertureWidth = 0;
    242.             double apertureHeight = 0;
    243.             double[] fovx = new double[1];
    244.             double[] fovy = new double[1];
    245.             double[] focalLength = new double[1];
    246.             Point principalPoint = new Point ();
    247.             double[] aspectratio = new double[1];
    248.                                    
    249.                                    
    250.                                    
    251.                                    
    252.             Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    253.                                    
    254.             Debug.Log ("imageSize " + imageSize.ToString ());
    255.             Debug.Log ("apertureWidth " + apertureWidth);
    256.             Debug.Log ("apertureHeight " + apertureHeight);
    257.             Debug.Log ("fovx " + fovx [0]);
    258.             Debug.Log ("fovy " + fovy [0]);
    259.             Debug.Log ("focalLength " + focalLength [0]);
    260.             Debug.Log ("principalPoint " + principalPoint.ToString ());
    261.             Debug.Log ("aspectratio " + aspectratio [0]);
    262.                                    
    263.                                    
    264.             if (widthScale < heightScale) {
    265.                 ARCamera.fieldOfView = (float)fovx [0];
    266.             } else {
    267.                 ARCamera.fieldOfView = (float)fovy [0];
    268.             }
    269.  
    270.                                    
    271.             Debug.Log ("camMatrix " + camMatrix.dump ());
    272.                                    
    273.                                    
    274.             distCoeffs = new MatOfDouble (0, 0, 0, 0);
    275.             Debug.Log ("distCoeffs " + distCoeffs.dump ());
    276.                                    
    277.                                    
    278.                                    
    279.             invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
    280.             Debug.Log ("invertYM " + invertYM.ToString ());
    281.            
    282.             invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
    283.             Debug.Log ("invertZM " + invertZM.ToString ());
    284.            
    285.            
    286.             axes.SetActive (false);
    287.             head.SetActive (false);
    288.             rightEye.SetActive (false);
    289.             leftEye.SetActive (false);
    290.             mouth.SetActive (false);
    291.  
    292.  
    293.             mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem> (true);
    294.  
    295.         }
    296.        
    297.         /// <summary>
    298.         /// Raises the web cam texture to mat helper disposed event.
    299.         /// </summary>
    300.         public void OnWebCamTextureToMatHelperDisposed ()
    301.         {
    302.             Debug.Log ("OnWebCamTextureToMatHelperDisposed");
    303.  
    304.             camMatrix.Dispose ();
    305.             distCoeffs.Dispose ();
    306.         }
    307.  
    308.         // Update is called once per frame
    309.         void Update ()
    310.         {
    311.  
    312.             if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
    313.                
    314.                 Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
    315.  
    316.  
    317.                 OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
    318.  
    319.                 //detect face rects
    320.                 List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect ();
    321.  
    322.                 if (detectResult.Count > 0) {
    323.  
    324.                     //detect landmark points
    325.                     List<Vector2> points = faceLandmarkDetector.DetectLandmark (detectResult [0]);
    326.  
    327.                     if (points.Count > 0) {
    328.                         if (shouldDrawFacePoints)
    329.                             OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);
    330.  
    331.                         imagePoints.fromArray (
    332.                             new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2),//l eye
    333.                             new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2),//r eye
    334.                             new Point (points [33].x, points [33].y),//nose
    335.                             new Point (points [48].x, points [48].y),//l mouth
    336.                             new Point (points [54].x, points [54].y) //r mouth
    337.                                                         ,
    338.                             new Point (points [0].x, points [0].y),//l ear
    339.                             new Point (points [16].x, points [16].y)//r ear
    340.                         );
    341.                                                                        
    342.                                                                        
    343.                         Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
    344.  
    345.  
    346.                         if (tvec.get (2, 0) [0] > 0){
    347.  
    348.                             if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 6.0) {
    349.                                 if (shouldDrawEffects)
    350.                                     rightEye.SetActive (true);
    351.                             }
    352.  
    353.                             if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 6.0) {
    354.                                 if (shouldDrawEffects)
    355.                                     leftEye.SetActive (true);
    356.                             }
    357.                             if (shouldDrawHead)
    358.                                 head.SetActive (true);
    359.                             if (shouldDrawAxes)
    360.                                 axes.SetActive (true);
    361.                                                    
    362.                                                    
    363.                             float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));
    364.                             float mouseDistance = Mathf.Abs ((float)(points [62].y - points [66].y));
    365.                             if (mouseDistance > noseDistance / 5.0) {
    366.                                 if (shouldDrawEffects) {
    367.                                     mouth.SetActive (true);
    368.                                     foreach (ParticleSystem ps in mouthParticleSystem) {
    369.                                         ps.enableEmission = true;
    370.                                         ps.startSize = 40 * (mouseDistance / noseDistance);
    371.                                     }
    372.                                 }
    373.                             } else {
    374.                                 if (shouldDrawEffects) {
    375.                                     foreach (ParticleSystem ps in mouthParticleSystem) {
    376.                                         ps.enableEmission = false;
    377.                                     }
    378.                                 }
    379.                             }
    380.  
    381.                                                    
    382.                             Calib3d.Rodrigues (rvec, rotM);
    383.                                                    
    384.                             transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
    385.                             transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
    386.                             transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
    387.                             transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
    388.                                                    
    389.                             if (shouldMoveARCamera) {
    390.  
    391.                                 if (ARGameObject != null) {
    392.                                     ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
    393.                                     ARUtils.SetTransformFromMatrix (ARCamera.transform, ref ARM);
    394.                                     ARGameObject.SetActive (true);
    395.                                 }
    396.                             } else {
    397.                                 ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
    398.  
    399.                                 if (ARGameObject != null) {
    400.                                     ARUtils.SetTransformFromMatrix (ARGameObject.transform, ref ARM);
    401.                                     ARGameObject.SetActive (true);
    402.                                 }
    403.                             }
    404.  
    405.                         }
    406.                     }
    407.                 }
    408.                                        
    409.                 Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
    410.  
    411.                 if (texture.width != rgbaMat.cols () || texture.height != rgbaMat.rows ())
    412.                     texture.Resize (rgbaMat.cols (), rgbaMat.rows ());              
    413.                 OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
    414.                                        
    415.             }
    416.                    
    417.         }
    418.                
    419.         /// <summary>
    420.         /// Raises the disable event.
    421.         /// </summary>
    422.         void OnDisable ()
    423.         {
    424.             webCamTextureToMatHelper.Dispose ();
    425.  
    426.             faceLandmarkDetector.Dispose ();
    427.         }
    428.        
    429.         /// <summary>
    430.         /// Raises the back button event.
    431.         /// </summary>
    432.         public void OnBackButton ()
    433.         {
    434.             #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    435.             SceneManager.LoadScene ("DlibFaceLandmarkDetectorSample");
    436.             #else
    437.             Application.LoadLevel ("DlibFaceLandmarkDetectorSample");
    438.             #endif
    439.         }
    440.        
    441.         /// <summary>
    442.         /// Raises the play button event.
    443.         /// </summary>
    444.         public void OnPlayButton ()
    445.         {
    446.             webCamTextureToMatHelper.Play ();
    447.         }
    448.        
    449.         /// <summary>
    450.         /// Raises the pause button event.
    451.         /// </summary>
    452.         public void OnPauseButton ()
    453.         {
    454.             webCamTextureToMatHelper.Pause ();
    455.         }
    456.        
    457.         /// <summary>
    458.         /// Raises the stop button event.
    459.         /// </summary>
    460.         public void OnStopButton ()
    461.         {
    462.             webCamTextureToMatHelper.Stop ();
    463.         }
    464.        
    465.         /// <summary>
    466.         /// Raises the change camera button event.
    467.         /// </summary>
    468.         public void OnChangeCameraButton ()
    469.         {
    470.             webCamTextureToMatHelper.Init (
    471.                 #if WEBCAMTEXTURE_MODE
    472.                 null,
    473.                 webCamTextureToMatHelper.requestWidth,
    474.                 webCamTextureToMatHelper.requestHeight,
    475.                 !webCamTextureToMatHelper.requestIsFrontFacing
    476.                 #endif
    477.             );
    478.         }
    479.                
    480.         public void OnDrawFacePointsButton ()
    481.         {
    482.             if (shouldDrawFacePoints) {
    483.                 shouldDrawFacePoints = false;
    484.             } else {
    485.                 shouldDrawFacePoints = true;
    486.             }
    487.         }
    488.                
    489.         public void OnDrawAxesButton ()
    490.         {
    491.             if (shouldDrawAxes) {
    492.                 shouldDrawAxes = false;
    493.                 axes.SetActive (false);
    494.             } else {
    495.                 shouldDrawAxes = true;
    496.             }
    497.         }
    498.                
    499.         public void OnDrawHeadButton ()
    500.         {
    501.             if (shouldDrawHead) {
    502.                 shouldDrawHead = false;
    503.                 head.SetActive (false);
    504.             } else {
    505.                 shouldDrawHead = true;
    506.             }
    507.         }
    508.  
    509.         public void OnDrawEffectsButton ()
    510.         {
    511.             if (shouldDrawEffects) {
    512.                 shouldDrawEffects = false;
    513.                 rightEye.SetActive (false);
    514.                 leftEye.SetActive (false);
    515.                 mouth.SetActive (false);
    516.             } else {
    517.                 shouldDrawEffects = true;
    518.             }
    519.         }
    520.  
    521.     }
    522. }
    523. #endif
    NatCamToMatHelper.cs

    Code (CSharp):
    1. /*
    2. *   NatCamToMatHelper
    3. *   Compatible with NatCam 1.2+
    4. *   OPENCV_DEVELOPER_MODE must be uncommented in NatCam.cs, NatCamNativeInterface.cs, and NatCamFallbackInterface.cs
    5. *   Copyright (c) 2016 Yusuf Olokoba
    6. */
    7.  
    8. //#define OPENCV
    9. #define ON_DEMAND_MATRIX_UPDATE //If enabled, matrix will only be updated when GetMat() is called.
    10.  
    11. #if OPENCV
    12.  
    13. using UnityEngine;
    14. using UnityEngine.Events;
    15. using System;
    16. using System.Collections;
    17. using NatCamU.Internals;
    18. using OpenCVForUnity;
    19.  
    20.  
    21. namespace NatCamU.Extensions {
    22.  
    23.     public class NatCamToMatHelper : UnitygramBase, IDisposable {
    24.  
    25.         [Header("Events")]
    26.         public UnityEvent OnInitedEvent;
    27.         public UnityEvent OnDisposedEvent;
    28.  
    29.         Mat rgbaMat;
    30.         bool initialized;
    31.         IntPtr previewData;
    32.  
    33.  
    34.         #region --NatCam Callbacks--
    35.  
    36.         // Use this for initialization
    37.         public override void Start () {
    38.             //Register for native preview updates
    39.             NatCamNativeInterface.OnNativePreviewUpdate += OnNativePreviewUpdate;
    40.         }
    41.  
    42.         public override void OnPreviewStart () {
    43.             //Start safely
    44.             CheckedStart().Invoke(this);
    45.         }
    46.  
    47.         private void OnNativePreviewUpdate (ComponentBuffer bufferType, UIntPtr buffer, int width, int height, int size) {
    48.             //Store the handle to the preview data if this is the preview data handle being given to us
    49.             previewData = bufferType == ComponentBuffer.RGBA32 ? unchecked((IntPtr)(long)(ulong)buffer) : IntPtr.Zero;
    50.         }
    51.         #endregion
    52.  
    53.  
    54.         #region --Client API--
    55.  
    56.         public void Init () {
    57.             //If initialized, dispose and start over
    58.             if (initialized) Dispose();
    59.             //Create initialization event if null
    60.             OnInitedEvent = OnInitedEvent ?? new UnityEvent ();
    61.             //Create disposed event if null
    62.             OnDisposedEvent = OnDisposedEvent ?? new UnityEvent ();
    63.             //Preview type MUST be readable since we need access to preview data
    64.             PreviewType = PreviewType.Readable;
    65.             //Start NatCam
    66.             base.Start();
    67.         }
    68.  
    69.         /// <summary>
    70.         /// Init the specified camera, requestWidth, requestHeight.
    71.         /// </summary>
    72.         /// <param name="camera">Device camera.</param>
    73.         public void Init (Facing camera) {
    74.             //Set the desired facing
    75.             Facing = camera;
    76.             //Initialize
    77.             Init();
    78.         }
    79.  
    80.         /// <summary>
    81.         /// Has this instance been initialized?
    82.         /// </summary>
    83.         /// <returns><c>true</c> if this instance has been initialized; otherwise, <c>false</c>.</returns>
    84.         public bool isInited () { return initialized;}
    85.  
    86.         /// <summary>
    87.         /// Play this instance.
    88.         /// </summary>
    89.         public void Play () { if (initialized) NatCam.Play ();}
    90.  
    91.         /// <summary>
    92.         /// Pause this instance.
    93.         /// </summary>
    94.         public void Pause () { if (initialized) NatCam.Pause ();}
    95.  
    96.         /// <summary>
    97.         /// Stop this instance.
    98.         /// </summary>
    99.         public void Stop () {  if (initialized) NatCam.Pause ();}
    100.  
    101.         /// <summary>
    102.         /// Is the camera playing?
    103.         /// </summary>
    104.         /// <returns><c>true</c> if the camera is playing; otherwise, <c>false</c>.</returns>
    105.         public bool isPlaying () { return (initialized && NatCam.IsPlaying);}
    106.  
    107.         /// <summary>
    108.         /// Gets the device camera.
    109.         /// </summary>
    110.         /// <returns>The device camera.</returns>
    111.         public DeviceCamera GetDeviceCamera () { return NatCam.ActiveCamera;}
    112.  
    113.         /// <summary>
    114.         /// Whether the preview updated this frame.
    115.         /// </summary>
    116.         /// <returns><c>true</c> if the preview updated this frame; otherwise, <c>false</c>.</returns>
    117.         public bool didUpdateThisFrame () { return true;} //NatCam isn't synchronized with Unity's frame updates, so just say yes
    118.  
    119.         /// <summary>
    120.         /// Gets the matrix.
    121.         /// </summary>
    122.         /// <returns>The matrix.</returns>
    123.         public Mat GetMat () {
    124.             #if ON_DEMAND_MATRIX_UPDATE
    125.             //Check if we are running on fallback interface
    126.             if (NatCam.Interface == NatCamInterface.FallbackInterface) return NatCam.PreviewMatrix;
    127.             //Create the RGBA matrix if null
    128.             rgbaMat = rgbaMat ?? new Mat(new Size(NatCam.Preview.width, NatCam.Preview.height), CvType.CV_8UC4);
    129.             //Check the size, in case we switched cameras with different resolutions for example
    130.             if (rgbaMat.cols() != NatCam.Preview.width || rgbaMat.rows() != NatCam.Preview.height) Imgproc.resize(rgbaMat, rgbaMat, new Size(NatCam.Preview.width, NatCam.Preview.height));
    131.             //Load the preview data into the matrix, but check that there is a valid handle (not NULL/IntPtr.Zero)
    132.             if (previewData != IntPtr.Zero) Utils.copyToMat(previewData, rgbaMat);
    133.             //Return
    134.             return rgbaMat;
    135.             #else
    136.             //Simply assign reference to NatCam's preview matrix
    137.             return NatCam.PreviewMatrix;
    138.             #endif
    139.         }
    140.  
    141.         /// <summary>
    142.         /// Releases all resource used by the <see cref="NatCamToMatHelper"/> object.
    143.         /// </summary>
    144.         /// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="NatCamToMatHelper"/>. The
    145.         /// <see cref="Dispose"/> method leaves the <see cref="NatCamToMatHelper"/> in an unusable state. After
    146.         /// calling <see cref="Dispose"/>, you must release all references to the <see cref="NatCamToMatHelper"/> so
    147.         /// the garbage collector can reclaim the memory that the <see cref="NatCamToMatHelper"/> was occupying.</remarks>
    148.         public void Dispose ()  {
    149.             //Reset initialized
    150.             initialized = false;
    151.             //Release NatCam
    152.             NatCam.Release();
    153.             //Dispose the RGBA matrix if it isn't null
    154.             if (rgbaMat != null) rgbaMat.Dispose (); rgbaMat = null;
    155.             //Invoke the OnDisposedEvent
    156.             if (OnDisposedEvent != null) OnDisposedEvent.Invoke ();
    157.         }
    158.         #endregion
    159.  
    160.  
    161.         #region --Utility--
    162.  
    163.         private IEnumerator CheckedStart () {
    164.             //Check dims
    165.             if (NatCam.Interface == NatCamInterface.FallbackInterface &&
    166.                 (NatCam.Preview.width == NatCam.Preview.height &&
    167.                 NatCam.Preview.height == 16)) yield return new WaitWhile (() => NatCam.Preview.width == 16);
    168.             //When control reaches here, the PreviewMatrix has not yet been resized, so wait one frame
    169.             yield return null;
    170.             //Invoke initialization event
    171.             if (OnInitedEvent != null) OnInitedEvent.Invoke ();
    172.             //Set initialized
    173.             initialized = true;
    174.         }
    175.         #endregion
    176.     }
    177. }
    178. #endif
     
  49. Xtense

    Xtense

    Joined:
    Feb 18, 2015
    Posts:
    34
    Hi,

    We're currently in the middle of a project that relies on heavy visual processing on mobile platforms and will require hardware acceleration through OpenCV in Unity, so we're interested in using your plugin for that purpose. We require spherical and panorama stitching and unwrapping for our project, and OpenCV has classes for that. We can't tell whether stitching is implemented in your plugin, so we would like to ask just that; if it isn't, do you perhaps have any hints or pointers on how we could implement stitching through OpenCV for mobile platforms in Unity? Since our project targets mobile platforms, we would be very happy if we could keep stitching separate from the rest of the overhead, to make it as light and fast as possible.

    Best Regards,
    Michał Stopa, INTERMARUM
     
  50. shawww

    shawww

    Joined:
    Sep 30, 2014
    Posts:
    43
    While I haven't used the stitching and warping myself, it appears that other people have. The plugin comes with a framework that includes all of those classes, so I'd imagine they're exposed. At worst, you'll get a template for how to call the C++ OpenCV API from C#.