Search Unity

[RELEASED] OpenCV for Unity

Discussion in 'Assets and Asset Store' started by EnoxSoftware, Oct 30, 2014.

  1. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    WebCamTextureARExample is included in DlibFaceLandmarkDetectorWithOpenCVExample.unitypackage.
    Please unzip “DlibFaceLandmarkDetectorWithOpenCVExample.zip”, then import DlibFaceLandmarkDetectorWithOpenCVExample.unitypackage.
     
  2. our919

    our919

    Joined:
    Mar 18, 2017
    Posts:
    1
    Hello Enox,
    I want to Identifying letter A-Z in Unity5.5.0 with OpencvForUnity 2.1.2.
    When I call “Imgproc.resize (aaa, temp, new Size (64, 64))”, Unity crashes. When I restart Unity, I see six error messages (CurrentThreadIsMainThread()). I don't know how to fix it; please help me to resolve this problem.

    Code (CSharp):
    1.  void Update ()
    2.         {
    3.             if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
    4.                 timer += Time.deltaTime;
    5.                 if (timer > timerMax) {
    6.                     timer = 0;
    7.                     if (RunTimer != 1) {
    8.                         Matlist = new List<Mat> ();
    9.                         Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
    10.                         Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
    11.              
    12.                         //{{----范围检测----}}//
    13.                         red_H_Low = int.Parse (inputfield_red_H_Low.text);//100
    14.                         red_H_High = int.Parse (inputfield_red_H_High.text);//124
    15.                         blue_H_Low = int.Parse (inputfield_blue_H_Low.text);//0
    16.                         blue_H_High = int.Parse (inputfield_blue_H_High.text);//20,23
    17.                         red_S_Low = int.Parse (inputfield_S_Low.text);//43,16,25
    18.                         red_S_High = int.Parse (inputfield_S_High.text);
    19.                         red_V_Low = int.Parse (inputfield_V_Low.text);
    20.                         red_V_High = int.Parse (inputfield_V_High.text);
    21.                         threshold1 = int.Parse (inputfield_threshold1.text);
    22.                         timerMax = float.Parse (inputfield_TimerIS.text);
    23.                         //{{----转换HSV----}}//
    24.                         Mat hsvMat = new Mat ();
    25.                         Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGB2HSV);
    26.  
    27.                         List<Mat> SplitHSV = new List<Mat> ();
    28.                         Core.split (hsvMat, SplitHSV);
    29.                         Imgproc.equalizeHist (SplitHSV [2], SplitHSV [2]);
    30.                         Core.merge (SplitHSV, hsvMat);
    31.  
    32.                         imgThresholded = new Mat ();
    33.                         imgThresholded2 = new Mat ();
    34.  
    35.                         Core.inRange (hsvMat, new Scalar (red_H_Low, red_S_Low, red_V_Low), new Scalar (red_H_High, red_S_High, red_V_High), imgThresholded);
    36.                         Core.inRange (hsvMat, new Scalar (blue_H_Low, red_S_Low, red_V_Low), new Scalar (blue_H_High, red_S_High, red_V_High), imgThresholded2);
    37.                         Core.addWeighted (imgThresholded, 0.5, imgThresholded2, 0.5, 0, imgThresholded);//只是把这里调换了一下位置就解决了红色不能识别的问题了
    38.  
    39.                         //{{----去噪点、连通连通域----}}//
    40.                         //开操作(去噪点)
    41.                         Imgproc.morphologyEx (imgThresholded, imgThresholded, Imgproc.MORPH_OPEN, element);
    42.                         //闭操作(连通连通域)
    43.                         Imgproc.morphologyEx (imgThresholded, imgThresholded, Imgproc.MORPH_CLOSE, element);
    44.                         Imgproc.threshold (imgThresholded, imgThresholded, threshold1, 255, Imgproc.THRESH_BINARY_INV & Imgproc.THRESH_OTSU);
    45.  
    46.                         imgThresholded.copyTo (imgThresholded2);
    47.  
    48.                         if (isErzhi)
    49.                             Utils.matToTexture2D (imgThresholded, texture, webCamTextureToMatHelper.GetBufferColors ());
    50.                         if (!isErzhi)
    51.                             Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors ());
    52.  
    53.                         ////{{----寻找轮廓----}}//
    54.                         contours = new List<MatOfPoint> ();
    55.                         contours2 = new List<MatOfPoint> ();
    56.                         Mat hierarchy = new Mat ();
    57.                         Imgproc.findContours (imgThresholded2, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE, new Point (0, 0));
    58.                         //Mat    imgContours = new Mat (reheight, rewidth, CvType.CV_8UC3);
    59.                         Debug.Log ("contours.Count  : " + contours.Count );
    60.                         Debug.Log ("MinRect :" + MinRect + "    MaxRect :" + MaxRect);
    61.                         Imgproc.rectangle (rgbaMat, new Point(180,250), new Point(200,266), new Scalar (125), 10);
    62.                         for (int i = 0; i < contours.Count; i++) {
    63.                             OpenCVForUnity.Rect rect = Imgproc.boundingRect (contours [i]);
    64.                             MatOfPoint tempMatOfPoint = new MatOfPoint (contours [i].toArray ());
    65.                             if (rect.width > MinRect && rect.height > MinRect)
    66.                             if (rect.width < MaxRect && rect.height < MaxRect)
    67.                             if (rect.y > TopRect && ((rect.y) < BottomRect)) {
    68.                                 //Debug.Log(Imgproc.)
    69.                                 MatOfPoint2f point2f = new MatOfPoint2f (contours [i].toArray ());
    70.                                 Debug.Log ("contourarea::" + i.ToString() + "   "+ Imgproc.contourArea (point2f).ToString() + "    w:" +rect.width + "    h:" + rect.height);
    71.                                 ////{{----截取ROI----}}//
    72.                                 Point center = new Point (rect.x + rect.width / 2, rect.y + rect.height / 2);
    73.                                 Point p1 = new Point (rect.x, rect.y);
    74.                                 Point p2 = new Point (rect.x + rect.width, rect.y + rect.height);
    75.  
    76.                                 Imgproc.circle (rgbaMat, center, 6, new Scalar (255));
    77.                                 Imgproc.rectangle (rgbaMat, p1, p2, new Scalar (125), 6);
    78.                                 //Imgproc.circle (imgThresholded, center, 6, new Scalar (255));
    79.                                 //Imgproc.rectangle (imgThresholded, p1, p2, new Scalar (125), 6);
    80.  
    81.                                 contours2.Add (tempMatOfPoint);
    82.                             }
    83.                         }
    84.                          
    85.                         IngContourNum = 0;
    86.  
    87.                         if (isErzhi)
    88.                             showinA (imgThresholded);
    89.                         if (!isErzhi)
    90.                             showinA (rgbaMat);
    91.                  
    92.  
    93.                         RunTimer++;
    94.                     }
    95.                     if(RunTimer ==1){
    96.                         RunTimer++;
    97.                     }
    98.                     if (RunTimer > RunTimerMax) {
    99.                         RunTimer = 0;
    100.                         webCamTextureToMatHelper.Stop ();
    101.                         StopAllCoroutines ();
    102.                         webCamTextureToMatHelper.Play ();
    103.                     }
    104.  
    105.  
    106.                 }
    107.                 if (timer < timerMax) {
    108.                     if(contours2.Count < 10)
    109.                     if(IngContourNum < contours2.Count)
    110.                     {
    111.                             Debug.Log ("IngContourNum: " + IngContourNum + "   contours2.Count: " + contours2.Count);
    112.                             int i = IngContourNum;
    113.                             OpenCVForUnity.Rect rect = Imgproc.boundingRect (contours [i]);
    114.                             Point center = new Point (rect.x + rect.width / 2, rect.y + rect.height / 2);
    115.                             MatOfPoint tempMatOfPoint = new MatOfPoint (contours [i].toArray ());
    116.  
    117.                             Mat temp = new Mat ();
    118.  
    119.  
    120.                             int NewWidth = 0;
    121.                             if (rect.width > rect.height)
    122.                                 NewWidth = rect.width;
    123.                             else
    124.                                 NewWidth = rect.height;
    125.                             int NewWidth_haft = (int)(NewWidth / 2);
    126.                             Point pos_start = new Point (center.x - NewWidth_haft, center.y - NewWidth_haft);
    127.                         if ((pos_start.x > 0 && pos_start.y > 0) || true) {
    128.                             UnityEngine.Rect rectU = new UnityEngine.Rect ((float)pos_start.x, (float)pos_start.y, (float)rect.width, (float)rect.height);
    129.                             int start_x = (int)pos_start.x;
    130.                             int start_y = (int)pos_start.y;
    131.                             //rectU = new UnityEngine.Rect ((float)pos_start.x, (float)pos_start.y, (float)(NewWidth *2), (float)(NewWidth * 2));
    132.                             temp = new Mat (imgThresholded, new Range (start_y, start_y + NewWidth), new Range (start_x, start_x + NewWidth));
    133.                             Mat temp2 = new Mat ();
    134.  
    135.  
    136.                             //Mat temp = new Mat ();
    137.  
    138.                             //Go wrong
    139.                             Imgproc.resize (temp, temp, new Size (64, 64));
    140.  
    141.  
    142.                             Matlist.Add (temp);
    143.                             //myfeature.jsFeature (aaa);
    144.                              
    145.                  
    146.                             //StartCoroutine( UploadPNGfromMat (temp, bbb.ToString()));
    147.                             bbb++;
    148.  
    149.                         }
    150.                         IngContourNum++;
    151.                      }
    152.  
    153.                     Debug.Log ("contours2.Count  : " + contours2.Count);
    154.                     ShowTextWord (t1, "c2.Count  : " + contours2.Count);
    155.                 }
    156.  
    157.             }
    158.         }
    159.     public void showinA(Mat imgMat)
    160.         {
    161.             Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
    162.             Utils.matToTexture2D (imgMat, texture);
    163.             //a.GetComponent<Renderer> ().material.mainTexture = texture;
    164.             bg.sprite = Sprite.Create (texture, new UnityEngine.Rect (0, 0, texture.width, texture.height), Vector2.zero);
    165.         }

    It goes wrong at code lines 138 and 144 above.
     
  3. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,084
    For portrait mode, is it possible to use full screen? If so, how do you properly size the Quad to fill up the entire screen? (on iOS/Android)
     
  4. suntabu

    suntabu

    Joined:
    Dec 21, 2013
    Posts:
    77
  5. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Could you send your code? store@enoxsoftware.com
     
  6. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    I think that it is possible by customizing this part.
    Code (CSharp):
    1.  
    2.             gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    3.             Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
    4.  
    5.                                  
    6.             float width = webCamTextureMat.width();
    7.             float height = webCamTextureMat.height();
    8.                                  
    9.             float widthScale = (float)Screen.width / width;
    10.             float heightScale = (float)Screen.height / height;
    11.             if (widthScale < heightScale) {
    12.                 Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    13.             } else {
    14.                 Camera.main.orthographicSize = height / 2;
    15.             }
    16.  
     
  7. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Android only supports mjpeg/avi format.
    iOS supports mjpeg/avi and h264/mov format. but, there is a possibility that the h264/mov format does not support to Videoio.CAP_PROP_POS_FRAMES settings.
     
  8. electric_jesus

    electric_jesus

    Joined:
    Mar 8, 2015
    Posts:
    36
    Hello @EnoxSoftware
    I want to use OpenCV for Unity to track players face position and check whether mouth is open. I compared your mobile test apps and I noticed that while WebCamTextureDetectFaceSample works perfectly (but doesn't seem to track mouth points), the FaceTrackerExample does the job but works quite slow. Is there a way to speed up this code somehow? I don't need to track the eyes, only mouth detection. Any advice would be appreciated.
    I think I'll try to use WebCamTextureDetectFaceSample approach to estimate face position and move FaceTracker calls from the main thread to check if mouth is open without freezes.
     
    Last edited: Mar 29, 2017
  9. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Have you tried the mouth cascade detector? After detecting the mouth at the beginning, I think that it is possible to judge opening and closing of mouth by acquiring a histogram.
    http://stackoverflow.com/questions/29721755/detecting-mouth-with-opencv
    http://vgg.fiit.stuba.sk/2013-06/eye-blinking-detection/
     
  10. Sayugo

    Sayugo

    Joined:
    Aug 23, 2016
    Posts:
    6
    Hello,
    I tried your Markerless AR example and it's very nice. But I have a problem: when I try to play streaming video from a URL, my Unity editor stops responding. Can your asset do that? From the OpenCV documentation here http://docs.opencv.org/3.2.0/d8/dfe/classcv_1_1VideoCapture.html it seems I can do it easily just by replacing the filename with the link to my streaming video. Thanks in advance
     
  11. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Unfortunately, streaming video playback is supported only on Windows.
    Please set ffmpeg.dll like the following post.
    https://forum.unity3d.com/threads/released-opencv-for-unity.277080/page-16#post-2723212
    I have succeeded in playing the next link in VideoCaptureExample.
    capture.open ("http://plazacam.studentaffairs.duke.edu/mjpg/video.mjpg");
     
  12. tdneren2

    tdneren2

    Joined:
    Sep 29, 2013
    Posts:
    14
    First of all, really great asset {great work of cloning the package}. I was wondering ... I have been trying to work with image processing using C#'s own Texture2D and ReadPixels - in there I have enabled an optimized method to track objects (and especially a ball) with the help of a little algorithm I call 'Shadow magic' -> this finds both the highlight and shadow of a ball, alongside finding the max RGB-channel for best thresholding - could you maybe optimize the WebCamTextureToMatHelper-script in such a way {or maybe tell me how to >> my own example is for instance better equipped for low light/evening light}:

    Code (CSharp):
    1.  
    2.  
    3. scrReadWidth = 426;
    4.         xPxDiv30 = Mathf.RoundToInt(scrReadWidth / 7.1f);
    5.         yPxDiv30 = Mathf.RoundToInt(scrReadHeight / 8f);        // scrPfactor: 128 til 312 ... scrPfactor8: 16x16 v/ 128p [128/8 = 16]; 24x24 v/ 192p [192/8 =24]; 32x32 v/ 256p [256/8 = 32]; 40x40 v/ 312p [312/8 = 40];
    6.         myTexture2D = new Texture2D(scrReadWidth, scrReadHeight, TextureFormat.RGB24, false);
    7.         myTexture2D.ReadPixels(new Rect(0, 0, scrReadWidth, scrReadHeight), 0, 0);
    8.         myTexture2D.Apply();
    9.         //posCoord.Clear();
    10.         int tmpCount0 = 0;
    11.         int tmpCount2 = 0;
    12.  
    13.         for (iy = 0; iy < yPxDiv30; iy++)
    14.         {
    15.             for (ix = 0; ix < xPxDiv30; ix++)
    16.             {
    17.                 for (int iy29 = iy * 8; iy29 < iy * 8 + 8; iy29++)
    18.                 {
    19.                     for (int ix29 = ix * 7; ix29 < ix * 7 + 7; ix29++)
    20.                     {
    21.                         hitColorTH = myTexture2D.GetPixel(iy29, ix29);
    22.                         tmpAvgColR += hitColorTH.r;
    23.                         tmpAvgColG += hitColorTH.g;
    24.                         tmpAvgColB += hitColorTH.b;
    25.                         pixMeas++;
    26.                     } // SLUT: for (int iy29 = 0; iy29 < 30; iy29++)
    27.                     // Herunder tages kun 8x8 udsnit af pixels:                    
    28.                 } // SLUT: for (int ix29 = 0; ix29 < 30; ix29++)
    29.  
    30.                 tmpAvgColR /= 56;
    31.                 tmpAvgColG /= 56;
    32.                 tmpAvgColB /= 56;
    33.  
    34.                 // START BALL - o'ER 'O-ER':
    35.  
    36.                 roedVperCent = (Mathf.Abs(tmpAvgColR - tmpAvgColR1)) * 100; gVperCent = (Mathf.Abs(tmpAvgColG - tmpAvgColG1)) * 100; bVperCent = (Mathf.Abs(tmpAvgColB - tmpAvgColB1)) * 100;
    37.                 shamaRed = tmpAvgColR - tmpAvgColR1; shamaGreen = tmpAvgColG - tmpAvgColG1; shamaBlue = tmpAvgColB - tmpAvgColB1;
    38.                 // if(Mathf.Abs(shamaRed-shamaGreen)<ballShamaThreshold && (Mathf.Abs(shamaGreen-shamaBlue)<ballShamaThreshold){}
    39.                 //shamaRed = Mathf.Approximately(roedVperCent, gVperCent);
    40.                 //bool testApprox = ((a - b) < 0 ? ((a - b) * -1) : (a - b)) <= threshold;
    41.                 rVperCent = Mathf.Max(roedVperCent, gVperCent);
    42.                 rVperCent = Mathf.Max(rVperCent, bVperCent);
    43.  
    44.                 tmpY = (iy + 1).ToString();
    45.                 tmpX = (ix + 1).ToString();
    46.                 tmpNumOfSq = (xPxDiv30 * yPxDiv30) - 1;
    47.                 if (posCoord.Count > tmpNumOfSq && boolpostCountSwitch == false) { tmpCount0 = 0; boolpostCountSwitch = true; }
    48.  
    49.                 if (tmpCount0 < tmpNumOfSq && boolpostCountSwitch == false)
    50.                 {
    51.                     tmpCount0 = posCoord.Count;
    52.                     if (rVperCent < ballCentrererX || (Mathf.Abs(shamaRed - shamaGreen) < ballShamaThreshold && (Mathf.Abs(shamaGreen - shamaBlue) < ballShamaThreshold)))
    53.                     {
    54.                         //if (iy29 == 10 && ix == 10 && yPxDiv30 == 10 && xPxDiv30 == 10) { print("shamas: " + shamaRed + " : " + shamaGreen + " : " + shamaBlue); }
    55.                         posCoord.Add(tmpX + ";" + tmpY); numbOfBlacks++;
    56.                     }
    57.                     else
    58.                     {
    59.                         posCoord.Add("T");
    60.                     }
    61.                 }
    62.                 else
    63.                 {
    64.                     if (rVperCent < ballCentrererX || (Mathf.Abs(shamaRed - shamaGreen) < ballShamaThreshold && (Mathf.Abs(shamaGreen - shamaBlue) < ballShamaThreshold)))
    65.                     {
    66.                         posCoord.RemoveAt(tmpCount0);
    67.                         posCoord.Insert(tmpCount0, tmpX + ";" + tmpY); numbOfBlacks++;
    68.                     }
    69.                     else
    70.                     {
    71.                         posCoord.RemoveAt(tmpCount0);
    72.                         posCoord.Insert(tmpCount0, "B");
    73.                     }
    74.                     tmpCount0++;
    75.                 }
     
  13. shinjism

    shinjism

    Joined:
    Mar 15, 2016
    Posts:
    1
    Hello,
    I would like to run OpenCVForUnityExample on iOS 10.
    And if possible, I would like to use Native Cardboard support for iOS provided by Unity 5.6, is it possible?
    Thanks in advance
     
  14. lightstone

    lightstone

    Joined:
    Feb 9, 2013
    Posts:
    5
    Hi,
    purchased OpenCV a while back ago. Been using it for some projects. Great porting btw :)

    My question for now: Is there any from the samples that is using sub-pixel algorithm?
    What I need is to be able to track IR laser beam with sub-pixel accuracy. But I'm not quite sure where to start.
     
  15. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    I have not tested the combination of ”OpenCVforUnity” and ”Native Cardboard support for iOS”.
     
  16. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    OpenCV for Unity
    Released Version 2.1.5


    Version changes
    2.1.5
    [Common]Updated to OpenCV3.2.0.
    [Common]Added fuzzy, phase_unwrapping, saliency, shape, tracking module.
    [Common]Added TrackingSample.
    [iOS]Added ios_exclude_contrib.zip for build size reduction.
    [Android]Added android_exclude_contrib.zip for build size reduction.
     
  17. DavidRDev

    DavidRDev

    Joined:
    Jul 19, 2012
    Posts:
    47
    Hello!

    We installed Release Version 2.1.5 with Unity 5.4.2. We followed the tutorial video and we're running into issues with this version.


    Anything related to face tracking doesn't work, some examples appear to work and some examples are appearing black. We've tried reimporting the plugin and refollowing the steps and we can't get it to work. Is anyone else running into these issues with 2.1.5?

    We are also using DlibFaceLandmarkDetector. We see a redbox around faces in editor, but when we test on device, we see nothing.


    Any help would be appreciated!
     
  18. zykoonLokey

    zykoonLokey

    Joined:
    Dec 26, 2016
    Posts:
    2
    Hi!

    I've recently purchased both OpenCV and Dlib, tested their examples, and I noticed these things in all of their examples: FaceSwapperExample, FaceMaskExample and FaceTrackerExample using OpenCV run slowly, like a lot is going on with the device. Is there anything we can do to make them run smoothly like Facebook's MSQRD app, and is there anything in this package to record a video with the face mask on?


    thanks,
     
  19. hello_jangbuk

    hello_jangbuk

    Joined:
    Apr 6, 2017
    Posts:
    1
    hi .
    I downloaded “MarkerLessARExample” and it runs perfectly on my Windows PC, but when I switch the platform to iOS or Android I find that when showing the cube it drops frames... Is there any solution to run it smoothly on mobile platforms?
     
  20. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    At the moment, There is no such example.
     
  21. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Which example does not work?
     
  22. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    It is possible to speed up processing by downscaling image. OptimizationSample is optimized by downscale and skipframe using "OpenCV for Unity".
    Also, Video recording is not possible with this package.
     
  23. Wolk

    Wolk

    Joined:
    Jul 12, 2014
    Posts:
    59
    Hello, with the new update (to ocv 3.2) , is it possible to do this yet? I want to save either descriptors or descriptormatcher, since i don't want to spend 5 minutes for it to process if i have a heap of pictures.
    Thanks


    Edit:
    With .save() i get
    %YAML:1.0
    ---
    my_object:
    {}

    With.write() i get
    %YAML:1.0
    ---


    So it seems it's not working yet?
     
    Last edited: Apr 10, 2017
  24. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    This bug was fixed in OpenCV 3.2.

    This issue seems to be an issue of OpenCV itself.
    In order to solve this issue, the write method of matcher.cpp needs to be implemented.
    https://github.com/opencv/opencv/blob/master/modules/features2d/src/matchers.cpp#L666
    This method is called from algorithm.cpp.
    https://github.com/opencv/opencv/blob/master/modules/core/src/algorithm.cpp#L56

    The write method of svm.cpp has already been implemented, so it is possible to save the training result.
    https://github.com/opencv/opencv/blob/master/modules/ml/src/svm.cpp#L2057
    Code (CSharp):
    1. <?xml version="1.0"?>
    2. <opencv_storage>
    3. <opencv_ml_svm>
    4.   <format>3</format>
    5.   <svmType>C_SVC</svmType>
    6.   <kernel>
    7.     <type>LINEAR</type></kernel>
    8.   <C>1.</C>
    9.   <term_criteria><iterations>100</iterations></term_criteria>
    10.   <var_count>2</var_count>
    11.   <class_count>2</class_count>
    12.   <class_labels type_id="opencv-matrix">
    13.     <rows>2</rows>
    14.     <cols>1</cols>
    15.     <dt>i</dt>
    16.     <data>
    17.       -1 1</data></class_labels>
    18.   <sv_total>1</sv_total>
    19.   <support_vectors>
    20.     <_>
    21.       -8.13008100e-003 8.16326495e-003</_></support_vectors>
    22.   <uncompressed_sv_total>3</uncompressed_sv_total>
    23.   <uncompressed_support_vectors>
    24.     <_>
    25.       501. 10.</_>
    26.     <_>
    27.       255. 10.</_>
    28.     <_>
    29.       501. 255.</_></uncompressed_support_vectors>
    30.   <decision_functions>
    31.     <_>
    32.       <sv_count>1</sv_count>
    33.       <rho>-2.9915380786460908e+000</rho>
    34.       <alpha>
    35.         1.</alpha>
    36.       <index>
    37.         0</index></_></decision_functions></opencv_ml_svm>
    38. </opencv_storage>
     
    Last edited: Apr 11, 2017
  25. Wolk

    Wolk

    Joined:
    Jul 12, 2014
    Posts:
    59
    I'm using markerless ar - https://www.assetstore.unity3d.com/en/#!/content/77560
    Which uses knn, any workaround?
    orz
     
  26. integence

    integence

    Joined:
    Feb 17, 2012
    Posts:
    19
    Hi

    I need an urgent help please :)

    I try to use a laser pointer as a game controller (/mouse). A picture is worth a thousand words, so:


    http://projekty.integence.pl/opencv-calib-reverse-projection.jpg

    I'm stuck at the last part where I need to reverse point projection, to transform an ImagePoint back into an ObjectPoint.

    I believe that I need a function opposite to the Calib3d.projectPoints(), but I don't know OpenCV very well, and can't find such a function in the docs.

    I tried to do it manually, like this:
    Code (CSharp):
    1. // example point
    2. MatOfPoint2f imagePoint = new MatOfPoint2f(new Point(200, 100));
    3. Calib3d.Rodrigues(rvec, rotationMatrix);
    4. Mat P = (cameraMatrix.inv() * imagePoint - tvec) * rotationMatrix.inv();
    But I get an empty matrix [0*0] in return, and I don't know why or what I did wrong.

    Any help will be much appreciated.
    Thanks in advance! :)
     
  27. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Could you try this code?
    http://stackoverflow.com/questions/12299870/computing-x-y-coordinate-3d-from-image-point

    C# version
    Code (CSharp):
    1.  
    2.             //set cameraparam
    3.             Mat cameraMatrix = new Mat (3, 3, CvType.CV_64FC1);
    4. //Please set an appropriate value.
    5.             Debug.Log ("camMatrix " + cameraMatrix.dump ());
    6.            
    7.             MatOfDouble distCoeffs = new MatOfDouble (0, 0, 0, 0);
    8. //Please set an appropriate value.
    9.             Debug.Log ("distCoeffs " + distCoeffs.dump ());
    10.  
    11.  
    12.             List<Point> imagePointsList = new List<Point> ();
    13.             List<Point3> objectPointsList = new List<Point3> ();
    14.             //img points are green dots in the picture
    15.             imagePointsList.Add (new Point (271.0, 109.0));
    16.             imagePointsList.Add (new Point (65.0, 208.0));
    17.             imagePointsList.Add (new Point (334.0, 459.0));
    18.             imagePointsList.Add (new Point (600.0, 225.0));
    19.            
    20.             //object points are measured in millimeters because calibration is done in mm also
    21.             objectPointsList.Add (new Point3 (0.0, 0.0, 0.0));
    22.             objectPointsList.Add (new Point3 (-511.0, 2181.0, 0.0));
    23.             objectPointsList.Add (new Point3 (-3574.0, 2354.0, 0.0));
    24.             objectPointsList.Add (new Point3 (-3400.0, 0.0, 0.0));
    25.  
    26.             MatOfPoint2f imagePoints = new MatOfPoint2f ();
    27.             imagePoints.fromList (imagePointsList);
    28.             MatOfPoint3f objectPoints = new MatOfPoint3f ();
    29.             objectPoints.fromList (objectPointsList);
    30.            
    31.             Mat rvec = new Mat (1, 3, CvType.CV_64FC1);
    32.             Mat tvec = new Mat (1, 3, CvType.CV_64FC1);
    33.             Mat rotationMatrix = new Mat (3, 3, CvType.CV_64FC1);
    34.            
    35.             Calib3d.solvePnP (objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
    36.             Calib3d.Rodrigues (rvec, rotationMatrix);
    37.  
    38.  
    39.             Mat uvPoint = Mat.ones (3, 1, CvType.CV_64FC1); //u,v,1
    40. //            uvPoint.at<double>(0,0) = 363.; //got this point using mouse callback
    41. //            uvPoint.at<double>(1,0) = 222.;
    42.             uvPoint.put (0, 0, 363.0);
    43.             uvPoint.put (1, 0, 222, 0);
    44.  
    45.             Mat tempMat = new Mat ();
    46.             Mat tempMat2 = new Mat ();
    47.             double s;
    48.             tempMat = rotationMatrix.inv () * cameraMatrix.inv () * uvPoint;
    49.             tempMat2 = rotationMatrix.inv () * tvec;
    50. //            s = 285 + tempMat2.at<double>(2,0); //285 represents the height Zconst
    51.             s = 285 + tempMat2.get (2, 0) [0];
    52. //            s /= tempMat.at<double>(2,0);
    53.             s /= tempMat2.get (2, 0) [0];
    54.  
    55. //            std::cout << "P = " << rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec) << std::endl;
    56.             Mat P = rotationMatrix.inv () * (s * cameraMatrix.inv () * uvPoint - tvec);
    57.             Debug.Log ("P = " + P.dump ());
     
  28. integence

    integence

    Joined:
    Feb 17, 2012
    Posts:
    19
    Thanks a lot for the reply. It moves me one step forward :)

    As I see everything works great for Mat uvPoint = new Mat(3, 1, CvType.CV_64FC1);
    but when I try to use MatOfPoint, MatOfPoint2f or MatOfPoint3f it doesn't work.

    Unfortunately my detection by findContours gives me a List<MatOfPoint> as a result;
    So I would like to use it in my reprojection function as well;

    Of course I can make For Loop and manually rewrite MatOfPoint into Mat, but I feel it is not the best solution, and maybe there is some ready to use method for that in OpenCV? (I tried Mat.convertTo, but it seems it is not what I need).
     
  29. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,084
    Is this still the case that front facing portrait cam is only available via landscape mode?

    Also, any advice for cropping the matrix to be smaller?

     
  30. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    The reason why it does not work is that Mat's CVType is different.CV_64FC1 and CV_32FC2
    There seems to be no simple conversion method.
    Code (CSharp):
    1.             MatOfPoint2f matOfPoint2f = new MatOfPoint2f (new Point (363.0f, 222.0f));
    2.             Mat uvPoint = Mat.ones (3, 1, CvType.CV_64FC1);
    3.  
    4.             uvPoint.put (0, 0, matOfPoint2f.get (0, 0));
     
  31. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
  32. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,084
  33. ginoadriano

    ginoadriano

    Joined:
    Dec 15, 2014
    Posts:
    2
    Hi @EnoxSoftware,

    i purchased and downloaded OpenCV for Unity and also the free RealtimeFaceRecognitionExample.
    For a project i'm currently working on i need to make a face-recognition demo where i can store some pictures of people, with their names as the title, in a folder.... then have the face recognition look for matches ...and when found highlight the face with the square bounding box and display the name of that person.
    I've been looking online for more information on how to do this but i can hardly find anything about doing just that in Unity. The documentation also doesn't mention anything about adding a custom "string" to a "new person" in the example.
    Please can you provide some pointers or an example on how to do this?

    I basically just need it to do what it does already — track faces, recognize... and then display the name underneath or beside the bounding box.
     
  34. flamingfox

    flamingfox

    Joined:
    Apr 14, 2015
    Posts:
    8
    Hello @EnoxSoftware,

    My team is working on a PC/Mac game project which uses OpenCV for Unity; we process a stream or a video in our game.
    The stream works fine but is the video part where we have a problem.
    We have to process videos in different format : AVI or MP4 or MOV.
    On PC, when we give it a .mp4 or a .mov file, VideoCapture doesn't want to open the file. On Mac it's fine.
    We think the FFmpeg.dll is the problem, but we have already followed all the recommendations you gave.
    We downloaded the FFmpeg .dll (opencv_ffmpeg320.dll and opencv_ffmpeg320_64.dll), set the windows path to point to .dll, copy the .dll in the root project of Unity. None of that resolve the problem.

    Here is a shard of our code to open videos on OpenCV :
    Code (CSharp):
    1. public void processVideo(string file)
    2. {
    3.      capture = new VideoCapture ();
    4.      if (System.IO.File.Exists(file))
    5.      {
    6.             Debug.Log("File exists");
    7.             Debug.Log("Capture open: " + capture.open(file));
    8.             Debug.Log("Capture is opened: " + capture.isOpened());
    9.      }
    10. }
    11.  
    12. /*** Log results : ***/
    13. File exists
    14. Capture open: false
    15. Capture is opened: false
    16.  
    We also test on your VideoCaptureExample scene with our .mp4 video, and it's the same result.

    We don't understand what we missed.
     
    Last edited: Apr 20, 2017
  35. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    ina likes this.
  36. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    In my environment these files worked correctly with VideoCaptureExample. Does it work in your environment?
    http://www.gomplayer.jp/img/sample/mp4_h264_aac.mp4
    http://www.gomplayer.jp/img/sample/mp4_mpeg4_aac.mp4
    http://www.gomplayer.jp/img/sample/mov_h264_aac.mov

    Unity 5.5.1f1
    OpenCV for Unity 2.1.6
     
  37. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    OpenCV for Unity
    Released Version 2.1.6


    Version changes
    2.1.6
    [Common]Fixed fastMatToTexture2D() method.
     
  38. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,084
  39. flamingfox

    flamingfox

    Joined:
    Apr 14, 2015
    Posts:
    8
    Hello @EnoxSoftware,
    Thanks, yep it works now. Like in most cases, the problem is between the keyboard and the chair. We took the wrong dll of FFmpeg, or we didn't configure Unity correctly with the dll when we put them in the project.

    It works fine now; we are sorry that we took up your time for relatively nothing.
     
  40. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
  41. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    It is also possible to use VideoPlayer class instead of VideoCapture class of OpenCV.
    https://github.com/EnoxSoftware/VideoPlayerWithOpenCVForUnityExample
     
  42. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,084
    What is the most optimized way to use ROI with webcamTexture?

    Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

    Would manually cropping the colors32 be better than converting the webcamTexture to a mat first to crop and then submat? Added a new method to Utils: WebCamTextureToMatROI() - but not sure how performant it is


    Code (CSharp):
    1. public static void WebCamTextureToMatROI(WebCamTexture wct,Mat mat,Color32[] colors,Rect roi){
    2.             Mat mat0 = new Mat (wct.height, wct.width, CvType.CV_8UC4);
    3.             webCamTextureToMat (wct, mat0,colors);
    4.             mat = mat0.submat (roi);
    5.         }
    Not sure what the proper way to send in the Rect is actually - this does not seem to work
    Code (CSharp):
    1.  
    2. Rect r = new OpenCVForUnity.Rect( (int)(rgbaMat.width() * 0.1f),(int)(rgbaMat.height() * 0.1f),(int)(rgbaMat.width() * 0.8f),(int)(rgbaMat.height()*0.8f) );
    3. Utils.WebCamTextureToMatROI (webCamTexture, rgbaMat, colors, r);
    4.  
     
    Last edited: Apr 25, 2017
  43. TBruce

    TBruce

    Joined:
    Jan 18, 2015
    Posts:
    86
    Hi @EnoxSoftware,

    For the project I am working on i need to be able to match a template that has alpha to what is received on the webcam. Would you be able to tell me how I could best accomplish this?
     
    Last edited: Apr 26, 2017
  44. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Please refer to examples of Java.
    Code (CSharp):
    1.         void Update ()
    2.         {
    3.             if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
    4.  
    5.                 Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
    6.  
    7.  
    8.                 Mat roiMat = new Mat (rgbaMat, new OpenCVForUnity.Rect (100, 100, 300, 300));
    9.              
    10.                 Imgproc.rectangle (roiMat, new Point (0, 0), new Point (300, 300), new Scalar (255, 0, 0, 255), 5);
    11.                 Imgproc.line (roiMat, new Point (0, 0), new Point (300, 300), new Scalar (255, 0, 0, 255), 5);
    12.                 Imgproc.putText (roiMat, "W:" + roiMat.width () + " H:" + roiMat.height (), new Point (5, roiMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
    13.  
    14.                 roiMat.Dispose ();
    15.  
    16.  
    17.                 Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
    18.  
    19.                 Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors ());
    20.             }
    21.         }
    roi.PNG
     
  45. ina

    ina

    Joined:
    Nov 15, 2010
    Posts:
    1,084
    I guess I am trying to just return a smaller Mat directly in GetMat(), but it seems that the new smaller sized mat is not assigned? I'm primarily using a modified Utils.WebCamTextureToMat -

    WebCamTextureToMatHelper.cs
    Code (CSharp):
    1.  
    2.        public Mat GetMat ()
    3.         {
    4.             if (!initDone || !webCamTexture.isPlaying) {
    5.                 if (rotatedRgbaMat != null) {
    6.                     return rotatedRgbaMat;
    7.                 } else {
    8.                     return rgbaMat;
    9.                 }
    10.             }
    11.  
    12.             Utils.WebCamTextureToMatROI (webCamTexture,...
    13.  
    Utils.cs
    Code (CSharp):
    1.  
    2. public static void WebCamTextureToMatROI(WebCamTexture wct, Mat mat,Color32[] colors,Rect roi){
    3.             //new Mat (wct.height, wct.width, CvType.CV_8UC4);
    4.             webCamTextureToMat (wct, mat, colors);
    5.             mat =  new Mat(mat,roi);
    6.         }
    7.  
    8.  
     
  46. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Copying Mat is necessary.
    Code (CSharp):
    1. using UnityEngine;
    2. using System.Collections;
    3. using UnityEngine.UI;
    4.  
    5. #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    6. using UnityEngine.SceneManagement;
    7. #endif
    8. using OpenCVForUnity;
    9.  
    10. namespace OpenCVForUnityExample
    11. {
    12.     /// <summary>
    13.     /// WebCamTexture to mat example.
    14.     /// </summary>
    15.     [RequireComponent(typeof(WebCamTextureToMatHelper))]
    16.     public class WebCamTextureToMatHelperExample : MonoBehaviour
    17.     {
    18.         Mat cropMat;
    19.  
    20.         OpenCVForUnity.Rect cropRect;
    21.  
    22.         /// <summary>
    23.         /// The texture.
    24.         /// </summary>
    25.         Texture2D texture;
    26.  
    27.         /// <summary>
    28.         /// The web cam texture to mat helper.
    29.         /// </summary>
    30.         WebCamTextureToMatHelper webCamTextureToMatHelper;
    31.  
    32.         /// <summary>
    33.         /// The is flip vertical toggle.
    34.         /// </summary>
    35.         public Toggle isFlipVerticalToggle;
    36.        
    37.         /// <summary>
    38.         /// The is flip horizontal toggle.
    39.         /// </summary>
    40.         public Toggle isFlipHorizontalToggle;
    41.  
    42.         // Use this for initialization
    43.         void Start ()
    44.         {
    45.             Utils.setDebugMode (true);
    46.  
    47.             webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
    48.             webCamTextureToMatHelper.Init ();
    49.  
    50.             isFlipVerticalToggle.isOn = webCamTextureToMatHelper.flipVertical;
    51.             isFlipHorizontalToggle.isOn = webCamTextureToMatHelper.flipHorizontal;
    52.         }
    53.  
    54.         /// <summary>
    55.         /// Raises the web cam texture to mat helper inited event.
    56.         /// </summary>
    57.         public void OnWebCamTextureToMatHelperInited ()
    58.         {
    59.             Debug.Log ("OnWebCamTextureToMatHelperInited");
    60.  
    61.             cropRect = new OpenCVForUnity.Rect (100, 100, 200, 200);
    62.             cropMat = new Mat (cropRect.height, cropRect.width, CvType.CV_8UC4);
    63.  
    64.             Mat webCamTextureMat = new Mat (cropRect.height, cropRect.width, CvType.CV_8UC4);
    65.  
    66.             texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
    67.  
    68.             gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    69.  
    70.             gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    71.             Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
    72.  
    73.                                    
    74.             float width = webCamTextureMat.width ();
    75.             float height = webCamTextureMat.height ();
    76.                                    
    77.             float widthScale = (float)Screen.width / width;
    78.             float heightScale = (float)Screen.height / height;
    79.             if (widthScale < heightScale) {
    80.                 Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    81.             } else {
    82.                 Camera.main.orthographicSize = height / 2;
    83.             }
    84.         }
    85.  
    86.         /// <summary>
    87.         /// Raises the web cam texture to mat helper disposed event.
    88.         /// </summary>
    89.         public void OnWebCamTextureToMatHelperDisposed ()
    90.         {
    91.             Debug.Log ("OnWebCamTextureToMatHelperDisposed");
    92.  
    93.         }
    94.  
    95.         /// <summary>
    96.         /// Raises the web cam texture to mat helper error occurred event.
    97.         /// </summary>
    98.         /// <param name="errorCode">Error code.</param>
    99.         public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
    100.         {
    101.             Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
    102.         }
    103.  
    104.         // Update is called once per frame
    105.         void Update ()
    106.         {
    107.             if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
    108.  
    109.                 Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
    110.  
    111.                 Mat roiMat = new Mat (rgbaMat, cropRect);
    112.  
    113.                 roiMat.copyTo (cropMat);
    114.  
    115.                 roiMat.Dispose ();
    116.  
    117.                 Imgproc.putText (cropMat, "X:" + cropRect.x + " Y:" + cropRect.y + " W:" + cropRect.width + " H:" + cropRect.height, new Point (5, cropMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
    118.  
    119.                 Utils.matToTexture2D (cropMat, texture);
    120.             }
    121.         }
    122.    
    123.         /// <summary>
    124.         /// Raises the disable event.
    125.         /// </summary>
    126.         void OnDisable ()
    127.         {
    128.             webCamTextureToMatHelper.Dispose ();
    129.  
    130.             Utils.setDebugMode (false);
    131.         }
    132.  
    133.         /// <summary>
    134.         /// Raises the back button event.
    135.         /// </summary>
    136.         public void OnBackButton ()
    137.         {
    138.             #if UNITY_5_3 || UNITY_5_3_OR_NEWER
    139.             SceneManager.LoadScene ("OpenCVForUnityExample");
    140.             #else
    141.             Application.LoadLevel ("OpenCVForUnityExample");
    142.             #endif
    143.         }
    144.  
    145.         /// <summary>
    146.         /// Raises the play button event.
    147.         /// </summary>
    148.         public void OnPlayButton ()
    149.         {
    150.             webCamTextureToMatHelper.Play ();
    151.         }
    152.  
    153.         /// <summary>
    154.         /// Raises the pause button event.
    155.         /// </summary>
    156.         public void OnPauseButton ()
    157.         {
    158.             webCamTextureToMatHelper.Pause ();
    159.         }
    160.  
    161.         /// <summary>
    162.         /// Raises the stop button event.
    163.         /// </summary>
    164.         public void OnStopButton ()
    165.         {
    166.             webCamTextureToMatHelper.Stop ();
    167.         }
    168.  
    169.         /// <summary>
    170.         /// Raises the change camera button event.
    171.         /// </summary>
    172.         public void OnChangeCameraButton ()
    173.         {
    174.             webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
    175.         }
    176.  
    177.         /// <summary>
    178.         /// Raises the is showing face points toggle event.
    179.         /// </summary>
    180.         public void OnIsFlipVerticalToggle ()
    181.         {
    182.             if (isFlipVerticalToggle.isOn) {
    183.                 webCamTextureToMatHelper.flipVertical = true;
    184.             } else {
    185.                 webCamTextureToMatHelper.flipVertical = false;
    186.             }
    187.         }
    188.        
    189.         /// <summary>
    190.         /// Raises the is showing axes toggle event.
    191.         /// </summary>
    192.         public void OnIsFlipHorizontalToggle ()
    193.         {
    194.             if (isFlipHorizontalToggle.isOn) {
    195.                 webCamTextureToMatHelper.flipHorizontal = true;
    196.             } else {
    197.                 webCamTextureToMatHelper.flipHorizontal = false;
    198.             }
    199.         }
    200.     }
    201. }
     
  47. Ayodhya1991

    Ayodhya1991

    Joined:
    Apr 24, 2017
    Posts:
    3
    Hi @EnoxSoftware,

    I am currently working with FaceTracker AR example.
    If I want to overlay only some face areas, like the eyes, lips, etc.,
    how can it be done?
    I have openCV for Unity Plug in with me.

    Thanks in Advance.
     
    Last edited: Apr 28, 2017
  48. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    I have no such example. Please refer to MatchTemplateExample.
     
  49. TBruce

    TBruce

    Joined:
    Jan 18, 2015
    Posts:
    86
    I understand that you have not provided an example of matching an image/template to another. I have been working with the MatchTemplateExample, that demo is what led me to purchase OpenCV for Unity in the first place.

    I have been trying to match a template using a png image with transparency, with its import settings pretty much set up the same as the original template (with the exception of transparency), and have had no luck.

    There was some chatter on the net a few years ago about doing this but nothing ever panned out. I was hoping with your experience with OpenCV you could help out, not to mention that it would be a nice addition to your list of demos.

    Thanks in advance!
     
  50. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,564
    Please add the GameObject you want to display to the children of ARObjects.
    facetracker.PNG