Search Unity

  1. Looking for a job or to hire someone for a project? Check out the re-opened job forums.
    Dismiss Notice
  2. Unity 2020 LTS & Unity 2021.1 have been released.
    Dismiss Notice
  3. Good news ✨ We have more Unite Now videos available for you to watch on-demand! Come check them out and ask our experts any questions!
    Dismiss Notice

[RELEASED] OpenCV for Unity

Discussion in 'Assets and Asset Store' started by EnoxSoftware, Oct 30, 2014.

  1. ctedin187

    ctedin187

    Joined:
    Aug 20, 2018
    Posts:
    11
    upload_2020-9-21_14-28-28.png
     
    EnoxSoftware likes this.
  2. ctedin187

    ctedin187

    Joined:
    Aug 20, 2018
    Posts:
    11
    While I have you on. I've been looking into YOLACT or YOLACT++ libraries for background segmentation. Do you think it would be possible to implement any of those libraries into this? It would be fantastic to implement those for background replacement.
    The green screen OpenCV tool is great, but we can't use a green screen in our situation at all.
     
  3. runtodev

    runtodev

    Joined:
    Nov 30, 2012
    Posts:
    4
    Hello, EnoxSoftware,

    I bought the plugin and I am testing the matchTemplate function...

    In the matchTemplate function example(https://github.com/EnoxSoftware/Ope.../MatchTemplateExample/MatchTemplateExample.cs),

    I could get the result, but I have to convert the result Mat to Texture2D,

    As I proceeded, I tried to convert result Mat to Texture2D by using Utils.MatToTexture2D function.

    but I got the error message that is "The Mat object must have the types 'CV_8UC4' (RGBA) , 'CV_8UC3' (RGB) or 'CV_8UC1' (GRAY)."

    and I found that the matchTemplate function only produces a result as a 32-bit Mat.

    What do I have to do? Can I convert this result Mat to an 8-bit (CV_8UC4) Mat?

    Or is there anything else that I can do?

    Thanks in advance...
     
  4. runtodev

    runtodev

    Joined:
    Nov 30, 2012
    Posts:
    4
    I made progress after the above question.

    but I have another question.

    I used the findContours function and I could get the contours of the scanned image (it's actually an image drawn on white paper).

    But how can I crop the part of the image along the contours (not a rectangular crop, just exactly along the contour)?
    I mean I want the inside part of found contours.

    What method can I try to get that part?
     
  5. aguroshou4620502

    aguroshou4620502

    Joined:
    Jun 28, 2020
    Posts:
    3
    Thank you for answering my question.
    But unfortunately, I didn't solve it.

    About, "Videowriter" scene's "VideoWriterExample.cs".
    In a desktop application, I can use "writer.open(savePath, VideoWriter.fourcc('H', '2', '6', '4'), 30, new Size(Screen.width, Screen.height));" to save a movie with ".mp4" codec.

    But, In a WebGL application, I can't use "writer.open(savePath, VideoWriter.fourcc('H', '2', '6', '4'), 30, new Size(Screen.width, Screen.height));" to save a movie with ".mp4" codec.

    If you know how to save a movie with ".mp4" in a WebGL application, please teach me how to do it.

    I can use " writer.open(savePath, VideoWriter.fourcc('M', 'J', 'P', 'G'), 30, new Size(Screen.width, Screen.height));" in a WebGL application. but the codec is ".avi".
    ".avi" is not good because I can't load in a ZOOM's virtual background movie.

    Thank you for reading.
     
  6. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Code (CSharp):
    1.             Imgproc.matchTemplate (imgMat, tempMat, result, match_method);
    2.  
    3.             //Imgproc.threshold (result, result, 0.8, 1.0, Imgproc.THRESH_TOZERO);//threshold = 0.8
    4.  
    5.             //for (int i = 0; i < result.rows (); i++) {
    6.             //    for (int j = 0; j < result.cols (); j++) {
    7.             //        if (result.get (i, j) [0] > 0) {
    8.  
    9.             //            Imgproc.rectangle (imgMat, new Point (j, i), new Point (j + tempMat.cols (), i + tempMat.rows ()), new Scalar (255, 0, 0, 255), 2);
    10.             //            Debug.Log ("value" + result.get (i, j) [0]);
    11.             //        }
    12.             //    }
    13.             //}
    14.  
    15.             Mat resultMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC1);
    16.  
    17.             result.convertTo(resultMat, CvType.CV_8UC1,255);
    18.  
    19.             Texture2D texture = new Texture2D(resultMat.cols(), resultMat.rows(), TextureFormat.RGBA32, false);
    20.             Utils.matToTexture2D(resultMat, texture);
    matchTemplate.PNG
     
  7. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    You can use mask to copy only some of the pixels.
    Code (CSharp):
    1.             /// Find srcContours
    2.             Imgproc.findContours (srcMat, srcContours, srcHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    3.  
    4.             //maskMat
    5.             Mat maskMat = new Mat(dstTexture.height, dstTexture.width, CvType.CV_8UC1, new Scalar(0));
    6.  
    7.             //Fill the inside of the contour of the maskMat with white.
    8.             Imgproc.drawContours(maskMat, srcContours, -1, new Scalar(255), -1);
    9.  
    10.             //Copy only the inside of the contour into dstMat using maskMat.
    11.             srcMat.copyTo(dstMat, maskMat);
     
  8. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Unfortunately, WebGL platforms do not seem to support VideoWriter.fourcc('H', '2', '6', '4'). I also tried VideoWriter.fourcc('m', 'p', '4', 'v') but the writer.open() method failed.
     
    aguroshou4620502 likes this.
  9. aguroshou4620502

    aguroshou4620502

    Joined:
    Jun 28, 2020
    Posts:
    3
    I see. I'm going to try another way to change the movie's codec ".avi" to ".mp4".
    Thank you for answering my question.
     
  10. Kluarc

    Kluarc

    Joined:
    May 1, 2018
    Posts:
    4
    Hi,

    I have trained 6 objects (phones) to be detected, added their respective weights and cfg files to the YOLO object detection scene in opencv. There are false positives detected for a few devices, the wall and the door have been detected as objects. The Conf threshold is set above .95 for the detection. How do we negate such scenarios. I have added a few screen shots. Any suggestions would help.

    PS: The training images do not have any reflection on them.
     

    Attached Files:

  11. kbop2000

    kbop2000

    Joined:
    Apr 17, 2019
    Posts:
    9
    Thanks for making this plugin.

    Does this plugin support the Job System?
     
  12. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    I have no example of combining this plugin with JobSystem.
     
  13. AconitumNapellum

    AconitumNapellum

    Joined:
    Sep 28, 2018
    Posts:
    3
    Hi! Two questions:

    1. My webcam, in your demos in the free trial package, appears very dark and kinda bluish in color. Could it be that the webcam uses a strange color space?

    2. I'm trying to build a QR code scanner: first I need to detect the contours of a sheet of paper on a dark background, then after I managed to get the warped Mat, I would need to decode a QR found on the top left of the page. I managed to get OpenCV to recognize the QR, but I can't manage to decode it.

    Details: I start with a 3264x2248 texture in the form of a Color32[] using AVPro Live Camera plugin. Then I do contour manipulation to get a Mat of exactly the page ( similar as to what this guy has done ). After that I try to decode a QR.

    Things I tried: Cropping only the area of the QR, making the Mat gray, shrinking the Mat.

    I don't really know what to do! Been trying to do this for a few days now. Even almost copying the entire code of the QRCodeDetectorWebCamTextureExample demo, only using AVPro Video Live Camera feed, isn't working.
     
  14. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Could you try AvProLiveCameraWithOpenCVExampe?
    1. Import UnityPlugin-AVProLiveCamera-Latest-Trial.unitypackage
    2. Import OpenCV for Unity v2.4.0
    3. Import AVProLiveCameraWithOpenCVExampe.unitypackage

    AVProLiveWithOpenCV.PNG
     

    Attached Files:

  15. majilisaias

    majilisaias

    Joined:
    Sep 29, 2018
    Posts:
    1
    Hi, I'm using the Magic Leap with OpenCV for Unity Example project, however during the testing I get two errors of failing to start MLCamera and one of failing to create native cv camera tracker. Is this due to using the free trial of OpenCv for Unity?
     
  16. AconitumNapellum

    AconitumNapellum

    Joined:
    Sep 28, 2018
    Posts:
    3
    This seems to work a lot better, going to try and compare my code to yours and understand what's wrong. Thanks for your fast response!
     
  17. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Unfortunately, the trial version only works with UnityEditor.
     
  18. Octarina

    Octarina

    Joined:
    Oct 4, 2019
    Posts:
    5
    Hi,
    I would like to set up SimpleBlobDetector parameters dynamically. Is there a way to set parameters without loading them from a yaml file ?
    I can't find anything in that way in the documentation.
     
  19. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Octarina likes this.
  20. Roel03

    Roel03

    Joined:
    Jan 26, 2016
    Posts:
    3
    Hi,
    I'm trying to make this work with Hololens 2 but I get the error: Unable to load DLL 'opencvforunity': The specified module could not be found.

    Hololens 2 requires ARM64 and the package doesn't have the dlls for ARM64. Is there a plan to integrate dlls that work with ARM64?
    Or is there something else I can change to make this work?
     
  21. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    The dlls for ARM64 are included in OpenCVForUnity2.4.0.
    UWP_ARM64.PNG
     
  22. sidbhise

    sidbhise

    Joined:
    May 13, 2020
    Posts:
    7
    Hello,

    I am facing an issue while doing a matchTemplate using the Opencvforunity plugin. It runs well on iOS, but fails on an Android Samsung Galaxy S6 Tab. The app crashes as soon as it hits the below method-

    //Code -

    ——————

    Imgproc.matchTemplate(imgMat, tempMat, result, match_method);

    //normalize(result, result, 0, 1, NORM_MINMAX, -1, new Mat());

    //Imgproc.threshold(result, result, 0.9, 1.0, Imgproc.THRESH_TOZERO);//threshold = 0.8

    minMaxLocResult = minMaxLoc(result);



    Crash log -

    ———————

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #00 pc 006f8c6c /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libopencvforunity.so (cv::_InputArray::dims(int) const+380) (BuildId: c34742ba370121bb052b67f7fde3a427fa6202a0)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #01 pc 0070351b /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libopencvforunity.so (cv::minMaxLoc(cv::_InputArray const&, double*, double*, cv::point_<int>*, cv::point_<int>*, cv::_InputArray const&)+46) (BuildId: c34742ba370121bb052b67f7fde3a427fa6202a0)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #02 pc 0013ca8f /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libopencvforunity.so (core_Core_n_1minMaxLocManual+102) (BuildId: c34742ba370121bb052b67f7fde3a427fa6202a0)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #10 pc 001bd031 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #11 pc 001c664f /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #12 pc 001cb4c5 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #13 pc 001cb219 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #14 pc 000f3723 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #15 pc 0015e977 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #16 pc 0015e993 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #17 pc 0015eb27 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #18 pc 0021081d /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:28.916 28858-28876/? E/CRASH: #19 pc 0021e08d /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:29.490 28858-28876/? E/AndroidRuntime: FATAL EXCEPTION: UnityMain

    Process: com., PID: 28858

    java.lang.Error: *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***

    Build type 'Release', Scripting Backend 'il2cpp', CPU 'armeabi-v7a'

    Build fingerprint: 'samsung/gts6l’

    Revision: '8'

    ABI: 'arm'

    Timestamp: 2020-10-23 19:54:28+0530

    pid: 28858, tid: 28876, name: UnityMain >>> com. <<<

    uid: 10438

    signal 11 (SIGSEGV), code 1 (SEGV_MAPERR), fault addr 0x4

    Cause: null pointer dereference

    r0 00000000 r1 c86fe958 r2 00000001 r3 00000001

    r4 ffffffff r5 ea52cd94 r6 00000000 r7 c86fe8f8

    r8 c86fe978 r9 c86fe980 r10 ea52cd94 r11 c86fe9d0

    ip c2c29014 sp c86fe8d0 lr c235d51f pc c2352c6c



    backtrace:

    #00 pc 006f8c6c /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libopencvforunity.so (cv::_InputArray::dims(int) const+380) (BuildId: c34742ba370121bb052b67f7fde3a427fa6202a0)

    #01 pc 0070351b /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libopencvforunity.so (cv::minMaxLoc(cv::_InputArray const&, double*, double*, cv::point_<int>*, cv::point_<int>*, cv::_InputArray const&)+46) (BuildId: c34742ba370121bb052b67f7fde3a427fa6202a0)

    #02 pc 0013ca8f /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libopencvforunity.so (core_Core_n_1minMaxLocManual+102) (BuildId: c34742ba370121bb052b67f7fde3a427fa6202a0)

    #03 pc 00f0b9f4 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #04 pc 00f0b794 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #05 pc 01336bb8 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #06 pc 0067a454 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #07 pc 00eb6990 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #08 pc 002eb370 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #09 pc 003e15d8 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libil2cpp.so (BuildId: 6b1615305ace34fe60d94f4bd5bb21f09d2f6312)

    #10 pc 001bd031 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #11 pc 001c664f /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #12 pc 001cb4c5 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #13 pc 001cb219 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #14 pc 000f3723 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #15 pc 0015e977 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #16 pc 0015e993 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #17 pc 0015eb27 /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #18 pc 0021081d /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    2020-10-23 19:54:29.491 28858-28876/? E/AndroidRuntime: #19 pc 0021e08d /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/lib/arm/libunity.so (BuildId: e14b941ba32112f8c2552f879f7ab2260076573b)

    #20 pc 000067bb /data/app/com.-C5gTlzC3vLB9nTRN83vCUg==/oat/arm/base.odex



    at libopencvforunity.cv::_InputArray::dims(int) const(dims:380)

    at libopencvforunity.cv::minMaxLoc(cv::_InputArray const&, double*, double*, cv::point_<int>*, cv::point_<int>*, cv::_InputArray const&)(minMaxLoc:46)

    at libopencvforunity.core_Core_n_1minMaxLocManual(core_Core_n_1minMaxLocManual:102)

    at libil2cpp.0xf0b9f4(Native Method)

    at libil2cpp.0xf0b794(Native Method)

    at libil2cpp.0x1336bb8(Native Method)

    at libil2cpp.0x67a454(Native Method)

    at libil2cpp.0xeb6990(Native Method)

    at libil2cpp.0x2eb370(Native Method)

    at libil2cpp.0x3e15d8(Native Method)

    at libunity.0x1bd031(Native Method)

    at libunity.0x1c664f(Native Method)

    at libunity.0x1cb4c5(Native Method)

    at libunity.0x1cb219(Native Method)

    at libunity.0xf3723(Native Method)

    at libunity.0x15e977(Native Method)

    at libunity.0x15e993(Native Method)

    at libunity.0x15eb27(Native Method)

    at libunity.0x21081d(Native Method)

    at libunity.0x21e08d(Native Method)

    at base.0x67bb(Native Method)

    2020-10-23 19:54:29.778 1375-1642/? W/InputDispatcher: channel '4e2c7c9 com./com.unity3d.player.UnityPlayerActivity (server)' ~ Consumer closed input channel or an error occurred. events=0x9, fd=838

    2020-10-23 19:54:29.778 1375-1642/? E/InputDispatcher: channel '4e2c7c9 com./com.unity3d.player.UnityPlayerActivity (server)' ~ Channel is unrecoverably broken and will be disposed!
     
  23. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Thank you very much for reporting.
    Could you send me the code you've tested?
    https://enoxsoftware.com/opencvforunity/contact/technical-inquiry/
     
  24. Xtrenaline

    Xtrenaline

    Joined:
    Oct 29, 2020
    Posts:
    1
    Hello,

    Can I use Opencvforunity to implement a face recognition application on HoloLens 2?

    Thank you.
     
  25. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    ARM64 library is included in OpenCVForUnity. This ARM64 library worked with Lenovo Yoga C630 (Snapdragon850 ARM64) without any problem.
    I don't own a HoloLens 2, but I've received reports from several people that it worked with the HoloLens 2.
     
  26. LessThanEpic

    LessThanEpic

    Joined:
    Aug 22, 2014
    Posts:
    13
    I just bought this asset and I'm trying to get the FaceMark example to work in the editor, but I always get the following error:

    model file is not loaded. The facemark model file can be downloaded here:


    However, I have downloaded the file and it is in the correct location (Assets/StreamingAssets/facemark/lbfmodel). Please advise.
     
  27. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Is the file extension yaml?
    facemask.PNG
     
  28. LessThanEpic

    LessThanEpic

    Joined:
    Aug 22, 2014
    Posts:
    13
    It was yml. Changing it to yaml fixed it. Thanks!
     
  29. VRxMedical

    VRxMedical

    Joined:
    Mar 8, 2018
    Posts:
    3
    Hi, Can you please help me to add transparency to imgproc.rectangle?
     
  30. sofianehamza

    sofianehamza

    Joined:
    Feb 13, 2017
    Posts:
    14
    I've just purchased open cv...using it with playmaker actions; I wanted to learn about the easiest way to detect a marker and then substitute it with a game object
    there are so many playmaker actions; I'm totally lost; there aren't any tutorials
    I really appreciate any help you can provide.
     
  31. sofianehamza

    sofianehamza

    Joined:
    Feb 13, 2017
    Posts:
    14
    Hello,
    after downloading the ar based marker example
    I'm getting this error
    when the animation keeps freezing as shown in the video

    thanks in advance
     
  32. j3vr0n

    j3vr0n

    Joined:
    Oct 26, 2020
    Posts:
    3
    Hi, I'm attempting to load the YOLO object detection assets to run the object detection. I've loaded the appropriate files into the StreamingAssets/dnn folder, but the programs still says "model file not loaded."

    upload_2020-11-16_13-44-7.png

    upload_2020-11-16_13-44-23.png
     
  33. j3vr0n

    j3vr0n

    Joined:
    Oct 26, 2020
    Posts:
    3
    I've also tried in Unity version 2019.10f2 with the same results.

    upload_2020-11-16_14-1-10.png
     
  34. j3vr0n

    j3vr0n

    Joined:
    Oct 26, 2020
    Posts:
    3
    was this ever solved?
     
  35. jevon-williams

    jevon-williams

    Joined:
    Nov 19, 2020
    Posts:
    3
    Hi, I recently purchased the OpenCV for Unity asset under the assumption that it would work with decent performance using Yolo. Does anyone have recommendations on the best DNN algorithm to use for OpenCV on a mobile device's hardware (iPhone 12)? I've attempted training models for both Yolo V4 and Yolo V3 Tiny and only attain 3-4 FPS in the OpenCV asset within Unity and on iPhone. However, these models seems to run fine in other applications such as Neural Vision on a mobile device. Are there any other models that work better with OpenCV for Unity or are these scripts just not optimized for performance? I've noticed there are other mobile apps released recently (such as iDetection and Objects) that utilize different versions of Yolo with success, I'd just much rather not have to convert to CoreML to get performance on mobile devices if it's possible in this asset.
     
    Last edited: Nov 19, 2020
  36. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    The Imgproc.rectangle method does not support alpha-transparency.

    https://docs.opencv.org/3.4/d6/d6e/group__imgproc__draw.html
    Note
    The functions do not support alpha-transparency when the target image is 4-channel. In this case, the color[3] is simply copied to the repainted pixels. Thus, if you want to paint semi-transparent shapes, you can paint them in a separate buffer and then blend it with the main image.

    In order to blend the images it seems that you need to use the addWeighted method.
    https://stackoverflow.com/questions/26485488/add-transparency-when-drawing-a-rectangle-in-opencv
     
  37. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Have you already moved the "OpenCVForUnity/StreamingAssets/" folder to the "Assets/" folder?
     
  38. jevon-williams

    jevon-williams

    Joined:
    Nov 19, 2020
    Posts:
    3
    Hi, do you have a sample for onnx with the WebCamTexture like you have for the Yolo and Tensorflow samples?
     
  39. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
  40. midhapulkit28

    midhapulkit28

    Joined:
    Jan 18, 2019
    Posts:
    7
    The MaskRCNN is not working at all?
     
  41. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Can you tell me the environment you tested?
    Unity Version :
    Editor Platform :
    Build Platform :
     
  42. h9interaction

    h9interaction

    Joined:
    Mar 29, 2018
    Posts:
    1
    Hi,

    I was trying out the facemark APIs, and was wondering if there is any way to allow for a custom number of facemarks?
    I've tried giving it a custom yaml model with 9 facemarks, but the API always seem to be returning 68 elements.
    I'm not sure, but I think the issue might be with the call to face_Facemark_fit_10().
    I've looked at the opencv_contrib code for the facemark module, and there seems to be some hard-coded values for the facemark number.
    Specifically, it seems like the _copyVector2Output() and data_augmentation() functions in facemarkLBF.cpp would need tinkering.

    Could you please let me know what you think?
    This is an absolutely key feature for our project, and it would be really great if this can be supported.

    Thanks!
     
  43. nsmith1024

    nsmith1024

    Joined:
    Mar 18, 2014
    Posts:
    830
    Hello
    I want to buy your OpenCV for Unity.
    I want to use it for pose estimation, for example to get the positions of the body (head, arms, legs, etc.). I downloaded the demo version and tried the HandPostEstimation demo scene, but I don't know how to use it; it doesn't seem to do anything. It just said "touch the area of the open hand", but I don't understand how to do that in the editor.
    Can you please tell me how to use it in the demo so i can see if it really works?
    Thanks
     
  44. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Unfortunately, the OpenCV FaceMark API seems to support only 68-point models.
    If you want to use a model other than 68 points, I recommend using DlibFaceLandmarkDetector instead of FaceMark. DlibFaceLandmarkDetector allows you to use your own trained models.
    https://assetstore.unity.com/packages/tools/integration/dlib-facelandmark-detector-64314
     
  45. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    The HandPostEstimationExample is an example of the following demo rewritten using OpenCVForUnity. The following video is helpful in its usage.
    The techniques used are color segmentation using HSV color space to find the hand contour, and convex hull and convex defect algorithms to count the number of fingers.​
    https://github.com/h3ct0r/hand_finger...


    Also,
    The OpenPoseExample is an example of estimating whole body and finger positions using the dnn module.
     
    Lanre likes this.
  46. sasob8

    sasob8

    Joined:
    Jul 11, 2017
    Posts:
    16
    I want to customize "YoloObjectDetectionWebCamTextureExample". Actually, I want to strip it down to the basics and only use what I need.
    I want to pass my own texture into the material, and then go through the YOLO postprocess. But I get glitchy output, with lots of green rectangles. The original example works fine, so I am missing something.

    In this part i pass webcam image to "webCamTexture2D", and this texture to "imgMat":
    Code (CSharp):
    1.     private void Awake() {
    2.         webCamTexture = new WebCamTexture();
    3.         webCamTexture.Play();
    4.  
    5.         data = new Color32[webCamTexture.width * webCamTexture.height];
    6.         webCamTexture2D = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGB24, true);
    7.         gameObject.GetComponent<Renderer>().material.mainTexture = webCamTexture2D;
    8.     }
    9.  
    10.     void Update() {
    11.         webCamTexture.GetPixels32(data);
    12.  
    13.         webCamTexture2D.SetPixels32(data);
    14.         webCamTexture2D.Apply();
    15.  
    16.         if (webCamTexture2D != null) {
    17.             if (imgMat != null) {
    18.                 imgMat.Dispose();
    19.                 imgMat = null;
    20.             }
    21.  
    22.             imgMat = new Mat(webCamTexture2D.height, webCamTexture2D.width, CvType.CV_8UC4, new Scalar(0, 0, 0, 255));
    23.             Utils.texture2DToMat(webCamTexture2D, imgMat);
    24.         }
    25.     }
    And in this part i postprocess the material, but i get back around 100 of classIds (rectangles all over image)
    Code (CSharp):
    1.     private void Start() {
    2.         if (!string.IsNullOrEmpty(classes)) classes_filepath = Utils.getFilePath("dnn/" + classes);
    3.         if (!string.IsNullOrEmpty(config)) config_filepath = Utils.getFilePath("dnn/" + config);
    4.         if (!string.IsNullOrEmpty(model)) model_filepath = Utils.getFilePath("dnn/" + model);
    5.  
    6.         classNames = readClassNames(classes_filepath);
    7.  
    8.         net = Dnn.readNet(model_filepath, config_filepath);
    9.         outBlobNames = getOutputsNames(net);
    10.         outBlobTypes = getOutputsTypes(net);
    11.     }
    12.  
    13.     private void Update() {
    14.         if (arCamSnapshot.imgMat != null && bgrMat == null) {
    15.  
    16.             rawImageTexture = new Texture2D(arCamSnapshot.imgMat.cols(), arCamSnapshot.imgMat.rows(), TextureFormat.RGBA32, false);
    17.             rawImage.gameObject.GetComponent<RectTransform>().sizeDelta = new Vector2(rawImageTexture.width, rawImageTexture.height);
    18.             rawImage.texture = rawImageTexture;
    19.         }
    20.  
    21.         if (arCamSnapshot.imgMat != null && bgrMat == null) {
    22.             bgrMat = new Mat(arCamSnapshot.imgMat.rows(), arCamSnapshot.imgMat.cols(), CvType.CV_8UC3);
    23.         }
    24.  
    25.         if (arCamSnapshot.imgMat != null && bgrMat != null && net != null) {
    26.  
    27.             Imgproc.cvtColor(arCamSnapshot.imgMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
    28.  
    29.             Size inpSize = new Size(inpWidth > 0 ? inpWidth : bgrMat.cols(),
    30.                 inpHeight > 0 ? inpHeight : bgrMat.rows());
    31.             Mat blob = Dnn.blobFromImage(bgrMat, scale, inpSize, mean, swapRB, false);
    32.  
    33.             net.setInput(blob);
    34.  
    35.             TickMeter tm = new TickMeter();
    36.             tm.start();
    37.  
    38.             List<Mat> outs = new List<Mat>();
    39.             net.forward(outs, outBlobNames);
    40.  
    41.             tm.stop();
    42.             // Debug.Log("Inference time, ms: " + tm.getTimeMilli());
    43.  
    44.             postprocess(arCamSnapshot.imgMat, outs, net, Dnn.DNN_BACKEND_OPENCV);
    45.  
    46.             for (int i = 0; i < outs.Count; i++) {
    47.                 outs[i].Dispose();
    48.             }
    49.             blob.Dispose();
    50.  
    51.             Utils.fastMatToTexture2D(arCamSnapshot.imgMat, rawImageTexture);
    52.         }
    53.     }
    My texture looks normal. If i dont postprocess image from imgMat comes out normal. Bytheway postprocess method is exactly the same as in example. What am i doing wrong?
    Please help! :)
     
  47. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    Thank you very much for reporting.
    Could you send me a screenshot of the output?
    https://enoxsoftware.com/opencvforunity/contact/technical-inquiry/
     
  48. datunito

    datunito

    Joined:
    Dec 6, 2020
    Posts:
    1
    I made a ".weight" file using yolov4
    Is it possible to use this weight file to detect objects like the YoloExample?
    [I use google translate]
     
  49. EnoxSoftware

    EnoxSoftware

    Joined:
    Oct 29, 2014
    Posts:
    1,328
    If the file is usable by the OpenCV 4.5.0 dnn module, you can use it in OpenCVForUnity 2.4.1.
    https://github.com/opencv/opencv/wiki/Deep-Learning-in-OpenCV
     
  50. AbdullahAllawati

    AbdullahAllawati

    Joined:
    Dec 16, 2020
    Posts:
    1
    hello,

    I'm trying to do feature matching in Unity using the OpenCVForUnity package. I have taken a look at the feature2d sample. However, what I want to do is match the frames from a live feed of a webcam with an existing image in the database. Even though the code runs and I get the live feed, there seems to be no matching between the frames and the image in the database. I will share my code with you; if someone could clarify the problem to me, I would appreciate it.

    Thanks

    Code (CSharp):
    1. using System.Collections;
    2. using System.Collections.Generic;
    3. using UnityEngine;
    4. using OpenCVForUnity;
    5. using OpenCVForUnity.ImgprocModule;
    6. using OpenCVForUnity.UnityUtils;
    7. using OpenCVForUnity.VideoModule;
    8. using OpenCVForUnity.UtilsModule;
    9. using OpenCVForUnity.ObjdetectModule;
    10. using OpenCVForUnity.TrackingModule;
    11. using OpenCVForUnity.ImgcodecsModule;
    12. using OpenCVForUnity.CoreModule;
    13. using OpenCVForUnity.Features2dModule;
    14. using OpenCVForUnity.Xfeatures2dModule;
    15. using UnityEngine.UI;
    16. using OpenCVForUnityExample;
    17. using UnityEngine.SceneManagement;
    18. using OpenCVForUnity.UnityUtils.Helper;
    19. using System.Timers;
    20. using System;
    21.  
    [RequireComponent(typeof(WebCamTextureToMatHelper))]
    public class Scanner : MonoBehaviour
    {
        /// <summary>Reference image whose ORB features are matched against the live webcam feed.</summary>
        public Texture2D baseTexture;

        // Display texture the processed (grayscale) webcam frame is written into.
        Texture2D texture;
        WebCamTextureToMatHelper webCamTextureToMatHelper;
        Color32[] colors;
        Mat mainMat = new Mat();
        Mat grayMat = new Mat();
        Mat grayMat2 = new Mat();
        // Features of the reference image — filled once, reused every frame.
        MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
        Mat descriptors1 = new Mat();
        ORB detector;
        DescriptorMatcher matcher;
        // Replaces the original's "int x" one-shot flag for the reference-image setup.
        bool baseImageProcessed = false;

        void Start()
        {
            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
            webCamTextureToMatHelper.Initialize();
        }

        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// Sets up the display texture, quad scale and orthographic camera size.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");
            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
            colors = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            // Fit the quad to the screen: pick the limiting dimension.
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        }

        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");
        }

        void Update()
        {
            // One-time setup: extract ORB features from the reference image.
            if (!baseImageProcessed)
            {
                mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
                Utils.texture2DToMat(baseTexture, mainMat);

                // Utils.texture2DToMat writes channels in RGB order, so convert with an
                // RGB code (the original used COLOR_BGR2GRAY, which swaps channel weights).
                Imgproc.cvtColor(mainMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                detector = ORB.create(1000);
                // Create the matcher once instead of once per frame.
                matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);

                // BUG FIX: the original declared NEW local keypoints1/descriptors1 here,
                // shadowing the class fields, and ran detectAndCompute on grayMat2 (the
                // still-empty webcam mat). The field descriptors1 therefore stayed empty
                // and knnMatch could never produce matches. Detect on the reference
                // image's gray mat and store the results in the fields.
                detector.detectAndCompute(grayMat, new Mat(), keypoints1, descriptors1);

                baseImageProcessed = true;
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.cvtColor(rgbaMat, grayMat2, Imgproc.COLOR_RGBA2GRAY);

                using (MatOfKeyPoint keypoints2 = new MatOfKeyPoint())
                using (Mat descriptors2 = new Mat())
                {
                    detector.detectAndCompute(grayMat2, new Mat(), keypoints2, descriptors2);

                    List<DMatch> goodMatches = new List<DMatch>();
                    // knnMatch throws/returns nothing useful on empty descriptors; guard both sides.
                    if (!descriptors1.empty() && !descriptors2.empty())
                    {
                        List<MatOfDMatch> knnMatches = new List<MatOfDMatch>();
                        matcher.knnMatch(descriptors1, descriptors2, knnMatches, 2);
                        foreach (MatOfDMatch m in knnMatches)
                        {
                            DMatch[] pair = m.toArray();
                            // Lowe's ratio test; guard rows that hold fewer than 2 candidates.
                            if (pair.Length >= 2 && pair[0].distance < 0.7 * pair[1].distance)
                            {
                                goodMatches.Add(pair[0]);
                            }
                            m.Dispose();
                        }
                    }

                    Debug.Log(goodMatches.Count);
                    if (goodMatches.Count >= 15)
                    {
                        Debug.Log("Object Detected");
                    }
                }

                Utils.matToTexture2D(grayMat2, texture, colors);
            }
        }
    }
     
unityunity