Search Unity

  1. Welcome to the Unity Forums! Please take the time to read our Code of Conduct to familiarize yourself with the forum rules and how to post constructively.
  2. We have updated the language to the Editor Terms based on feedback from our employees and community. Learn more.
    Dismiss Notice
  3. Join us on November 16th, 2023, between 1 pm and 9 pm CET for Ask the Experts Online on Discord and on Unity Discussions.
    Dismiss Notice

[Released] Glyph Recognition tool

Discussion in 'Assets and Asset Store' started by AdVd, Nov 21, 2015.

  1. AdVd

    AdVd

    Joined:
    Jan 16, 2015
    Posts:
    5
    https://www.assetstore.unity3d.com/en/#!/content/48975

    Create your own multi stroke glyphs using the editor window. The glyph input component will recognize the symbols you draw and will tell you which is the closest match from a list of glyph targets. A glyph display component is also provided and can work as a 2D LineRenderer for the UI.

    Works with both mouse and touch. The stroke display effects are customizable through scripting.

    Requires Unity 5.2.2, but I might add support for 4.6.5 or higher if someone needs it.

    Watch this tutorial to learn how to set up the glyph input component to recognize your glyphs:


    This video shows a few tests with the different recognition methods:



    GlyphRecognition tool & Navigation tool

    With the Glyph Recognition tool you can let users perform actions through the glyph input. For instance, you can use different spells by drawing their glyphs on the screen.
    With the Navigation tool you can customize a planar navigation area where your agents will be able to find paths. You can customize the way the agent moves through the path and you can modify the navigation area at runtime via scripting.

    Watch a demo of the usage of both my Glyph Recognition tool and my Navigation tool:


    You can get the Navigation tool here:

    https://www.assetstore.unity3d.com/en/#!/content/40037

    Navigation tool thread
     
    Last edited: Nov 21, 2015
    theANMATOR2b likes this.
  2. AdVd

    AdVd

    Joined:
    Jan 16, 2015
    Posts:
    5
    Update!

    Solved an error that caused the glyphs to be rendered black in an Android device.
    The new components for stroke visualization work properly in Android and have extra elements on the inspector: Base color for the vertices of the mesh and raycastTarget.
     
  3. AdVd

    AdVd

    Joined:
    Jan 16, 2015
    Posts:
    5
    Hi,

    You are not the first to ask something similar :). I will start by saying that this is not supported by the plugin. However, the plugin can be modified to work with VR, assuming this input can be transformed into a normalized 2D space.

    This is a fragment of the script that handles the input. This script also handles how the input is treated afterwards, but I removed that part. The GlyphDrawInput class implements the IBeginDragHandler, IDragHandler, IEndDragHandler and IPointerClickHandler interfaces to receive the input from the pointer (touch or mouse). It should be possible to replace these methods with others that take your input and build the strokes in a similar way. I hope this snippet helps you to determine how hard it would be to make those changes before you buy ;).

    Code (CSharp):
    1.  
    2. using UnityEngine;
    3. using UnityEngine.UI;
    4. using UnityEngine.Events;
    5. using UnityEngine.EventSystems;
    6. using System.Collections.Generic;
    7. using System;
    8.  
    9. namespace AdVd.GlyphRecognition
    10. {
    11.     /// <summary>
    12.     /// UI component to draw glyphs and find the closest match within a set of stored glyphs using a specific matching method.
    13.     /// </summary>
    14.     [RequireComponent(typeof(RectTransform), typeof(Image))]
    15.     public class GlyphDrawInput : MonoBehaviour, IBeginDragHandler, IDragHandler, IEndDragHandler, IPointerClickHandler {
    16.  
    17.         [...]
    18.  
    19.         Vector2 prevPos;
    20.         bool RectEventPoint(Vector2 position, Camera pressEventCamera, out Vector2 localPoint){ // This method returns the normalized position of the pointer and whether that position is inside the drawing area
    21.             RectTransform rt =  transform as RectTransform; // RequireComponent(typeof(RectTransform)) on the class means this cast should not yield null
    22.             Rect r = rt.rect;
    23.             RectTransformUtility.ScreenPointToLocalPointInRectangle(rt, position, pressEventCamera, out localPoint); // Convert screen-space pointer position to this rect's local space
    24.  
    25.             localPoint-=r.center; // Re-center so (0,0) is the middle of the rect
    26.             localPoint.x/=r.width*normalizedGlyphSize; localPoint.y/=r.height*normalizedGlyphSize; // Scale into normalized glyph space; normalizedGlyphSize is declared in the elided [...] section
    27.             return RectTransformUtility.RectangleContainsScreenPoint(rt, position, pressEventCamera); // True only when the pointer is inside the drawing area
    28.         }
    29.  
    30.         public void OnBeginDrag (PointerEventData eventData) // Starts a new stroke when a left-button (or touch) drag begins
    31.         {
    32.             if (eventData.button!=PointerEventData.InputButton.Left) return; // Only left button / touch input draws
    33.             stroke=new List<Vector2>(); // 'stroke' is a field declared in the elided [...] section; a non-null stroke marks a drag in progress
    34.             Vector2 localPoint;
    35.             if (RectEventPoint(eventData.pressPosition, eventData.pressEventCamera, out localPoint)) stroke.Add (prevPos=localPoint); // Record the first point (and seed prevPos) only if the press landed inside the rect
    36.         }
    37.  
    38.         public void OnDrag (PointerEventData eventData) // Appends points to the current stroke while dragging, resampling at a fixed distance when configured
    39.         {
    40.             if (eventData.button!=PointerEventData.InputButton.Left) return; // Only left button / touch input draws
    41.             if (stroke!=null){ // Null stroke means no drag in progress (e.g. the press started outside the rect button filter)
    42.                 Vector2 currPos;
    43.                 if (RectEventPoint(eventData.position, eventData.pressEventCamera, out currPos)){ // Ignore drag positions outside the drawing area
    44.                     if (sampleDistance<Stroke.minSampleDistance){//No resample: store every raw pointer position
    45.                         stroke.Add(currPos);
    46.                     }
    47.                     else{//Resample: emit points at fixed spacing along the segment prevPos->currPos
    48.                         Vector2 dir=(currPos-prevPos);
    49.                         float dist=dir.magnitude;
    50.                         if (dist>0) dir/=dist; // Normalize direction; guard against zero-length movement
    51.                         while(dist>sampleDistance){ // Sample the stroke with stable rate
    52.                             Vector2 point=prevPos+dir*sampleDistance;
    53.                             stroke.Add (point);
    54.                             prevPos=point; // Advance the resampling cursor; leftover distance (< sampleDistance) carries to the next OnDrag
    55.                             dist-=sampleDistance;
    56.                         }
    57.                     }
    58.                     if (OnPointDraw!=null){ // Notify listeners with the sampled points plus the current (not yet committed) cursor position
    59.                         Vector2[] points=new Vector2[stroke.Count+1];
    60.                         stroke.CopyTo(points); points[points.Length-1]=currPos;
    61.                         OnPointDraw(points);
    62.                     }
    63.                 }
    64.             }
    65.         }
    66.  
    67.         public void OnEndDrag (PointerEventData eventData) // Finalizes the stroke in progress and commits it to the stroke list
    68.         {
    69.             if (eventData.button!=PointerEventData.InputButton.Left) return; // Only left button / touch input draws
    70.             if (stroke!=null){
    71.                 if (stroke.Count<2){ // Discard degenerate strokes (fewer than two points)
    72.                     stroke=null;
    73.                     if (OnPointDraw!=null) OnPointDraw(null); // Tell listeners the in-progress stroke was cancelled
    74.                     return;
    75.                 }
    76.                 Vector2 currPos;
    77.                 if (RectEventPoint(eventData.position, eventData.pressEventCamera, out currPos)) stroke.Add(currPos); // Append the release point only if it is inside the drawing area
    78.                 if (strokeList==null) strokeList=new List<Stroke>(); // Lazily create the committed-strokes list ('strokeList' is declared in the elided [...] section)
    79.                 Stroke newStroke=new Stroke(stroke.ToArray());
    80.                 strokeList.Add(newStroke);
    81.                 stroke=null; // Null marks that no drag is in progress anymore
    82.                 if (OnStrokeDraw!=null) OnStrokeDraw(strokeList.ToArray()); // Notify listeners with all strokes drawn so far
    83.             }
    84.         }
    85.  
    86.         public void OnPointerClick (PointerEventData eventData) // A tap (click with no drag in progress) optionally triggers recognition
    87.         {
    88.             if (eventData.button!=PointerEventData.InputButton.Left) return; // Only left button / touch input counts
    89.             if (stroke==null && castOnTap){ // 'castOnTap' is declared in the elided [...] section; stroke==null means this click was not the end of a drag
    90.                 Cast();//This tells the GlyphDrawInput to run the recognition algorithm
    91.             }
    92.         }
    93.  
    94.         [...]
    95.  
    96.     }
    97. }
    98.  
    Feel free to ask if you have any questions.
     
  4. AdVd

    AdVd

    Joined:
    Jan 16, 2015
    Posts:
    5
    Yes, the input is received from Unity's UI/EventSystems on a canvas object. The result is displayed through custom Graphic components using CanvasRenderers.