
Problem with using buttons to switch between NavigationRecognizer and ManipulationRecognizer

Discussion in 'VR' started by kayeejoe, Feb 8, 2018.

  1. kayeejoe

    kayeejoe

    Joined:
    Jan 12, 2018
    Posts:
    3
    Apologies if I am using this thread incorrectly.

    I followed the Microsoft tutorial to create a project that I can modify to fit my requirements. The problem is that when I press the "rotate button", I can rotate the focused object correctly; however, when I stop and release the tap gesture, the state changes back to the 'translate' state, which means that if I want to rotate again, I have to press the rotate button once more.

    I have tried many ways to achieve this; the most promising approach so far is the following:
    I added two tasks to my ButtonAction script, so that when I click a button, the recognizer changes to the corresponding state. Here is the code:

    Code (CSharp):
    using System.Collections;
    using System.Collections.Generic;
    using UnityEngine;
    using UnityEngine.UI;
    //using UnityEngine.XR.WSA.Input;

    namespace Academy.HoloToolkit.Unity
    {
        public class ButtonAction : MonoBehaviour
        {
            public Button AssignYourButton;

            // Use this for initialization
            void Start()
            {
                //Button btn = AssignYourButton.GetComponent<Button>();
                AssignYourButton.GetComponent<Button>().onClick.AddListener(TaskOnClickTranslate);
                AssignYourButton.GetComponent<Button>().onClick.AddListener(TaskOnClickRotate);
                //btn.onClick.AddListener(TaskOnClickRotate);
                //btn.onClick.AddListener(TaskOnClickReset);
            }

            // Update is called once per frame
            void Update()
            {
                //Button btn = AssignYourButton.GetComponent<Button>();
                if (AssignYourButton.GetComponent<Button>().name == "Translate")
                {
                    TaskOnClickTranslate();
                }
                else if (AssignYourButton.GetComponent<Button>().name == "Rotate")
                {
                    TaskOnClickRotate();
                }
                //else if(btn.name == "Reset")
                //{
                //    TaskOnClickReset();
                //}
            }

            void TaskOnClickTranslate()
            {
                //GameObject astro=GameObject.Find
                Debug.Log("You have clicked the button for translation!");
                GestureManager.Instance.Transition(GestureManager.Instance.ManipulationRecognizer);
                print("Translate");
            }

            void TaskOnClickRotate()
            {
                Debug.Log("You have clicked the button for rotation!");
                GestureManager.Instance.Transition(GestureManager.Instance.NavigationRecognizer);
                print("Rotate");
            }
        }
    }
    And in the GestureManager script there is nothing special: the two recognizers are initialized in the Awake function, and nothing happens in the Update function.
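
    Roughly, the Awake setup looks like this (a trimmed sketch written from memory of the Microsoft tutorial code rather than copied from my project, so the event hookups are omitted and the exact gesture flags may differ; the Transition method it calls is shown further below):

    Code (CSharp):
    // Sketch only: fragment of the tutorial's GestureManager singleton, not the full class.
    // GestureRecognizer and GestureSettings come from UnityEngine.XR.WSA.Input.
    public GestureRecognizer NavigationRecognizer { get; private set; }
    public GestureRecognizer ManipulationRecognizer { get; private set; }
    public GestureRecognizer ActiveRecognizer { get; private set; }

    void Awake()
    {
        // Navigation recognizer: tap plus horizontal/vertical navigation (used for rotation).
        NavigationRecognizer = new GestureRecognizer();
        NavigationRecognizer.SetRecognizableGestures(
            GestureSettings.Tap | GestureSettings.NavigationX | GestureSettings.NavigationY);

        // Manipulation recognizer: translation of the focused object.
        ManipulationRecognizer = new GestureRecognizer();
        ManipulationRecognizer.SetRecognizableGestures(GestureSettings.ManipulationTranslate);

        // Start in the 'translate' state by making the manipulation recognizer active.
        Transition(ManipulationRecognizer);
    }
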
    There is also nothing new in the GestureAction script:
    Code (CSharp):
        private void Start()
        {
        }

        void Update()
        {
        }

        private void PerformRotation()
        {
            if (GestureManager.Instance.IsNavigating
                &&
                (!ExpandModel.Instance.IsModelExpanded ||
                (ExpandModel.Instance.IsModelExpanded && HandsManager.Instance.FocusedGameObject == gameObject))
                )
            {
                /* TODO: DEVELOPER CODING EXERCISE 2.c */

                // 2.c: Calculate rotationFactor based on GestureManager's NavigationPosition.X and multiply by RotationSensitivity.
                // This will help control the amount of rotation.
                rotationFactorX = GestureManager.Instance.NavigationPosition.x * RotationSensitivity;
                rotationFactorY = GestureManager.Instance.NavigationPosition.y * RotationSensitivity;

                // 2.c: transform.Rotate along the Y axis using rotationFactor.
                transform.Rotate(new Vector3(-1 * rotationFactorY, -1 * rotationFactorX, 0));
            }
        }

        void PerformManipulationStart(Vector3 position)
        {
            manipulationPreviousPosition = position;
        }

        void PerformManipulationUpdate(Vector3 position)
        {
            if (GestureManager.Instance.IsManipulating)
            {
                //GestureManager.Instance.Transition(GestureManager.Instance.ManipulationRecognizer);
                /* TODO: DEVELOPER CODING EXERCISE 4.a */

                Vector3 moveVector = Vector3.zero;

                // 4.a: Calculate the moveVector as position - manipulationPreviousPosition.
                moveVector = position - manipulationPreviousPosition;

                // 4.a: Update the manipulationPreviousPosition with the current position.
                manipulationPreviousPosition = position;

                // 4.a: Increment this transform's position by the moveVector.
                transform.position += moveVector;
            }
        }
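
    Since the whole problem is about the active recognizer switching back, here is roughly what the Transition method in GestureManager looks like (again written from memory of the tutorial, so it may differ slightly from the actual file):

    Code (CSharp):
    public void Transition(GestureRecognizer newRecognizer)
    {
        if (newRecognizer == null)
        {
            return;
        }

        if (ActiveRecognizer != null)
        {
            if (ActiveRecognizer == newRecognizer)
            {
                // The requested recognizer is already active; nothing to do.
                return;
            }

            // Stop the currently active recognizer before switching.
            ActiveRecognizer.CancelGestures();
            ActiveRecognizer.StopCapturingGestures();
        }

        // Activate the requested recognizer.
        newRecognizer.StartCapturingGestures();
        ActiveRecognizer = newRecognizer;
    }

    As far as I can tell, nothing in Transition itself switches the state back after a gesture completes.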


  2. kayeejoe

    kayeejoe

    Joined:
    Jan 12, 2018
    Posts:
    3
    Still trying to find the cause of this problem..