Search Unity

Why is XR Input so overcomplicated

Discussion in 'VR' started by CGPepper, Apr 15, 2020.

  1. CGPepper

    CGPepper

    Joined:
    Jan 28, 2013
    Posts:
    152
    What is the deal with the new XR system.
    Checkout the code below that you need to write to get primary button input.
    You gotta mess with Lists of InputDevices, XRNodes, onAwake, onDisable, onEnable, InputDevices_deviceConnected, InputDevices_deviceDisconnected

    Why can't we just get the Grip value event. I don't care if its left hand, right hand. Hell, i'll take the grip event from my eye tracking device if it sends it.
    Why is it so convoluted?


    Code (CSharp):
    1. using System.Collections.Generic;
    2. using UnityEngine;
    3. using UnityEngine.Events;
    4. using UnityEngine.XR;
    5.  
/// <summary>
/// UnityEvent carrying a single bool payload: the combined primary-button
/// pressed state (true = pressed). Invoked by PrimaryButtonWatcher on state changes.
/// </summary>
[System.Serializable]
public class PrimaryButtonEvent : UnityEvent<bool> { }
    9. public class PrimaryButtonWatcher : MonoBehaviour
    10. {
    11.     public PrimaryButtonEvent primaryButtonPress;
    12.  
    13.     private bool lastButtonState = false;
    14.     private List<InputDevice> devicesWithPrimaryButton;
    15.  
    16.     private void Awake()
    17.     {
    18.         if (primaryButtonPress == null)
    19.         {
    20.             primaryButtonPress = new PrimaryButtonEvent();
    21.         }
    22.  
    23.         devicesWithPrimaryButton = new List<InputDevice>();
    24.     }
    25.  
    26.     void OnEnable()
    27.     {
    28.         List<InputDevice> allDevices = new List<InputDevice>();
    29.         InputDevices.GetDevices(allDevices);
    30.         foreach(InputDevice device in allDevices)
    31.             InputDevices_deviceConnected(device);
    32.  
    33.         InputDevices.deviceConnected += InputDevices_deviceConnected;
    34.         InputDevices.deviceDisconnected += InputDevices_deviceDisconnected;
    35.     }
    36.  
    37.     private void OnDisable()
    38.     {
    39.         InputDevices.deviceConnected -= InputDevices_deviceConnected;
    40.         InputDevices.deviceDisconnected -= InputDevices_deviceDisconnected;
    41.         devicesWithPrimaryButton.Clear();
    42.     }
    43.  
    44.     private void InputDevices_deviceConnected(InputDevice device)
    45.     {
    46.         bool discardedValue;
    47.         if (device.TryGetFeatureValue(CommonUsages.primaryButton, out discardedValue))
    48.         {
    49.             devicesWithPrimaryButton.Add(device); // Add any devices that have a primary button.
    50.         }
    51.     }
    52.  
    53.     private void InputDevices_deviceDisconnected(InputDevice device)
    54.     {
    55.         if (devicesWithPrimaryButton.Contains(device))
    56.             devicesWithPrimaryButton.Remove(device);
    57.     }
    58.  
    59.     void Update()
    60.     {
    61.         bool tempState = false;
    62.         foreach (var device in devicesWithPrimaryButton)
    63.         {
    64.             bool primaryButtonState = false;
    65.             tempState = device.TryGetFeatureValue(CommonUsages.primaryButton, out primaryButtonState) // did get a value
    66.                         && primaryButtonState // the value we got
    67.                         || tempState; // cumulative result from other controllers
    68.         }
    69.  
    70.         if (tempState != lastButtonState) // Button state changed since last frame
    71.         {
    72.             primaryButtonPress.Invoke(tempState);
    73.             lastButtonState = tempState;
    74.         }
    75.     }
    76. }
     
  2. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    OR you could do it in one line:

    http://snapandplug.com/xr-input-toolkit-2020-faq/#FAQ:-How-do-I-detect-button.IsPressed?

    You've picked an example that was written to show a fully-automated way to intelligently detect and manage a complex setup with changing devices and things being plugged and unplugged. That was written by the Unity team to demonstrate the different parts fitting together so that you could customise it completely and see all the working parts done right (personally, I felt the docs were pretty clear on this topic, but I guess if you read them too quickly you might not notice what's written about it).

    It wasn't written as "this is the only way to detect a button press".
     
    colinleet and gjf like this.
  3. CGPepper

    CGPepper

    Joined:
    Jan 28, 2013
    Posts:
    152
    Thanks for the reply man.
    Though your link does seem to be exactly the same code.

    Get a list of all attached devices
    Grab index 0 for a specific device
    Modify a variable by reference in an out parameter.

    I'd still need to implement the other lines if i want to attach a device on runtime, or the code will just not work.

    Sounds exactly like "the only way to detect a button press"
     
  4. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    You pasted > 50 lines of code. There's quite a big difference :).

    You have to get the device globally for your app anyway. It's not really part of checking the button press - it's code that will be shared across many different parts of your code.

    Anyway. I wrote a tiny wrapper that works for 90% of the cases I want to cover, and now do it in one line in all my projects. If you feel that's still too much, I'm sorry to hear that. But I don't agree that XR is "over-complicated". I would actually argue the opposite: it's quite impressively simple and clean.
     
    colinleet and gjf like this.
  5. Wattosan

    Wattosan

    Joined:
    Mar 22, 2013
    Posts:
    460
    How were you able to get the 1D axis for pressing the trigger for example? How do you know if the trigger is half way? I do realize that you could calculate it with the threshold but I would not want to do that for every possible float value. I'd like to get a simple read of the float value as I need to animate the character hands.

    Another question is that how do you get the up and down events for the buttons? These are usually built into the input system but here it does not seem to be the case, at least when looking at the vr escape room project, which Unity released a while ago.
     
  6. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    You query the "how far pressed is the trigger" variable, which is a float going from 0 to 1. There's only three lines of code that query every possible input: one that returns bool, one that returns float, and one that returns Vector2. The XR library is typesafe configured so that when you specify what you ask for, your IDE will autocomplete the correct method / return type.

    There are no up/down events in the real world. It's standard practice with any hardware-controller (playstation, xbox, etc) input systems that you decide what kind of interpretation you want to use to simulate "button press". e.g. on the Quest, I find that Oculus hardware buttons because they have a powerful spring inside them will vibrate up and down over the course of half a second or so - for one of my projects, I treat a "press" as any period that started with button on, and then the button was on or off but never off for longer than 0.2 seconds, until the button has been off for 0.2 seconds.

    In a different project, that requires faster response, I have different logic.

    This stuff varies hugely from game to game, and even from controller to controller (I'm expecting that a year from now the Quest's controllers will have lost a lot of their springiness, and I'll have to tweak some of my algorithms. Or maybe they won't - have to wait and see) which is why it's normal for an input library to give you "on" and "off" (and if the hardware supports it: "fraction on") and then expect you to decide the rest yourself.
     
  7. Wattosan

    Wattosan

    Joined:
    Mar 22, 2013
    Posts:
    460
    How do I query for these? What are the 3 lines of code?

    Every earlier implementation of an input system has a built in button down, which gets called as soon as the button moves down from its upmost position. Same for having a button up event for when the button is fully released. The states in between are built into these systems. Unlike the old Unity's input system or the SteamVR input system or the OVRInput, we have to implement the states ourselves. And btw, there is a built in value called threshold for when the pressed state is actually reached.

    But still, if I want to decide on if the button is in a pressed state after having 0.3 of it pushed down, this is something I could easily implement on top of the state machine by setting a threshold and checking the pressed value (from 0 to 1). The API should easily expose these values.
     
  8. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    The exact same line given above, except you substitute different keys from the "CommonUsages" class. The link I gave you has a link to the Unity manual page that contains a large table showing all possible values and their meanings across all different VR headsets/controllers.

    State-machines are a terrible interface for input system programming. You can do some great toy projects and prototypes, but for a real game you want to do it correctly, and have low-level access to the controller input data, and do your own management of deadzones, sensitivity, input-latency, etc. XR gives us most of that already - and it still only requires one line of code. I think it's pretty good, personally.
     
    gjf likes this.
  9. Wattosan

    Wattosan

    Joined:
    Mar 22, 2013
    Posts:
    460
    Right, so I can get whether the button is pressed at all or not, and I can get how much it is pressed, and also the Axis2D if using a touchpad with either these:
    inputDevice.TryGetFeatureValue(CommonUsages.triggerButton, out bool value))

    inputDevice.TryGetFeatureValue(CommonUsages.trigger, out float value))


    But tell me, how do I, in 1 line get the events that the button went from 100% up (so it might not even be touched by the player) to less than 100%? Right...that is the button down event.

    So for example, if I want to detect that the keyboard key Space was pressed but I do not care about the fact that it is being held down, I use Input.GetKeyDown() or Input.GetButtonDown(). How do I achieve the same functionality in 1 line? I don't care about thresholds and other stuff like that. If I would care, I could implement it by checking the Axis1D value but I don't.

    And the beauty of this Input.GetKeyDown() is that I do not need to track the button state in my script. So it is very easy and fast to use.
     
  10. CGPepper

    CGPepper

    Joined:
    Jan 28, 2013
    Posts:
    152
    Having to track the state of buttons surprised me as well.
    It's fine for one or two actions, but once you map every single button, it can get cluttered.
    (1/3 of the code in my sample above is the state tracking)
     
  11. jiraphatK

    jiraphatK

    Joined:
    Sep 29, 2018
    Posts:
    300
    Yeah. Unity needs to have an official static manager class to address this boilerplate of registering, unregistering and checking for valid device code.
    Currently, Unity uses multiple lines of code to register and track devices in multiple places which seems very redundant to me. They write the register unregister boilerplate in both XRController, SnapTurnProvider, and ControllerManager. :confused:
    I think the best solution, for now, would be to create your own manager class or use the new InputSystem. But last time I checked, the new system crashed with XRITK somehow.
     
  12. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    Why is it cluttered? They made a common interface, so you pass in a variable which is the button/thing/whatever you want to query. Make it into a method, and then parameterise your input.
     
  13. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    The keyboard is not a game controller. It is much less powerful, contains much cheaper electronics, does not wear out over time (relatively), does not have capacitive touch, does not register analog inputs.

    It uses a simpler API because it's a simpler device for simpler use that doesn't work great for games. If you want to make games that are simple enough to only use keyboard-level input - great. Go for it! - but it's not really fair to call that a problem with the game engine.

    Personally: I appreciate a game-engine that treats input and controllers properly (Which, let's be clear, Unity did OK back in 2004, but then they failed to update it for so many years that it rapidly fell behind the curve - I don't *at all* defend how long it's taken them to write a decent input layer to replace the legacy one :). But what's coming through now - it's pretty good).

    It sounds like you want some kind of "VR Game Maker" that adds a lot of high-level simplifications (which will, unfortunately, make it work badly with a lot of game designs - but for the rest it'll be fine / good-enough). That's a great idea - but it's not the kind of thing Unity makes (nor would I want them to. Asset store publishers should make it. Or open-source developers like you and me. I want Unity to focus on cross-integrating the different hardware layers and dealing with all that **** that costs me hundreds of hours to build and debug -- doing some simple input handling is something that takes me all of ten minutes, so it's no problem at all)
     
  14. Wattosan

    Wattosan

    Joined:
    Mar 22, 2013
    Posts:
    460
    I don't agree with you completely. Unity has always been a platform that tries to be very welcoming to new developers as well. For you it might take 10 minutes. For people new in the field, not having a simple to use system, will most likely scare them away. You do not get new people (who could become potential expert engine users in the future) on board by making it difficult to implement something as simple as detecting a button press. It is great that they are allowing you build your own system and customize however you want this, however it feels weird that they are not catering to the less experienced. This is something they've always done up to this point. Just look at any older VR management/Input systems. Every previous widely used input system for VR in Unity has simple methods for getting the button up and down events.

    It is true that new people will probably get their thing done with the functionality provided to them via the XR Controllers; however, as soon as you'd like to extend it a little bit, it is more difficult than it needs to be.

    And honestly, if they have experts working on the team like you seem to be, then spending that extra 10 minutes to develop the common use functionality, will only be beneficial to everybody in the long run.

    Also, while I do consider myself not an amateur anymore, it took me more than 10 minutes to implement the system for my every requirement. Much time went simply into researching that do they really not have these common use functionalities implemented like they usually do. Something that should have taken me a total of a minute or two to go through their API functions and implement for my use cases, took me more than an hour. This includes writing on the forum. And as can be seen, I am not the only one. So simply to avoid confusion, caused by lack of common functionality, they should implement the functions in their API, i.e make the system friendlier to use.
     
    Last edited: Apr 26, 2020
  15. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    I noticed Unity's new Input library doubles up on the functionality so you could use that and all it's niceness instead. I haven't looked too deeply but I saw I was able to access similar things so that's probably a thing.
     
    a436t4ataf and Wattosan like this.
  16. Wattosan

    Wattosan

    Joined:
    Mar 22, 2013
    Posts:
    460
    Thanks, I'll look it up!
     
  17. a436t4ataf

    a436t4ataf

    Joined:
    May 19, 2013
    Posts:
    1,933
    Yeah, this is one of my top 3 hopes with XRIT: that before they declare "version 1.0.0, officially released" they rewrite the entire docs from scratch. Right now lots of core documentation is completely missing (I mean: basic stuff, without which you cannot realistically use XRIT - like "how does InteractionManager work, what is it? [it has phases, which are never documented anywhere, but you can find them in the source code. These are fundamentally important!]" - which, ironically, other parts of the docs say "see InteractionManager docs for info" but there are *no* docs for that class :)).

    (my other two are probably: "search/replace remove ALL uses of the 'internal' keyword" ;), and "implement the missing APIs for hardware features (e.g. capacitive touch on Oculus, e.g. finger-tracking - really really important stuff which there is no way for us to workaround (*))

    (*) although finger-tracking can probably be done if you go and manually integrate every single vendor-specific library ... at that point, there is literally no point in using XRIT: you've rewritten it yourself!
     
    gjf likes this.
  18. fuzzy3d

    fuzzy3d

    Joined:
    Jun 17, 2009
    Posts:
    228
    myaydin and PsiconLab like this.
  19. johanhelsing_attensi

    johanhelsing_attensi

    Joined:
    Mar 24, 2020
    Posts:
    8
    Bit of a necro, but this was the first hit for me on google... Was also baffled by the lack of simple convenience methods for something so common. This is what I ended up doing:

    Code (CSharp):
    1. /// wrapper around unity xr interaction toolkit input to offer methods like GetButtonDown
    2. // ReSharper disable once ClassNeverInstantiated.Global
    3. public class XRInput : ITickable
    4. {
    5.     private readonly XRNode[] _deviceNodes = { XRNode.RightHand, XRNode.LeftHand };
    6.     private readonly InputHelpers.Button[] _trackedButtons = { InputHelpers.Button.PrimaryButton, InputHelpers.Button.SecondaryButton };
    7.     private readonly Dictionary<InputHelpers.Button, bool> _pressedThisFrame = new();
    8.     private readonly Dictionary<InputHelpers.Button, bool> _pressedLastFrame = new();
    9.  
    10.     public XRInput()
    11.     {
    12.         foreach (var button in _trackedButtons)
    13.         {
    14.             _pressedLastFrame[button] = false;
    15.             _pressedThisFrame[button] = false;
    16.         }
    17.     }
    18.  
    19.     public bool AnyButtonDown => _pressedThisFrame.Keys.Any(GetButtonDown);
    20.  
    21.     // whether the button is pressed on any of the controllers
    22.     public bool GetButton(InputHelpers.Button button)
    23.     {
    24.         foreach (var deviceNode in _deviceNodes)
    25.         {
    26.             var device = InputDevices.GetDeviceAtXRNode(deviceNode);
    27.             if (device.IsPressed(button, out var pressed) && pressed)
    28.             {
    29.                 return true;
    30.             }
    31.         }
    32.         return false;
    33.     }
    34.  
    35.     /// Whether the button was pressed during this frame (Update)
    36.     public bool GetButtonDown(InputHelpers.Button button)
    37.     {
    38.         return _pressedThisFrame[button] && !_pressedLastFrame[button];
    39.     }
    40.  
    41.     /// Whether the button was released during this frame (Update)
    42.     public bool GetButtonUp(InputHelpers.Button button)
    43.     {
    44.         return !_pressedThisFrame[button] && _pressedLastFrame[button];
    45.     }
    46.  
    47.     // called every Update
    48.     public void Tick()
    49.     {
    50.         foreach (var button in _trackedButtons)
    51.         {
    52.             _pressedLastFrame[button] = _pressedThisFrame[button];
    53.             _pressedThisFrame[button] = GetButton(button);
    54.         }
    55.     }
    56. }
    Maybe this saves someone some time. You could easily turn it a singleton with static methods if you prefer that.