Does the Unity (2018) native VR integration already support the Oculus Go (including controller tracking/input) or does it currently require any plugins?
Mostly. Here's the code I use for controller tracking:

Code (CSharp):
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Drives a Transform with the tracked pose of the user's active
/// (dominant-hand) Oculus controller.  Requires OVRInput, which ships
/// with the Oculus Utilities plug-in.
/// </summary>
public class OculusVRTracking : MonoBehaviour {
    // Transform to move/rotate to match the physical controller.
    public Transform controller;

    // True when the user holds the controller in the left hand.
    public static bool leftHanded { get; private set; }

    void Awake() {
#if UNITY_EDITOR
        leftHanded = false;  // (whichever you want to test here)
#else
        // Don't infer handedness from GetControllerPositionTracked here:
        // tracking is often not established yet during Awake, so a
        // left-handed user can be mis-detected as right-handed.  OVRInput
        // exposes the user's configured dominant hand directly.
        leftHanded = (OVRInput.GetDominantHand() == OVRInput.Handedness.LeftHanded);
#endif
    }

    void Update() {
        // Pick the controller for the dominant hand, and only apply the
        // pose while it is actually being tracked (avoids snapping to the
        // origin when the controller sleeps or loses tracking).
        OVRInput.Controller c = leftHanded ?
                OVRInput.Controller.LTouch : OVRInput.Controller.RTouch;
        if (OVRInput.GetControllerPositionTracked(c)) {
            controller.localRotation = OVRInput.GetLocalControllerRotation(c);
            controller.localPosition = OVRInput.GetLocalControllerPosition(c);
        }
    }
}

This does require OVRInput, which is part of the Oculus Utilities plug-in. However, I recommend simple code like the above rather than Oculus's sample code, which grossly overcomplicates it. With the above for tracking controllers, and the camera automatically tracking the head, you're just about set. The only other thing you might need is checking the various controller buttons; you can do that to some extent through the standard Input class, but to get things like touches on the thumb disc, you need to use OVRInput again.
Here's how I do it (again, much simpler than the Oculus code makes it seem):

Code (CSharp):
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Maps both desktop (editor) inputs and Oculus Go controller inputs onto
/// a common set of virtual buttons, queried via the static Get/GetDown/GetUp
/// methods (mirroring Unity's Input API).  Requires OVRInput on device.
/// </summary>
public class OculusInput : MonoBehaviour {
    // Virtual buttons which we adapt to both desktop and controller inputs:
    public enum VButton {
        Left = 0, Right, Up, Down,
        Trigger, Back,
        SwipeLeft, SwipeRight, SwipeUp, SwipeDown
    }
    const int VButton_count = 10;

    // Current and previous-frame state of each virtual button, so we can
    // report edges (GetDown/GetUp) as well as levels (Get).
    bool[] currBtnState = new bool[VButton_count];
    bool[] prevBtnState = new bool[VButton_count];

    // Swipe detection state: where/when the current touch began, and the
    // most recent touch position (becomes the touch-up position).
    Vector2 touchDownPos;
    Vector2 touchUpPos;
    float touchDownTime;

    static OculusInput _instance;
    public static OculusInput instance { get { return _instance; } }

    void Awake() {
        _instance = this;
    }

    void Update() {
        // Shift current state into previous state before sampling anew.
        for (int i=0; i<VButton_count; i++) prevBtnState[i] = currBtnState[i];

#if UNITY_EDITOR
        // Desktop mappings: arrows = directions, shift+arrows = swipes,
        // space/mouse = trigger, escape = back.
        bool shift = Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift);
        currBtnState[(int)VButton.Left] = Input.GetKey(KeyCode.LeftArrow) && !shift;
        currBtnState[(int)VButton.Right] = Input.GetKey(KeyCode.RightArrow) && !shift;
        currBtnState[(int)VButton.Up] = Input.GetKey(KeyCode.UpArrow) && !shift;
        currBtnState[(int)VButton.Down] = Input.GetKey(KeyCode.DownArrow) && !shift;
        currBtnState[(int)VButton.Trigger] = Input.GetKey(KeyCode.Space) || Input.GetMouseButton(0);
        currBtnState[(int)VButton.Back] = Input.GetKey(KeyCode.Escape);
        currBtnState[(int)VButton.SwipeLeft] = Input.GetKey(KeyCode.LeftArrow) && shift;
        currBtnState[(int)VButton.SwipeRight] = Input.GetKey(KeyCode.RightArrow) && shift;
        currBtnState[(int)VButton.SwipeUp] = Input.GetKey(KeyCode.UpArrow) && shift;
        currBtnState[(int)VButton.SwipeDown] = Input.GetKey(KeyCode.DownArrow) && shift;
#else
        // Swipes are one-frame pulses; clear them every frame up front.
        // (Clearing only in the not-pressed branch leaves a swipe flag
        // stuck "true" if the user clicks the pad right after swiping.)
        currBtnState[(int)VButton.SwipeLeft] = false;
        currBtnState[(int)VButton.SwipeRight] = false;
        currBtnState[(int)VButton.SwipeUp] = false;
        currBtnState[(int)VButton.SwipeDown] = false;

        if (OVRInput.Get(OVRInput.Button.PrimaryTouchpad)) {
            // Touchpad is clicked: treat the click position as a D-pad.
            Vector2 pos = OVRInput.Get(OVRInput.Axis2D.PrimaryTouchpad);
            float ang = Mathf.Atan2(pos.y, pos.x) * Mathf.Rad2Deg;
            Debug.Log("Touch angle: " + ang);
            currBtnState[(int)VButton.Left] = (ang > 135 || ang < -135);
            currBtnState[(int)VButton.Right] = (ang < 45 && ang > -45);
            currBtnState[(int)VButton.Up] = (ang > 45 && ang < 135);
            currBtnState[(int)VButton.Down] = (ang < -45 && ang > -135);
        } else {
            currBtnState[(int)VButton.Left] = false;
            currBtnState[(int)VButton.Right] = false;
            currBtnState[(int)VButton.Up] = false;
            currBtnState[(int)VButton.Down] = false;
            if (OVRInput.Get(OVRInput.Touch.PrimaryTouchpad)) {
                // Finger resting on (not clicking) the pad: record the
                // start of a potential swipe, and track the latest position.
                Vector2 pos = OVRInput.Get(OVRInput.Axis2D.PrimaryTouchpad);
                if (touchDownTime == 0) {
                    touchDownTime = Time.time;
                    touchDownPos = pos;
                }
                touchUpPos = pos;  // (update this continually during the touch)
            } else if (touchDownTime > 0) {
                // Touch-up: trigger a swipe if we have moved a sufficient distance
                // since touch-down, within the last second.
                if (Time.time - touchDownTime < 1
                        && Vector2.Distance(touchDownPos, touchUpPos) > 0.5f) {
                    // BUG FIX: Atan2 returns radians; without Rad2Deg the
                    // degree comparisons below made SwipeRight fire on every
                    // swipe and the other three directions never fire.
                    float ang = Mathf.Atan2(touchUpPos.y - touchDownPos.y,
                                            touchUpPos.x - touchDownPos.x) * Mathf.Rad2Deg;
                    Debug.Log("Swipe from " + touchDownPos + " to " + touchUpPos + " angle: " + ang);
                    currBtnState[(int)VButton.SwipeLeft] = (ang > 135 || ang < -135);
                    currBtnState[(int)VButton.SwipeRight] = (ang < 45 && ang > -45);
                    currBtnState[(int)VButton.SwipeUp] = (ang > 45 && ang < 135);
                    currBtnState[(int)VButton.SwipeDown] = (ang < -45 && ang > -135);
                }
                touchDownTime = 0;
            }
        }
        currBtnState[(int)VButton.Trigger] = OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger);
        currBtnState[(int)VButton.Back] = OVRInput.Get(OVRInput.Button.Back);
#endif
    }

    /// <summary>True while the given virtual button is held.</summary>
    public static bool Get(VButton btn) {
        return _instance.currBtnState[(int)btn];
    }

    /// <summary>True only on the frame the button went down.</summary>
    public static bool GetDown(VButton btn) {
        return _instance.currBtnState[(int)btn] && !_instance.prevBtnState[(int)btn];
    }

    /// <summary>True only on the frame the button was released.</summary>
    public static bool GetUp(VButton btn) {
        return !_instance.currBtnState[(int)btn] && _instance.prevBtnState[(int)btn];
    }
}
Oh yes, the other thing to be aware of is the Oculus "Platform SDK". This is a plug-in that hooks into Oculus's own networking features, including VoIP, matchmaking, etc. Even if you don't use most of their networking features (we're using Photon in our current project), any networked game should probably still use Oculus's VoIP, or at least retrieve the player's Oculus username. For those you will need to delve into the Platform SDK. If your game doesn't do any networking, then you can probably ignore it.
Thank you for the help! I will use this method with the plugin(s) until Unity adds native input support for the Go, as it has for the Rift/Vive.