我正在尝试使用 Gear VR 控制器和 Oculus SDK 开发一个 Gear VR 应用。我已经让 GazePointerRing 与控制器预制件（prefab）一起正常工作了：应用中可以看到一个准星（reticle），我可以用 Gear VR 控制器移动它，它也能检测到我放在场景中的立方体。我现在想实现的是：将准星指向立方体并按住控制器上的按钮，让立方体吸附到我的控制器模型上并随之移动，直到我松开按钮为止。我一直在 OVR Physics Raycaster 脚本中查找如何在 if 语句中获取射线检测（raycast）的命中结果并与按钮输入结合，但我找不到获取射线所命中对象的方法。下面是 OVR Physics Raycaster 脚本中的 Oculus 代码：
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
    /// <summary>
    /// Simple event system raycaster using physics raycasts. Unlike the stock
    /// PhysicsRaycaster it casts along the world-space ray carried in an
    /// <see cref="OVRRayPointerEventData"/> (i.e. the controller's pointer ray)
    /// instead of a ray derived from a screen position.
    /// </summary>
    [RequireComponent(typeof(OVRCameraRig))]
    public class OVRPhysicsRaycaster : BaseRaycaster
    {
        /// <summary>
        /// Const to use for clarity when no event mask is set.
        /// </summary>
        protected const int kNoEventMaskSet = -1;

        /// <summary>
        /// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
        /// </summary>
        [SerializeField]
        public LayerMask m_EventMask = kNoEventMaskSet;

        // Protected constructor: instances are created by Unity via AddComponent, never with 'new'.
        protected OVRPhysicsRaycaster()
        { }

        /// <summary>
        /// Camera used for event processing and screen-space conversion;
        /// always the rig's left-eye camera.
        /// </summary>
        public override Camera eventCamera
        {
            get
            {
                return GetComponent<OVRCameraRig>().leftEyeCamera;
            }
        }

        /// <summary>
        /// Depth used to determine the order of event processing.
        /// </summary>
        public virtual int depth
        {
            get { return (eventCamera != null) ? (int)eventCamera.depth : 0xFFFFFF; }
        }

        /// <summary>
        /// Event mask used to determine which objects will receive events
        /// (the configured layer mask ANDed with the camera's culling mask).
        /// </summary>
        public int finalEventMask
        {
            get { return (eventCamera != null) ? eventCamera.cullingMask & m_EventMask : kNoEventMaskSet; }
        }

        /// <summary>
        /// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
        /// </summary>
        public LayerMask eventMask
        {
            get { return m_EventMask; }
            set { m_EventMask = value; }
        }

        /// <summary>
        /// Perform a raycast using the worldSpaceRay in eventData and append
        /// all hits, sorted nearest-first, to resultAppendList.
        /// </summary>
        /// <param name="eventData">Must be an OVRRayPointerEventData; otherwise no results are produced.</param>
        /// <param name="resultAppendList">List the hits are appended to.</param>
        public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
        {
            // This function is closely based on PhysicsRaycaster.Raycast
            if (eventCamera == null)
                return;

            OVRRayPointerEventData rayPointerEventData = eventData as OVRRayPointerEventData;
            if (rayPointerEventData == null)
                return;

            var ray = rayPointerEventData.worldSpaceRay;
            float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;

            var hits = Physics.RaycastAll(ray, dist, finalEventMask);
            if (hits.Length > 1)
                System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));

            for (int b = 0, bmax = hits.Length; b < bmax; ++b)
            {
                var result = new RaycastResult
                {
                    gameObject = hits[b].collider.gameObject,
                    module = this,
                    distance = hits[b].distance,
                    index = resultAppendList.Count,
                    // BUG FIX: the original read hits[0] for these two fields, so every
                    // result after the first reported the nearest hit's position and
                    // normal instead of its own.
                    worldPosition = hits[b].point,
                    worldNormal = hits[b].normal,
                };
                resultAppendList.Add(result);
            }
        }

        /// <summary>
        /// Perform a spherecast using the worldSpaceRay in eventData and append
        /// all hits, sorted nearest-first, to resultAppendList.
        /// </summary>
        /// <param name="eventData">Must be an OVRRayPointerEventData; otherwise no results are produced.</param>
        /// <param name="resultAppendList">List the hits are appended to.</param>
        /// <param name="radius">Radius of the sphere</param>
        public void Spherecast(PointerEventData eventData, List<RaycastResult> resultAppendList, float radius)
        {
            if (eventCamera == null)
                return;

            OVRRayPointerEventData rayPointerEventData = eventData as OVRRayPointerEventData;
            if (rayPointerEventData == null)
                return;

            var ray = rayPointerEventData.worldSpaceRay;
            float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;

            var hits = Physics.SphereCastAll(ray, radius, dist, finalEventMask);
            if (hits.Length > 1)
                System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));

            for (int b = 0, bmax = hits.Length; b < bmax; ++b)
            {
                var result = new RaycastResult
                {
                    gameObject = hits[b].collider.gameObject,
                    module = this,
                    distance = hits[b].distance,
                    index = resultAppendList.Count,
                    // BUG FIX: same hits[0] -> hits[b] correction as in Raycast above.
                    worldPosition = hits[b].point,
                    worldNormal = hits[b].normal,
                };
                resultAppendList.Add(result);
            }
        }

        /// <summary>
        /// Get screen position of this world position as seen by the event camera of this OVRPhysicsRaycaster.
        /// </summary>
        /// <param name="worldPosition"></param>
        /// <returns>Screen-space position of the given world point.</returns>
        public Vector2 GetScreenPos(Vector3 worldPosition)
        {
            // In future versions of Unity, RaycastResult will contain screenPosition so this will not be necessary
            return eventCamera.WorldToScreenPoint(worldPosition);
        }
    }
}
先决条件：确保你的场景中有 OVRManager（它是一个单例），Gear VR 控制器（OVRInput 类）才能正常工作。
我通常的做法是从控制器锚点（anchor）位置发射一条射线，并检查它是否击中了目标对象：
public class SampleScript : MonoBehaviour
{
    public Transform anchorPos;            // controller anchor (e.g. the rig's RightHandAnchor)
    public GameObject detectionLineObject; // a gameObject with a line renderer

    private RaycastHit _hitInfo;
    private LineRenderer _detectionLine;

    void Start()
    {
        // Instantiate our own copy of the line so we can freely update its points.
        GameObject line = Instantiate(detectionLineObject);
        _detectionLine = line.GetComponent<LineRenderer>();
    }

    void Update()
    {
        DetectionManager();
    }

    // Casts a ray from the controller anchor every frame, draws the pointer line,
    // and notifies any MyComponent the ray hits.
    void DetectionManager()
    {
        // Check if a controller is actually connected.
        // BUG FIX: the original combined the two negated checks with '||', which
        // returns whenever EITHER remote is missing — on Gear VR only one remote
        // is ever connected at a time, so the method always bailed out.
        // Only give up when NEITHER controller is connected.
        if (!OVRInput.IsControllerConnected(OVRInput.Controller.RTrackedRemote) && !OVRInput.IsControllerConnected(OVRInput.Controller.LTrackedRemote))
        {
            return;
        }

        // Launch a ray from the OVRCameraRig's anchor.
        if (Physics.Raycast(anchorPos.position, anchorPos.forward, out _hitInfo))
        {
            // Set our line points: from the controller to the hit point.
            _detectionLine.SetPosition(0, anchorPos.position);
            _detectionLine.SetPosition(1, _hitInfo.point);

            MyComponent target = _hitInfo.collider.gameObject.GetComponent<MyComponent>();
            if (target != null)
            {
                // Do your stuff here
                target.StartInteraction();
            }
        }
        else
        {
            // Point our line far along the controller's forward direction.
            _detectionLine.SetPosition(0, anchorPos.position);
            // BUG FIX: the far point must be offset from the anchor's position; the
            // original passed the bare direction vector, which anchored the line's
            // end near the world origin instead of 500 units in front of the controller.
            _detectionLine.SetPosition(1, anchorPos.position + anchorPos.forward * 500.0f);
        }
    }
}