Example #1
    public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
    {
        int num = 0;

        if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, IntPtr.Zero, ref num) && num > 0)
        {
            int num2 = num * OVRBoundary.cachedVector3fSize;
            if (OVRBoundary.cachedGeometryNativeBuffer.GetCapacity() < num2)
            {
                OVRBoundary.cachedGeometryNativeBuffer.Reset(num2);
            }
            int num3 = num * 3;
            if (OVRBoundary.cachedGeometryManagedBuffer.Length < num3)
            {
                OVRBoundary.cachedGeometryManagedBuffer = new float[num3];
            }
            if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, OVRBoundary.cachedGeometryNativeBuffer.GetPointer(0), ref num))
            {
                Marshal.Copy(OVRBoundary.cachedGeometryNativeBuffer.GetPointer(0), OVRBoundary.cachedGeometryManagedBuffer, 0, num3);
                Vector3[] array = new Vector3[num];
                for (int i = 0; i < num; i++)
                {
                    array[i] = new OVRPlugin.Vector3f
                    {
                        x = OVRBoundary.cachedGeometryManagedBuffer[3 * i],
                        y = OVRBoundary.cachedGeometryManagedBuffer[3 * i + 1],
                        z = OVRBoundary.cachedGeometryManagedBuffer[3 * i + 2]
                    }.FromFlippedZVector3f();
                }
                return(array);
            }
        }
        return(new Vector3[0]);
    }
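
A minimal usage sketch for the decompiled GetGeometry above (not part of the original listing): it pulls the outer boundary points, which come back in tracking space, and draws them as a closed loop in world space. The OVRCameraRig reference, the OuterBoundary type, and the Debug.DrawLine visualization are illustrative assumptions.

    // Hypothetical caller: draw the Guardian outline as a debug loop each frame.
    // Assumes an OVRCameraRig in the scene; its trackingSpace anchor maps
    // tracking-space points into world space.
    public class BoundaryDebugDraw : MonoBehaviour
    {
        public OVRCameraRig cameraRig;   // assumed field, assigned in the Inspector

        void Update()
        {
            if (!OVRManager.boundary.GetConfigured())
                return;

            Vector3[] points = OVRManager.boundary.GetGeometry(OVRBoundary.BoundaryType.OuterBoundary);
            if (points == null || points.Length < 2)
                return;

            Transform space = cameraRig.trackingSpace;
            for (int i = 0; i < points.Length; i++)
            {
                // Connect each point to the next, wrapping around to close the loop.
                Vector3 a = space.TransformPoint(points[i]);
                Vector3 b = space.TransformPoint(points[(i + 1) % points.Length]);
                Debug.DrawLine(a, b, Color.cyan);
            }
        }
    }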
Example #2
    public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
    {
        if (!OVRManager.boundary.GetConfigured())
        {
            return(float.MaxValue);
        }

        Vector3[] geometry = OVRManager.boundary.GetGeometry(boundaryType);
        if (geometry.Length == 0)
        {
            return(float.MaxValue);
        }

        float maxDistance = -float.MaxValue;

        foreach (Vector3 v in geometry)
        {
            Vector3 pos      = GetWorldPosition(v);
            float   distance = Vector3.Dot(camera.transform.forward, pos);
            if (maxDistance < distance)
            {
                maxDistance = distance;
            }
        }
        return(maxDistance);
    }
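
One hedged way the helper above could be consumed (the original only computes the value): the dot product projects each world-space boundary point onto the camera's forward axis, so the maximum is a bound on how far the camera must render to keep the whole boundary in frame. Using it to pad farClipPlane, and the 1 m padding, are assumptions for illustration.

    // Hypothetical use: widen a capture camera's far plane so the Guardian stays visible.
    // Assumes this sits in the same class as GetMaximumBoundaryDistance above.
    static void FitFarPlaneToBoundary(Camera captureCamera)
    {
        float maxDistance = GetMaximumBoundaryDistance(captureCamera, OVRBoundary.BoundaryType.OuterBoundary);
        if (maxDistance != float.MaxValue)
        {
            // Keep the existing far plane if it is already large enough.
            captureCamera.farClipPlane = Mathf.Max(captureCamera.farClipPlane, maxDistance + 1.0f);
        }
    }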
Example #3
    /// <summary>
    /// Returns an array of 3d points (in clockwise order) that define the specified boundary type.
    /// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
    /// </summary>
    public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
    {
        if (OVRManager.loadedXRDevice != OVRManager.XRDevice.Oculus)
        {
//#if UNITY_2017_1_OR_NEWER
//			if (Boundary.TryGetGeometry(cachedGeometryList, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
//			{
//				Vector3[] arr = cachedGeometryList.ToArray();
//				return arr;
//			}
//#endif
            Debug.LogError("This functionality is not supported in your current version of Unity.");
            return(null);
        }

        int pointsCount = 0;
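
        // The first GetBoundaryGeometry2 call passes IntPtr.Zero and only asks the plugin
        // how many points the boundary has; the cached buffers below are sized from that
        // count before a second call actually fills them.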

        if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, IntPtr.Zero, ref pointsCount))
        {
            if (pointsCount > 0)
            {
                int requiredNativeBufferCapacity = pointsCount * cachedVector3fSize;
                if (cachedGeometryNativeBuffer.GetCapacity() < requiredNativeBufferCapacity)
                {
                    cachedGeometryNativeBuffer.Reset(requiredNativeBufferCapacity);
                }

                int requiredManagedBufferCapacity = pointsCount * 3;
                if (cachedGeometryManagedBuffer.Length < requiredManagedBufferCapacity)
                {
                    cachedGeometryManagedBuffer = new float[requiredManagedBufferCapacity];
                }

                if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, cachedGeometryNativeBuffer.GetPointer(), ref pointsCount))
                {
                    Marshal.Copy(cachedGeometryNativeBuffer.GetPointer(), cachedGeometryManagedBuffer, 0, requiredManagedBufferCapacity);

                    Vector3[] points = new Vector3[pointsCount];
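
                    // FromFlippedZVector3f negates z: the plugin reports points in a
                    // right-handed convention, while Unity's tracking space is left-handed.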

                    for (int i = 0; i < pointsCount; i++)
                    {
                        points[i] = new OVRPlugin.Vector3f()
                        {
                            x = cachedGeometryManagedBuffer[3 * i + 0],
                            y = cachedGeometryManagedBuffer[3 * i + 1],
                            z = cachedGeometryManagedBuffer[3 * i + 2],
                        }.FromFlippedZVector3f();
                    }

                    return(points);
                }
            }
        }

        return(new Vector3[0]);
    }
Example #4
 public OVRBoundary.BoundaryTestResult TestPoint(Vector3 point, OVRBoundary.BoundaryType boundaryType)
 {
     OVRPlugin.BoundaryTestResult boundaryTestResult = OVRPlugin.TestBoundaryPoint(point.ToFlippedZVector3f(), (OVRPlugin.BoundaryType)boundaryType);
     return(new OVRBoundary.BoundaryTestResult
     {
         IsTriggering = (boundaryTestResult.IsTriggering == OVRPlugin.Bool.True),
         ClosestDistance = boundaryTestResult.ClosestDistance,
         ClosestPoint = boundaryTestResult.ClosestPoint.FromFlippedZVector3f(),
         ClosestPointNormal = boundaryTestResult.ClosestPointNormal.FromFlippedZVector3f()
     });
 }
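
A hedged usage sketch for TestPoint: it feeds the right controller's tracking-space position into the test and logs when the point nears the outer boundary. OVRInput.GetLocalControllerPosition returns tracking-space coordinates, which is what TestPoint expects; the 0.5 m threshold is an arbitrary assumption.

    // Hypothetical check: warn when the right controller drifts toward the Guardian.
    void CheckControllerProximity()
    {
        Vector3 controllerPos = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
        OVRBoundary.BoundaryTestResult result =
            OVRManager.boundary.TestPoint(controllerPos, OVRBoundary.BoundaryType.OuterBoundary);

        // ClosestDistance is in meters; 0.5f is an arbitrary warning threshold.
        if (result.IsTriggering || result.ClosestDistance < 0.5f)
        {
            Debug.Log("Close to boundary: " + result.ClosestDistance.ToString("F2") + " m");
        }
    }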
Example #5
    /// <summary>
    /// Returns an array of 3d points (in clockwise order, max 256 points) that define the specified boundary type.
    /// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
    /// </summary>
    public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
    {
        OVRPlugin.BoundaryGeometry geo = OVRPlugin.GetBoundaryGeometry((OVRPlugin.BoundaryType)boundaryType);

        Vector3[] points = new Vector3[geo.PointsCount];
        for (int i = 0; i < geo.PointsCount; i++)
        {
            points[i] = geo.Points[i].FromFlippedZVector3f();
        }

        return(points);
    }
Example #6
    /// <summary>
    /// Returns the results of testing a 3d point against the specified boundary type.
    /// The test point is expected in local tracking space.
    /// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
    /// </summary>
    public OVRBoundary.BoundaryTestResult TestPoint(Vector3 point, OVRBoundary.BoundaryType boundaryType)
    {
        OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryPoint(point.ToFlippedZVector3f(), (OVRPlugin.BoundaryType)boundaryType);

        OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
        {
            IsTriggering       = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
            ClosestDistance    = ovrpRes.ClosestDistance,
            ClosestPoint       = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
            ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
        };

        return(res);
    }
Example #7
	/// <summary>
	/// Returns the results of testing a tracked node against the specified boundary type.
	/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
	/// </summary>
	public OVRBoundary.BoundaryTestResult TestNode(OVRBoundary.Node node, OVRBoundary.BoundaryType boundaryType)
	{
		OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryNode((OVRPlugin.Node)node, (OVRPlugin.BoundaryType)boundaryType);

		OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
		{
			IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
			ClosestDistance = ovrpRes.ClosestDistance,
			ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
			ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
		};

		return res;
	}
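
A short companion sketch for TestNode (not from the original source): it queries both hands and keeps the smaller distance, for example to drive a warning effect. The node and boundary-type values are assumed to be available in this SDK version.

	// Hypothetical helper: smaller of the two hands' distances to the play area.
	float GetNearestHandDistance()
	{
		OVRBoundary.BoundaryTestResult left =
			OVRManager.boundary.TestNode(OVRBoundary.Node.HandLeft, OVRBoundary.BoundaryType.PlayArea);
		OVRBoundary.BoundaryTestResult right =
			OVRManager.boundary.TestNode(OVRBoundary.Node.HandRight, OVRBoundary.BoundaryType.PlayArea);

		return Mathf.Min(left.ClosestDistance, right.ClosestDistance);
	}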
Example #8
    public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
    {
        if (!OVRManager.boundary.GetConfigured())
        {
            return(null);
        }

        List <Vector3> geometry = new List <Vector3>(OVRManager.boundary.GetGeometry(boundaryType));

        if (geometry.Count == 0)
        {
            return(null);
        }

        geometry.Add(geometry[0]);
        int numPoints = geometry.Count;

        Vector3[] vertices = new Vector3[numPoints * 2];
        Vector2[] uvs      = new Vector2[numPoints * 2];
        for (int i = 0; i < numPoints; ++i)
        {
            Vector3 v = geometry[i];
            vertices[i]             = new Vector3(v.x, bottomY, v.z);
            vertices[i + numPoints] = new Vector3(v.x, topY, v.z);
            uvs[i]             = new Vector2((float)i / (numPoints - 1), 0.0f);
            uvs[i + numPoints] = new Vector2(uvs[i].x, 1.0f);
        }

        int[] triangles = new int[(numPoints - 1) * 2 * 3];
        for (int i = 0; i < numPoints - 1; ++i)
        {
            // The geometry is built clockwise; only the back faces should be rendered in the camera frame mask.

            triangles[i * 6 + 0] = i;
            triangles[i * 6 + 1] = i + numPoints;
            triangles[i * 6 + 2] = i + 1 + numPoints;

            triangles[i * 6 + 3] = i;
            triangles[i * 6 + 4] = i + 1 + numPoints;
            triangles[i * 6 + 5] = i + 1;
        }

        Mesh mesh = new Mesh();

        mesh.vertices  = vertices;
        mesh.uv        = uvs;
        mesh.triangles = triangles;
        return(mesh);
    }
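
A sketch of one way the generated mesh could be hosted in the scene; the material argument, the object name, and the 2.5 m top height are assumptions for illustration, and the method is assumed to live in the same class as BuildBoundaryMesh above.

    // Hypothetical setup: wrap the boundary mesh in a renderer so it can act as a
    // mask or visualization. Arguments: boundary type, topY = 2.5 m, bottomY = 0 m.
    static GameObject CreateBoundaryObject(Material maskMaterial)
    {
        Mesh mesh = BuildBoundaryMesh(OVRBoundary.BoundaryType.OuterBoundary, 2.5f, 0.0f);
        if (mesh == null)
        {
            return null;
        }

        GameObject go = new GameObject("BoundaryMesh");
        go.AddComponent<MeshFilter>().sharedMesh = mesh;
        go.AddComponent<MeshRenderer>().sharedMaterial = maskMaterial;
        return go;
    }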
Example #9
	/// <summary>
	/// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
	/// </summary>
	public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
	{
		if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
			return OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f();

		else
		{
#if !USING_XR_SDK && !REQUIRES_XR_SDK
			Vector3 dimensions;
			if (Boundary.TryGetDimensions(out dimensions, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
				return dimensions;
#endif
			return Vector3.zero;
		}
	}
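
A minimal roomscale check built on GetDimensions; the 2 m x 1.5 m threshold is an illustrative assumption, not an Oculus requirement.

	// Hypothetical check: is the configured play area at least 2 m wide and 1.5 m deep?
	bool HasRoomscalePlayArea()
	{
		Vector3 dims = OVRManager.boundary.GetDimensions(OVRBoundary.BoundaryType.PlayArea);
		return dims.x >= 2.0f && dims.z >= 1.5f;   // x = width, z = depth (y = height)
	}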
Example #10
	public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
	{
		if (!OVRManager.boundary.GetConfigured())
		{
			return null;
		}

		List<Vector3> geometry = new List<Vector3>(OVRManager.boundary.GetGeometry(boundaryType));
		if (geometry.Count == 0)
		{
			return null;
		}

		geometry.Add(geometry[0]);
		int numPoints = geometry.Count;

		Vector3[] vertices = new Vector3[numPoints * 2];
		Vector2[] uvs = new Vector2[numPoints * 2];
		for (int i = 0; i < numPoints; ++i)
		{
			Vector3 v = geometry[i];
			vertices[i] = new Vector3(v.x, bottomY, v.z);
			vertices[i + numPoints] = new Vector3(v.x, topY, v.z);
			uvs[i] = new Vector2((float)i / (numPoints - 1), 0.0f);
			uvs[i + numPoints] = new Vector2(uvs[i].x, 1.0f);
		}

		int[] triangles = new int[(numPoints - 1) * 2 * 3];
		for (int i = 0; i < numPoints - 1; ++i)
		{
			// The geometry is built clockwise; only the back faces should be rendered in the camera frame mask.

			triangles[i * 6 + 0] = i;
			triangles[i * 6 + 1] = i + numPoints;
			triangles[i * 6 + 2] = i + 1 + numPoints;

			triangles[i * 6 + 3] = i;
			triangles[i * 6 + 4] = i + 1 + numPoints;
			triangles[i * 6 + 5] = i + 1;
		}

		Mesh mesh = new Mesh();
		mesh.vertices = vertices;
		mesh.uv = uvs;
		mesh.triangles = triangles;
		return mesh;
	}
Example #11
    /// <summary>
    /// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
    /// </summary>
    public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
    {
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
        {
            return(OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f());
        }

        else
        {
            Vector3 dimensions;
            if (Boundary.TryGetDimensions(out dimensions, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
            {
                return(dimensions);
            }
            return(Vector3.zero);
        }
    }
Example #12
    /// <summary>
    /// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
    /// </summary>
    public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
    {
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
        {
            return(OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f());
        }

        else
        {
#if UNITY_2017_1_OR_NEWER
            Vector3 dimensions;
            //if (Boundary.TryGetDimensions(out dimensions, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
            //return dimensions;
#endif
            return(Vector3.zero);
        }
    }
Example #13
    /// <summary>
    /// Returns an array of 3d points (in clockwise order) that define the specified boundary type.
    /// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
    /// </summary>
    public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
    {
        int pointsCount = 0;

        if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, IntPtr.Zero, ref pointsCount))
        {
            if (pointsCount > 0)
            {
                int requiredNativeBufferCapacity = pointsCount * cachedVector3fSize;
                if (cachedGeometryNativeBuffer.GetCapacity() < requiredNativeBufferCapacity)
                {
                    cachedGeometryNativeBuffer.Reset(requiredNativeBufferCapacity);
                }

                int requiredManagedBufferCapacity = pointsCount * 3;
                if (cachedGeometryManagedBuffer.Length < requiredManagedBufferCapacity)
                {
                    cachedGeometryManagedBuffer = new float[requiredManagedBufferCapacity];
                }

                if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, cachedGeometryNativeBuffer.GetPointer(), ref pointsCount))
                {
                    Marshal.Copy(cachedGeometryNativeBuffer.GetPointer(), cachedGeometryManagedBuffer, 0, requiredManagedBufferCapacity);

                    Vector3[] points = new Vector3[pointsCount];

                    for (int i = 0; i < pointsCount; i++)
                    {
                        points[i] = new OVRPlugin.Vector3f()
                        {
                            x = cachedGeometryManagedBuffer[3 * i + 0],
                            y = cachedGeometryManagedBuffer[3 * i + 1],
                            z = cachedGeometryManagedBuffer[3 * i + 2],
                        }.FromFlippedZVector3f();
                    }

                    return(points);
                }
            }
        }

        return(new Vector3[0]);
    }
Example #14
    public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
    {
        if (!OVRManager.boundary.GetConfigured())
        {
            return(null);
        }
        List <Vector3> list = new List <Vector3>(OVRManager.boundary.GetGeometry(boundaryType));

        if (list.Count == 0)
        {
            return(null);
        }
        list.Add(list[0]);
        int count = list.Count;

        Vector3[] array  = new Vector3[count * 2];
        Vector2[] array2 = new Vector2[count * 2];
        for (int i = 0; i < count; i++)
        {
            Vector3 vector = list[i];
            array[i]          = new Vector3(vector.x, bottomY, vector.z);
            array[i + count]  = new Vector3(vector.x, topY, vector.z);
            array2[i]         = new Vector2((float)i / (float)(count - 1), 0f);
            array2[i + count] = new Vector2(array2[i].x, 1f);
        }
        int[] array3 = new int[(count - 1) * 2 * 3];
        for (int j = 0; j < count - 1; j++)
        {
            array3[j * 6]     = j;
            array3[j * 6 + 1] = j + count;
            array3[j * 6 + 2] = j + 1 + count;
            array3[j * 6 + 3] = j;
            array3[j * 6 + 4] = j + 1 + count;
            array3[j * 6 + 5] = j + 1;
        }
        return(new Mesh
        {
            vertices = array,
            uv = array2,
            triangles = array3
        });
    }
Example #15
    void GetGuardianBoundary(OVRBoundary.BoundaryType boundaryType)
    {
        ClearBoundaryPrefabList();

        List <Vector3> boundaryTemp = new List <Vector3>();
        List <Vector3> heightVerts  = new List <Vector3>();

        boundaryTemp.AddRange(boundary.GetGeometry(boundaryType));

        for (int i = 0; i < boundaryTemp.Count; i++)
        {
            Vector3 temp = boundaryTemp[i];
            temp.y += maxBoundaryHeight;
            heightVerts.Add(temp);
        }

        boundaryTemp.AddRange(heightVerts);

        boundaryVertices = boundaryTemp.ToArray();

        xSize = boundaryVertices.Length;
    }
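
The fields used above (boundary, maxBoundaryHeight, boundaryVertices, xSize, ClearBoundaryPrefabList) are specific to the surrounding project, so the follow-up step is a guess: one plausible companion is to spawn a marker prefab at each collected vertex. The prefab and list parameters below are hypothetical, not taken from the original code.

    // Hypothetical companion method: place a marker at every collected boundary vertex.
    void SpawnBoundaryPrefabs(GameObject boundaryPrefab, List<GameObject> boundaryPrefabList)
    {
        foreach (Vector3 vertex in boundaryVertices)
        {
            GameObject marker = Instantiate(boundaryPrefab, vertex, Quaternion.identity, transform);
            boundaryPrefabList.Add(marker);
        }
    }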
Example #16
    public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
    {
        if (!OVRManager.boundary.GetConfigured())
        {
            return(float.MaxValue);
        }
        Vector3[] geometry = OVRManager.boundary.GetGeometry(boundaryType);
        if (geometry.Length == 0)
        {
            return(float.MaxValue);
        }
        float num = float.MinValue;

        foreach (Vector3 trackingSpacePosition in geometry)
        {
            Vector3 worldPosition = OVRCompositionUtil.GetWorldPosition(trackingSpacePosition);
            float   num2          = Vector3.Dot(camera.transform.forward, worldPosition);
            if (num < num2)
            {
                num = num2;
            }
        }
        return(num);
    }
Example #17
 /// <summary>
 /// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
 /// </summary>
 public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
 {
     return(OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f());
 }