Example #1
        /// <summary>
        /// Handles taps. Only one tap is handled per frame, as taps are usually
        /// low frequency compared to the frame rate.
        /// Adds an anchor when the tap lands on a plane.
        /// </summary>
        protected override void HandleTaps(Camera camera, Google.AR.Core.Frame frame)
        {
            queuedSingleTaps.TryDequeue(out var tap);

            if (tap == null || camera.TrackingState != TrackingState.Tracking)
            {
                return;
            }
            foreach (var hit in frame.HitTest(tap))
            {
                var trackable = hit.Trackable;
                if (trackable is Plane plane && plane.IsPoseInPolygon(hit.HitPose))
                {
                    // Cap the number of objects created to avoid overloading
                    // both the rendering system and ARCore.
                    if (this.anchors.Count >= 10)
                    {
                        break;
                    }

                    // Nothing to place when no character is selected.
                    if (this.selectedCharacter == null)
                    {
                        break;
                    }
                    var model = new ObjModelRenderer();
                    model.CreateOnGlThread(this.context, selectedCharacter.AssetModel, selectedCharacter.AssetTexture);
                    model.SetMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);
                    this.anchors.Add(hit.CreateAnchor(), model);

                    // Hits are sorted by depth; consider only the closest hit on a plane.
                    break;
                }
            }
        }
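The snippet assumes a thread-safe tap queue filled on the UI thread and drained on the GL thread. A minimal sketch of that producer side, assuming queuedSingleTaps is a ConcurrentQueue<MotionEvent> (System.Collections.Concurrent) and using Android.Views' GestureDetector; the helper names are illustrative:

        // Hypothetical producer side of the queue consumed by HandleTaps.
        private readonly ConcurrentQueue<MotionEvent> queuedSingleTaps = new ConcurrentQueue<MotionEvent>();

        private void SetUpTapListener(View surfaceView)
        {
            var gestureDetector = new GestureDetector(this.context, new SingleTapListener(this.queuedSingleTaps));

            // Forward every touch event on the GL surface to the gesture detector.
            surfaceView.Touch += (sender, e) => e.Handled = gestureDetector.OnTouchEvent(e.Event);
        }

        private class SingleTapListener : GestureDetector.SimpleOnGestureListener
        {
            private readonly ConcurrentQueue<MotionEvent> taps;

            public SingleTapListener(ConcurrentQueue<MotionEvent> taps) => this.taps = taps;

            public override bool OnSingleTapUp(MotionEvent e)
            {
                // Queue the tap; HandleTaps dequeues at most one per frame.
                taps.Enqueue(e);
                return true;
            }

            // Must return true so the detector keeps processing the gesture.
            public override bool OnDown(MotionEvent e) => true;
        }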
Example #2
        /// <summary>
        /// Renders the model for each anchor
        /// </summary>
        protected override void RenderAnchors(Camera camera, Google.AR.Core.Frame frame)
        {
            var scaleFactor      = 0.1f;
            var lightIntensity   = frame.LightEstimate.PixelIntensity;
            var projectionMatrix = new float[16];

            camera.GetProjectionMatrix(projectionMatrix, 0, 0.1f, 100.0f);
            var viewMatrix = new float[16];

            camera.GetViewMatrix(viewMatrix, 0);

            foreach (var anchor in this.anchors.Keys)
            {
                if (anchor.TrackingState != TrackingState.Tracking)
                {
                    continue;
                }

                // Get the current combined pose of an Anchor and Plane in world space. The Anchor
                // and Plane poses are updated during calls to session.update() as ARCore refines
                // its estimate of the world.
                anchor.Pose.ToMatrix(anchorMatrix, 0);
                this.anchors[anchor].UpdateModelMatrix(anchorMatrix, scaleFactor);
                this.anchors[anchor].Draw(viewMatrix, projectionMatrix, lightIntensity);
            }
        }
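Both snippets above rely on class state that is not shown. A plausible set of declarations, with the field names taken from the snippets and the types being assumptions:

        // Maps each ARCore Anchor to the renderer of the model placed at it.
        private readonly Dictionary<Anchor, ObjModelRenderer> anchors = new Dictionary<Anchor, ObjModelRenderer>();

        // Preallocated once and reused every frame to avoid per-frame allocations on the GL thread.
        private readonly float[] anchorMatrix = new float[16];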
Example #3
        /// <summary>
        /// Checks whether any augmented images were detected in the frame
        /// </summary>
        protected override void CheckDetectedImages(Google.AR.Core.Frame frame)
        {
            var updatedAugmentedImages = frame.GetUpdatedTrackables(Java.Lang.Class.FromType(typeof(AugmentedImage)));

            foreach (var image in updatedAugmentedImages)
            {
                var imageName = ((AugmentedImage)image).Name;
                imageCapturedAction?.Invoke(imageName);
            }
        }
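For this callback to report anything, the session must be configured with an augmented image database. A sketch of that setup, assuming the Xamarin binding mirrors ARCore's Java API (AugmentedImageDatabase, Config, Session.Configure); the method name is illustrative:

        // Hypothetical setup: register a reference image so ARCore can detect it.
        private void ConfigureAugmentedImages(Session session, Android.Graphics.Bitmap referenceImage)
        {
            var database = new AugmentedImageDatabase(session);

            // The name registered here is what AugmentedImage.Name returns on detection.
            database.AddImage("target-image", referenceImage);

            var config = new Config(session);
            config.AugmentedImageDatabase = database;
            session.Configure(config);
        }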
Example #4
        /// <summary>
        /// Renders the point cloud and the detected planes
        /// </summary>
        protected override void RenderPlanes(Camera camera, Google.AR.Core.Frame frame)
        {
            var projectionMatrix = new float[16];

            camera.GetProjectionMatrix(projectionMatrix, 0, 0.1f, 100.0f);
            var viewMatrix = new float[16];

            camera.GetViewMatrix(viewMatrix, 0);


            // Collect every plane that ARCore is currently tracking in the session.
            var planes = new List<Plane>();

            foreach (var p in session.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
            {
                var plane = (Plane)p;
                planes.Add(plane);
            }
            if (planes.Any())
            {
                planeRenderer.DrawPlanes(planes, camera.DisplayOrientedPose, projectionMatrix);
            }
        }
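Note that the snippet only draws planes; the point cloud pass mentioned in the summary would look like the one in the fuller OnDrawFrame example below. A sketch, assuming a pointCloudRenderer field:

            // Visualize tracked points.
            var pointCloud = frame.AcquirePointCloud();
            pointCloudRenderer.Update(pointCloud);
            pointCloudRenderer.Draw(camera.DisplayOrientedPose, viewMatrix, projectionMatrix);

            // The app is responsible for releasing the point cloud after use.
            pointCloud.Release();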
Example #5
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear screen to notify driver it should not load any pixels from previous frame.
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }

            // Notify ARCore session that the view size changed so that the perspective matrix and the video background
            // can be properly adjusted
            mDisplayRotationHelper.UpdateSessionIfNeeded(mSession);

            try
            {
                // Obtain the current frame from ARSession. When the configuration is set to
                // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
                // camera framerate.
                Google.AR.Core.Frame  frame  = mSession.Update();
                Google.AR.Core.Camera camera = frame.Camera;

                // Handle taps. Handling only one tap per frame, as taps are usually low frequency
                // compared to frame rate.
                MotionEvent tap = null;
                mQueuedSingleTaps.TryDequeue(out tap);

                if (tap != null && camera.TrackingState == TrackingState.Tracking)
                {
                    foreach (var hit in frame.HitTest(tap))
                    {
                        var trackable = hit.Trackable;

                        // Check if any plane was hit, and if it was hit inside the plane polygon.
                        if (trackable is Plane && ((Plane)trackable).IsPoseInPolygon(hit.HitPose))
                        {
                            // Cap the number of objects created. This avoids overloading both the
                            // rendering system and ARCore.
                            if (mAnchors.Count >= 16)
                            {
                                mAnchors[0].Detach();
                                mAnchors.RemoveAt(0);
                            }
                            // Adding an Anchor tells ARCore that it should track this position in
                            // space.  This anchor is created on the Plane to place the 3d model
                            // in the correct position relative to both the world and to the plane
                            mAnchors.Add(hit.CreateAnchor());

                            // Hits are sorted by depth. Consider only closest hit on a plane.
                            break;
                        }
                    }
                }

                // Draw background.
                mBackgroundRenderer.Draw(frame);

                // If not tracking, don't draw 3d objects.
                if (camera.TrackingState == TrackingState.Paused)
                {
                    return;
                }

                // Get projection matrix.
                float[] projmtx = new float[16];
                camera.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

                // Get camera matrix and draw.
                float[] viewmtx = new float[16];
                camera.GetViewMatrix(viewmtx, 0);

                // Compute lighting from average intensity of the image.
                var lightIntensity = frame.LightEstimate.PixelIntensity;

                // Visualize tracked points.
                var pointCloud = frame.AcquirePointCloud();
                mPointCloud.Update(pointCloud);
                mPointCloud.Draw(camera.DisplayOrientedPose, viewmtx, projmtx);

                // The app is responsible for releasing point cloud resources after using it.
                pointCloud.Release();

                var planes = new List<Plane>();
                foreach (var p in mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
                {
                    var plane = (Plane)p;
                    planes.Add(plane);
                }

                // Check if we detected at least one plane. If so, hide the loading message.
                if (mLoadingMessageSnackbar != null)
                {
                    foreach (var plane in planes)
                    {
                        if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                            plane.TrackingState == TrackingState.Tracking)
                        {
                            hideLoadingMessage();
                            break;
                        }
                    }
                }

                // Visualize planes.
                mPlaneRenderer.DrawPlanes(planes, camera.DisplayOrientedPose, projmtx);

                // Visualize anchors created by touch.
                float scaleFactor = 1.0f;
                foreach (var anchor in mAnchors)
                {
                    if (anchor.TrackingState != TrackingState.Tracking)
                    {
                        continue;
                    }

                    // Get the current combined pose of an Anchor and Plane in world space. The Anchor
                    // and Plane poses are updated during calls to session.update() as ARCore refines
                    // its estimate of the world.
                    anchor.Pose.ToMatrix(mAnchorMatrix, 0);

                    // Update and draw the model and its shadow.
                    mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
                    mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
                    mVirtualObject.Draw(viewmtx, projmtx, lightIntensity);
                    mVirtualObjectShadow.Draw(viewmtx, projmtx, lightIntensity);
                }
            }
            catch (System.Exception ex)
            {
                // Avoid crashing the application due to unhandled exceptions.
                Log.Error(TAG, "Exception on the OpenGL thread", ex);
            }
        }
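These OnDrawFrame overrides belong to a class implementing GLSurfaceView.IRenderer. A minimal sketch of the surface wiring that makes ARCore drive this render loop (the method name is illustrative):

        private void SetUpSurfaceView(GLSurfaceView surfaceView)
        {
            surfaceView.PreserveEGLContextOnPause = true;
            surfaceView.SetEGLContextClientVersion(2);

            // An alpha channel is requested for plane blending.
            surfaceView.SetEGLConfigChooser(8, 8, 8, 8, 16, 0);
            surfaceView.SetRenderer(this);
            surfaceView.RenderMode = Rendermode.Continuously;
        }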
Example #6
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            try
            {
                if (mSession == null)
                {
                    return;
                }

                mDisplayRotationHelper.UpdateSessionIfNeeded(mSession);

                Google.AR.Core.Frame frame = mSession.Update();
                Camera camera = frame.Camera;

                MotionEvent tap = null;
                mQueuedSingleTaps.TryDequeue(out tap);

                if (tap != null && camera.TrackingState == TrackingState.Tracking)
                {
                    foreach (var hit in frame.HitTest(tap))
                    {
                        var trackable = hit.Trackable;

                        if (trackable is Plane && ((Plane)trackable).IsPoseInPolygon(hit.HitPose))
                        {
                            if (mTouches.Count >= 16)
                            {
                                // Detach the oldest anchor so ARCore stops tracking it before dropping it.
                                mTouches[0].GetAnchor().Detach();
                                mTouches.RemoveAt(0);
                            }
                            mTouches.Add(new PlaneAttachment((Plane)trackable, mSession.CreateAnchor(hit.HitPose)));

                            break;
                        }
                    }
                }

                mBackgroundRenderer.Draw(frame);

                if (camera.TrackingState == TrackingState.Paused)
                {
                    return;
                }

                float[] projmtx = new float[16];
                camera.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

                float[] viewmtx = new float[16];
                camera.GetViewMatrix(viewmtx, 0);

                var lightIntensity = frame.LightEstimate.PixelIntensity;

                var pointCloud = frame.AcquirePointCloud();
                mPointCloud.Update(pointCloud);
                mPointCloud.Draw(camera.DisplayOrientedPose, viewmtx, projmtx);

                pointCloud.Release();

                var planes = new List<Plane>();
                foreach (var p in mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
                {
                    var plane = (Plane)p;
                    planes.Add(plane);
                }

                if (mLoadingMessageSnackbar != null)
                {
                    foreach (var plane in planes)
                    {
                        if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                            plane.TrackingState == TrackingState.Tracking)
                        {
                            hideLoadingMessage();
                            break;
                        }
                    }
                }

                mPlaneRenderer.DrawPlanes(planes, camera.DisplayOrientedPose, projmtx);

                float scaleFactor = 1.0f;
                foreach (var planeAttachment in mTouches)
                {
                    if (!planeAttachment.IsTracking)
                    {
                        continue;
                    }

                    planeAttachment.GetPose().ToMatrix(mAnchorMatrix, 0);

                    mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
                    mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
                    mVirtualObject.Draw(viewmtx, projmtx, lightIntensity);
                    mVirtualObjectShadow.Draw(viewmtx, projmtx, lightIntensity);
                }
            }
            catch (System.Exception ex)
            {
                Log.Error(TAG, "Exception on the OpenGL thread", ex);
            }
        }
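This example and the next both use a PlaneAttachment helper that pairs a Plane with an Anchor. A sketch modeled on the ARCore sample app: it reports tracking only while both trackables are tracked and snaps the anchor's pose onto the plane's height:

        public class PlaneAttachment
        {
            private readonly Plane plane;
            private readonly Anchor anchor;

            // Scratch buffers reused on every GetPose() call.
            private readonly float[] poseTranslation = new float[3];
            private readonly float[] poseRotation = new float[4];

            public PlaneAttachment(Plane plane, Anchor anchor)
            {
                this.plane = plane;
                this.anchor = anchor;
            }

            public bool IsTracking =>
                plane.TrackingState == TrackingState.Tracking &&
                anchor.TrackingState == TrackingState.Tracking;

            public Pose GetPose()
            {
                var pose = anchor.Pose;
                pose.GetTranslation(poseTranslation, 0);
                pose.GetRotationQuaternion(poseRotation, 0);

                // Keep the anchor's x/z but snap y to the plane's center height.
                poseTranslation[1] = plane.CenterPose.Ty();
                return new Pose(poseTranslation, poseRotation);
            }

            public Anchor GetAnchor() => anchor;
        }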
Example #7
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear screen to notify driver it should not load any pixels from previous frame.
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            try
            {
                // Obtain the current frame from ARSession. When the configuration is set to
                // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
                // camera framerate.
                Google.AR.Core.Frame frame = mSession.Update();

                // Handle taps. Handling only one tap per frame, as taps are usually low frequency
                // compared to frame rate.
                MotionEvent tap = null;
                mQueuedSingleTaps.TryDequeue(out tap);

                if (tap != null && frame.GetTrackingState() == Google.AR.Core.Frame.TrackingState.Tracking)
                {
                    foreach (var hit in frame.HitTest(tap))
                    {
                        // Check if any plane was hit, and if it was hit inside the plane polygon.
                        if (hit is PlaneHitResult && ((PlaneHitResult)hit).IsHitInPolygon)
                        {
                            // Cap the number of objects created. This avoids overloading both the
                            // rendering system and ARCore.
                            if (mTouches.Count >= 16)
                            {
                                mSession.RemoveAnchors(new[] { mTouches[0].GetAnchor() });
                                mTouches.RemoveAt(0);
                            }
                            // Adding an Anchor tells ARCore that it should track this position in
                            // space. This anchor will be used in PlaneAttachment to place the 3d model
                            // in the correct position relative both to the world and to the plane.
                            mTouches.Add(new PlaneAttachment(
                                             ((PlaneHitResult)hit).Plane,
                                             mSession.AddAnchor(hit.HitPose)));

                            // Hits are sorted by depth. Consider only closest hit on a plane.
                            break;
                        }
                    }
                }

                // Draw background.
                mBackgroundRenderer.Draw(frame);

                // If not tracking, don't draw 3d objects.
                if (frame.GetTrackingState() == Google.AR.Core.Frame.TrackingState.NotTracking)
                {
                    return;
                }

                // Get projection matrix.
                float[] projmtx = new float[16];
                mSession.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

                // Get camera matrix and draw.
                float[] viewmtx = new float[16];
                frame.GetViewMatrix(viewmtx, 0);

                // Compute lighting from average intensity of the image.
                var lightIntensity = frame.LightEstimate.PixelIntensity;

                // Visualize tracked points.
                mPointCloud.Update(frame.PointCloud);
                mPointCloud.Draw(frame.PointCloudPose, viewmtx, projmtx);

                // Check if we detected at least one plane. If so, hide the loading message.
                if (mLoadingMessageSnackbar != null)
                {
                    foreach (var plane in mSession.AllPlanes)
                    {
                        if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                            plane.GetTrackingState() == Plane.TrackingState.Tracking)
                        {
                            hideLoadingMessage();
                            break;
                        }
                    }
                }

                // Visualize planes.
                mPlaneRenderer.DrawPlanes(mSession.AllPlanes, frame.Pose, projmtx);

                // Visualize anchors created by touch.
                float scaleFactor = 1.0f;
                foreach (var planeAttachment in mTouches)
                {
                    if (!planeAttachment.IsTracking)
                    {
                        continue;
                    }

                    // Get the current combined pose of an Anchor and Plane in world space. The Anchor
                    // and Plane poses are updated during calls to session.update() as ARCore refines
                    // its estimate of the world.
                    planeAttachment.GetPose().ToMatrix(mAnchorMatrix, 0);

                    // Update and draw the model and its shadow.
                    mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
                    mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
                    mVirtualObject.Draw(viewmtx, projmtx, lightIntensity);
                    mVirtualObjectShadow.Draw(viewmtx, projmtx, lightIntensity);
                }
            }
            catch (System.Exception ex)
            {
                // Avoid crashing the application due to unhandled exceptions.
                Log.Error(TAG, "Exception on the OpenGL thread", ex);
            }
        }
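The loading-message checks in the last three examples assume a Snackbar field and a helper along these lines; a sketch, assuming the surrounding class is an Activity (names taken from the snippets):

        private void hideLoadingMessage()
        {
            RunOnUiThread(() =>
            {
                mLoadingMessageSnackbar?.Dismiss();
                mLoadingMessageSnackbar = null;
            });
        }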