public void Update()
{
    _AppQuitOnEscape();
    AsyncTask.Update();
    // ARSession.Update() must be called before other components' Update methods to ensure the accuracy of AR Engine.
    ARSession.Update();
}
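
The _AppQuitOnEscape helper is not part of the listing. A plausible Unity-style sketch of it (hypothetical, not the sample's actual code; assumes the UnityEngine namespace):

// Hypothetical helper: quit the application when Escape is pressed.
// Unity maps the Android back key to KeyCode.Escape.
private void _AppQuitOnEscape()
{
    if (Input.GetKeyDown(KeyCode.Escape))
    {
        Application.Quit();
    }
}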
Example 2
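World-tracking render loop: draws the camera preview, labels planes that AR Engine detects, forwards gesture input for object placement, and scales virtual-object lighting by the pixel intensity of the light estimate.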
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try
            {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame  arFrame  = mSession.Update();
                ARCamera arCamera = arFrame.Camera;

                // The size of the projection matrix is 4 * 4.
                float[] projectionMatrix = new float[16];

                arCamera.GetProjectionMatrix(projectionMatrix, PROJ_MATRIX_OFFSET, PROJ_MATRIX_NEAR, PROJ_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(arFrame);
                StringBuilder sb = new StringBuilder();
                UpdateMessageData(sb);
                mTextDisplay.OnDrawFrame(sb);

                // The size of ViewMatrix is 4 * 4.
                float[] viewMatrix = new float[16];
                arCamera.GetViewMatrix(viewMatrix, 0);
                var allTrackables = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARPlane)));

                foreach (ARPlane plane in allTrackables)
                {
                    if (plane.Type != ARPlane.PlaneType.UnknownFacing &&
                        plane.TrackingState == ARTrackableTrackingState.Tracking)
                    {
                        HideLoadingMessage();
                        break;
                    }
                }
                mLabelDisplay.OnDrawFrame(allTrackables, arCamera.DisplayOrientedPose,
                                          projectionMatrix);
                HandleGestureEvent(arFrame, arCamera, projectionMatrix, viewMatrix);
                ARLightEstimate lightEstimate       = arFrame.LightEstimate;
                float           lightPixelIntensity = 1;
                if (lightEstimate.GetState() != ARLightEstimate.State.NotValid)
                {
                    lightPixelIntensity = lightEstimate.PixelIntensity;
                }
                DrawAllObjects(projectionMatrix, viewMatrix, lightPixelIntensity);
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Info(TAG, "ArDemoRuntimeException on the OpenGL thread: " + e.Message);
            }
            catch (Exception t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread: " + t.Message);
            }
        }
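
Each OnDrawFrame above is the per-frame callback of Android's GLSurfaceView.IRenderer. For context, a minimal wiring sketch (assumes Android.Opengl; the renderer parameter stands for whichever class contains the OnDrawFrame shown, and the config values are typical rather than taken from the sample):

private void InitGlSurfaceView(GLSurfaceView surfaceView, GLSurfaceView.IRenderer renderer)
{
    surfaceView.PreserveEGLContextOnPause = true;
    surfaceView.SetEGLContextClientVersion(2);          // The samples issue GLES 2.0 calls.
    surfaceView.SetEGLConfigChooser(8, 8, 8, 8, 16, 0); // RGBA8888 with a 16-bit depth buffer.
    surfaceView.SetRenderer(renderer);                  // The renderer whose OnDrawFrame runs every frame.
    surfaceView.RenderMode = Rendermode.Continuously;   // Keep drawing so the session updates each frame.
}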
Example 3
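Hand-tracking render loop: draws the camera preview, prints recognition info for each detected ARHand, and delegates hand rendering to HandRelatedDisplay implementations.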
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear the color buffer and notify the driver not to load the data of the previous frame.
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try
            {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame  arFrame  = mSession.Update();
                ARCamera arCamera = arFrame.Camera;

                // The size of the projection matrix is 4 * 4.
                float[] projectionMatrix = new float[16];

                // Obtain the projection matrix through ARCamera.
                arCamera.GetProjectionMatrix(projectionMatrix, PROJECTION_MATRIX_OFFSET, PROJECTION_MATRIX_NEAR,
                                             PROJECTION_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(arFrame);
                ICollection hands = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARHand)));
                if (hands.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                foreach (ARHand hand in hands)
                {
                    // Update the hand recognition information to be displayed on the screen.
                    StringBuilder sb = new StringBuilder();
                    UpdateMessageData(sb, hand);

                    // Display hand recognition information on the screen.
                    mTextDisplay.OnDrawFrame(sb);
                }
                foreach (HandRelatedDisplay handRelatedDisplay in mHandRelatedDisplays)
                {
                    handRelatedDisplay.OnDrawFrame(hands, projectionMatrix);
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Info(TAG, "Exception on the ArDemoRuntimeException!");
            }
            catch (Exception t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread " + t.Message);
            }
        }
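
The last loop delegates drawing to a list of HandRelatedDisplay implementations. The interface implied by that call site is roughly (a sketch inferred from usage, not the sample's exact definition):

public interface HandRelatedDisplay
{
    // Called once per rendered frame with the tracked hands and the projection matrix.
    void OnDrawFrame(ICollection hands, float[] projectionMatrix);
}

Example 4

Face-tracking render loop: computes a frame rate, draws the geometry of every tracked ARFace, and overlays status text.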
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mArSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mArSession);
            }

            try
            {
                mArSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame frame = mArSession.Update();
                mTextureDisplay.OnDrawFrame(frame);
                float fpsResult = DoFpsCalculate();

                ICollection faces = mArSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARFace)));

                if (faces.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                Log.Debug(TAG, "Face number: " + faces.Count);
                ARCamera camera = frame.Camera;
                foreach (ARFace face in faces)
                {
                    if (face.TrackingState == ARTrackableTrackingState.Tracking)
                    {
                        mFaceGeometryDisplay.OnDrawFrame(camera, face);
                        StringBuilder sb = new StringBuilder();
                        UpdateMessageData(sb, fpsResult, face);
                        mTextDisplay.OnDrawFrame(sb);
                    }
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Debug(TAG, "ArDemoRuntimeException on the OpenGL thread: " + e.Message);
            }
            catch (Exception t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Debug(TAG, "Exception on the OpenGL thread: " + t.Message);
            }
        }
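
DoFpsCalculate is not shown in the listing. A hypothetical implementation consistent with how it is used above, counting OnDrawFrame calls and refreshing the value about twice per second:

private int mFrameCount;
private long mLastIntervalMs;
private float mFps;

// Hypothetical helper: derive FPS from the number of frames drawn since the
// last half-second interval. The sample's actual implementation may differ.
private float DoFpsCalculate()
{
    ++mFrameCount;
    long nowMs = Java.Lang.JavaSystem.CurrentTimeMillis();
    if (nowMs - mLastIntervalMs > 500)
    {
        mFps = mFrameCount * 1000.0f / (nowMs - mLastIntervalMs);
        mFrameCount = 0;
        mLastIntervalMs = nowMs;
    }
    return mFps;
}

Example 5

Body-tracking render loop: prints recognition info for each tracked ARBody and delegates body rendering to BodyRelatedDisplay implementations.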
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear the screen to notify the driver not to load pixels of the previous frame.
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try
            {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame frame = mSession.Update();

                // The size of the projection matrix is 4 * 4.
                float[]  projectionMatrix = new float[16];
                ARCamera camera           = frame.Camera;

                // Obtain the projection matrix of ARCamera.
                camera.GetProjectionMatrix(projectionMatrix, PROJECTION_MATRIX_OFFSET, PROJECTION_MATRIX_NEAR,
                                           PROJECTION_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(frame);
                ICollection bodies = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARBody)));
                if (bodies.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                foreach (ARBody body in bodies)
                {
                    if (body.TrackingState != ARTrackableTrackingState.Tracking)
                    {
                        continue;
                    }

                    // Update the body recognition information to be displayed on the screen.
                    StringBuilder sb = new StringBuilder();
                    UpdateMessageData(sb, body);

                    // Display the updated body information on the screen.
                    mTextDisplay.OnDrawFrame(sb);
                }
                foreach (BodyRelatedDisplay bodyRelatedDisplay in mBodyRelatedDisplays)
                {
                    bodyRelatedDisplay.OnDrawFrame(bodies, projectionMatrix);
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Info(TAG, "ArDemoRuntimeException on the OpenGL thread: " + e.Message);
            }
            catch (Exception t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread: " + t.Message);
            }
        }
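
Every example starts by asking a DisplayRotationManager whether the device rotated and, if so, refreshing the session's display geometry. A minimal sketch of that contract, assuming the Xamarin binding exposes ARSession.SetDisplayGeometry like the underlying Java setDisplayGeometry(rotation, width, height); the internals are illustrative, not the sample's code:

using Android.Content;
using Android.Hardware.Display;
using Android.Runtime;
using Android.Views;

public class DisplayRotationManager : Java.Lang.Object, DisplayManager.IDisplayListener
{
    private readonly Context mContext;
    private bool mDeviceRotationChanged;
    private int mViewWidth;
    private int mViewHeight;

    public DisplayRotationManager(Context context)
    {
        mContext = context;
    }

    // Called from OnSurfaceChanged with the current viewport size.
    public void UpdateViewportRotation(int width, int height)
    {
        mViewWidth = width;
        mViewHeight = height;
        mDeviceRotationChanged = true;
    }

    // True when a rotation happened since the last geometry update.
    public bool GetDeviceRotation()
    {
        return mDeviceRotationChanged;
    }

    public void UpdateArSessionDisplayGeometry(ARSession session)
    {
        var windowManager = mContext.GetSystemService(Context.WindowService).JavaCast<IWindowManager>();
        int rotation = (int)windowManager.DefaultDisplay.Rotation;

        // Assumed binding method, mirroring the Java API's setDisplayGeometry.
        session.SetDisplayGeometry(rotation, mViewWidth, mViewHeight);
        mDeviceRotationChanged = false;
    }

    // DisplayManager.IDisplayListener: flag a pending update when the display changes.
    public void OnDisplayChanged(int displayId)
    {
        mDeviceRotationChanged = true;
    }

    public void OnDisplayAdded(int displayId)
    {
    }

    public void OnDisplayRemoved(int displayId)
    {
    }
}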