Example #1
        /** Called to draw the current frame. */
        public void OnDrawFrame(IGL10 gl)
        {
            if (!IsActive)
            {
                return;
            }

            gl.GlClear(GL10.GlColorBufferBit | GL10.GlDepthBufferBit);

            // Call our native function to render camera content
            RecognitionFragment.RenderCamera(ViewportWidth, ViewportHeight, Angle);

            float[] mvpMatrix = new float[16];
            if (ComputeModelViewProjectionMatrix(mvpMatrix))
            {
                if (monkeyMesh != null && monkeyMesh.MeshLoaded)
                {
                    if (monkeyMesh.GLLoaded)
                    {
                        //draw our 3d mesh on top of the marker
                        monkeyMesh.DrawMesh(mvpMatrix);
                    }
                    else
                    {
                        monkeyMesh.InitMeshGL();
                    }

                    RenderUtils.CheckGLError("completed Monkey head Render");
                }
            }

            gl.GlFinish();
        }
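The members used above (IsActive, ViewportWidth, ViewportHeight, Angle, monkeyMesh) belong to the renderer class that hosts OnDrawFrame, which this page does not show. The following is only a minimal sketch of how such a renderer could be structured as a GLSurfaceView.IRenderer implementation; the class name ARRenderer and the way the properties are populated are assumptions, not part of the sample.

        using Android.Opengl;
        using Javax.Microedition.Khronos.Egl;
        using Javax.Microedition.Khronos.Opengles;

        // Hypothetical renderer skeleton; ARRenderer is an assumed name, not the SDK sample.
        public class ARRenderer : Java.Lang.Object, GLSurfaceView.IRenderer
        {
            public bool IsActive { get; set; }              // toggled by the hosting view/activity
            public int ViewportWidth { get; private set; }
            public int ViewportHeight { get; private set; }
            public int Angle { get; set; }                  // screen rotation: 0, 90, 180 or 270

            // Mesh fields such as monkeyMesh would live here as well.

            public void OnSurfaceCreated(IGL10 gl, EGLConfig config)
            {
                // The GL context is (re)created here; GPU resources are rebuilt lazily,
                // which is why OnDrawFrame calls InitMeshGL() while GLLoaded is still false.
            }

            public void OnSurfaceChanged(IGL10 gl, int width, int height)
            {
                // Remember the viewport so OnDrawFrame can pass it to RecognitionFragment.RenderCamera.
                ViewportWidth = width;
                ViewportHeight = height;
            }

            public void OnDrawFrame(IGL10 gl)
            {
                // Body as in Example #1 above.
            }
        }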
Example #2
        private void InitLayout()
        {
            SetContentView(Resource.Layout.Main);

            m_arView = new ARView(this);
            AddContentView(m_arView, new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MatchParent, FrameLayout.LayoutParams.MatchParent));

            _cameraFragment = FragmentManager.FindFragmentById<RecognitionFragment>(Resource.Id.ar_fragment);
            _cameraFragment.StartRecognition(new RecognitionOptions(RecognitionOptions.RecognitionStorage.Local, RecognitionOptions.RecognitionMode.ContinuousScan,
                                                                    new CloudRecognitionInfo(new String[] { })), this);
        }
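Note that InitLayout() only works if Resource.Layout.Main already declares the RecognitionFragment with the id ar_fragment, and if the hosting Activity also satisfies the listener interface that StartRecognition expects as its second argument (the "this" above). As a rough sketch, and only as an assumption about how the sample is wired together, the method would typically be called from the Activity's OnCreate:

        using Android.App;
        using Android.OS;

        // Hypothetical host Activity; the class name and attribute values are assumptions.
        [Activity(MainLauncher = true)]
        public class MainActivity : Activity
        {
            private ARView m_arView;
            private RecognitionFragment _cameraFragment;

            protected override void OnCreate(Bundle savedInstanceState)
            {
                base.OnCreate(savedInstanceState);
                InitLayout();   // build the view hierarchy and start marker recognition
            }

            private void InitLayout()
            {
                // Body as in Example #2 above.
            }
        }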
Example #3
 protected override void OnResume()
 {
     base.OnResume();
     //restart recognition on app resume
     _cameraFragment = FragmentManager.FindFragmentById<RecognitionFragment>(Resource.Id.ar_fragment);
     if (_cameraFragment != null)
     {
         _cameraFragment.StartRecognition(
             new RecognitionOptions(
                 RecognitionOptions.RecognitionStorage.Local,
                 RecognitionOptions.RecognitionMode.ContinuousScan,
                 new CloudRecognitionInfo(new String[] { })
                 ), this);
     }
     //resume our renderer
     if (m_arView != null)
     {
         m_arView.onResume();
     }
 }
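The sample shows only the resume path. Symmetrically, the renderer view is normally paused when the Activity leaves the foreground; the override below is an assumption (in particular, the onPause() call on ARView is hypothetical and merely mirrors the onResume() call above):

 protected override void OnPause()
 {
     base.OnPause();
     //pause our renderer when leaving the foreground
     if (m_arView != null)
     {
         m_arView.onPause();   // assumed counterpart of m_arView.onResume() above
     }
 }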
Example #4
        /** Here we compute the model-view-projection matrix for OpenGL rendering
         * from the model-view and projection matrices computed by Pikkart's AR SDK.
         * The projection matrix is rotated according to the screen orientation. */
        public bool ComputeModelViewProjectionMatrix(float[] mvpMatrix)
        {
            RenderUtils.matrix44Identity(mvpMatrix);

            float w = (float)640;
            float h = (float)480;

            float ar = (float)ViewportHeight / (float)ViewportWidth;

            if (ViewportHeight > ViewportWidth)
            {
                ar = 1.0f / ar;
            }
            float h1 = h, w1 = w;

            if (ar < h / w)
            {
                h1 = w * ar;
            }
            else
            {
                w1 = h / ar;
            }

            float a = 0f, b = 0f;

            switch (Angle)
            {
            case 0:
                a = 1f; b = 0f;
                break;

            case 90:
                a = 0f; b = 1f;
                break;

            case 180:
                a = -1f; b = 0f;
                break;

            case 270:
                a = 0f; b = -1f;
                break;

            default: break;
            }

            float[] angleMatrix = new float[16];

            angleMatrix[0]  = a; angleMatrix[1] = b; angleMatrix[2] = 0.0f; angleMatrix[3] = 0.0f;
            angleMatrix[4]  = -b; angleMatrix[5] = a; angleMatrix[6] = 0.0f; angleMatrix[7] = 0.0f;
            angleMatrix[8]  = 0.0f; angleMatrix[9] = 0.0f; angleMatrix[10] = 1.0f; angleMatrix[11] = 0.0f;
            angleMatrix[12] = 0.0f; angleMatrix[13] = 0.0f; angleMatrix[14] = 0.0f; angleMatrix[15] = 1.0f;

            float[] projectionMatrix = (float[])RecognitionFragment.GetCurrentProjectionMatrix().Clone();
            projectionMatrix[5] = projectionMatrix[5] * (h / h1);

            float[] correctedProjection = new float[16];

            RenderUtils.matrixMultiply(4, 4, angleMatrix, 4, 4, projectionMatrix, correctedProjection);

            if (RecognitionFragment.IsTracking)
            {
                float[] modelviewMatrix = RecognitionFragment.GetCurrentModelViewMatrix();
                float[] temp_mvp        = new float[16];
                RenderUtils.matrixMultiply(4, 4, correctedProjection, 4, 4, modelviewMatrix, temp_mvp);
                RenderUtils.matrix44Transpose(temp_mvp, mvpMatrix);
                return true;
            }
            return false;
        }
        /** Called to draw the current frame. */
        public void OnDrawFrame(IGL10 gl)
        {
            if (!IsActive)
            {
                return;
            }

            gl.GlClear(GL10.GlColorBufferBit | GL10.GlDepthBufferBit);

            // Call our native function to render camera content
            RecognitionFragment.RenderCamera(ViewportWidth, ViewportHeight, Angle);
            if (RecognitionFragment.IsTracking)
            {
                Marker currentMarker = RecognitionFragment.CurrentMarker;
                //Here we decide which 3d object to draw and we draw it
                if (currentMarker.Id.CompareTo("3_522") == 0)
                {
                    float[] mvMatrix = new float[16];
                    float[] pMatrix  = new float[16];
                    if (ComputeModelViewProjectionMatrix(mvMatrix, pMatrix))
                    {
                        if (videoMesh != null && videoMesh.MeshLoaded)
                        {
                            if (videoMesh.GLLoaded)
                            {
                                videoMesh.DrawMesh(mvMatrix, pMatrix);
                            }
                            else
                            {
                                videoMesh.InitMeshGL();
                            }

                            RenderUtils.CheckGLError("completed video mesh Render");
                        }
                    }
                }
                else
                {
                    float[] mvpMatrix = new float[16];
                    if (ComputeModelViewProjectionMatrix(mvpMatrix))
                    {
                        //draw our 3d mesh on top of the marker
                        if (monkeyMesh != null && monkeyMesh.MeshLoaded)
                        {
                            if (monkeyMesh.GLLoaded)
                            {
                                monkeyMesh.DrawMesh(mvpMatrix);
                            }
                            else
                            {
                                monkeyMesh.InitMeshGL();
                            }

                            RenderUtils.CheckGLError("completed Monkey head Render");
                        }
                    }
                }
            }
            //if the video is still playing and we have lost tracking, we still draw the video,
            //but in a fixed frontal position
            if (!RecognitionFragment.IsTracking && videoMesh != null && videoMesh.IsPlaying())
            {
                float[] mvMatrix = new float[16];
                float[] pMatrix  = new float[16];
                ComputeProjectionMatrix(pMatrix);

                if (Angle == 0)
                {
                    mvMatrix[0]  = 1.0f; mvMatrix[1] = 0.0f; mvMatrix[2] = 0.0f; mvMatrix[3] = -0.5f;
                    mvMatrix[4]  = 0.0f; mvMatrix[5] = -1.0f; mvMatrix[6] = 0.0f; mvMatrix[7] = 0.4f;
                    mvMatrix[8]  = 0.0f; mvMatrix[9] = 0.0f; mvMatrix[10] = -1.0f; mvMatrix[11] = -1.3f;
                    mvMatrix[12] = 0.0f; mvMatrix[13] = 0.0f; mvMatrix[14] = 0.0f; mvMatrix[15] = 1.0f;
                }
                else if (Angle == 90)
                {
                    mvMatrix[0]  = 0.0f; mvMatrix[1] = 1.0f; mvMatrix[2] = 0.0f; mvMatrix[3] = -0.5f;
                    mvMatrix[4]  = 1.0f; mvMatrix[5] = 0.0f; mvMatrix[6] = 0.0f; mvMatrix[7] = -0.5f;
                    mvMatrix[8]  = 0.0f; mvMatrix[9] = 0.0f; mvMatrix[10] = -1.0f; mvMatrix[11] = -1.3f;
                    mvMatrix[12] = 0.0f; mvMatrix[13] = 0.0f; mvMatrix[14] = 0.0f; mvMatrix[15] = 1.0f;
                }
                else if (Angle == 180)
                {
                    mvMatrix[0]  = -1.0f; mvMatrix[1] = 0.0f; mvMatrix[2] = 0.0f; mvMatrix[3] = 0.5f;
                    mvMatrix[4]  = 0.0f; mvMatrix[5] = 1.0f; mvMatrix[6] = 0.0f; mvMatrix[7] = -0.4f;
                    mvMatrix[8]  = 0.0f; mvMatrix[9] = 0.0f; mvMatrix[10] = -1.0f; mvMatrix[11] = -1.3f;
                    mvMatrix[12] = 0.0f; mvMatrix[13] = 0.0f; mvMatrix[14] = 0.0f; mvMatrix[15] = 1.0f;
                }
                else if (Angle == 270)
                {
                    mvMatrix[0]  = 0.0f; mvMatrix[1] = -1.0f; mvMatrix[2] = 0.0f; mvMatrix[3] = 0.5f;
                    mvMatrix[4]  = -1.0f; mvMatrix[5] = 0.0f; mvMatrix[6] = 0.0f; mvMatrix[7] = 0.5f;
                    mvMatrix[8]  = 0.0f; mvMatrix[9] = 0.0f; mvMatrix[10] = -1.0f; mvMatrix[11] = -1.3f;
                    mvMatrix[12] = 0.0f; mvMatrix[13] = 0.0f; mvMatrix[14] = 0.0f; mvMatrix[15] = 1.0f;
                }

                videoMesh.DrawMesh(mvMatrix, pMatrix);
                RenderUtils.CheckGLError("completed video mesh Render");
            }

            gl.GlFinish();
        }
        public bool ComputeProjectionMatrix(float[] pMatrix)
        {
            RenderUtils.matrix44Identity(pMatrix);

            float w = (float)640;
            float h = (float)480;

            float ar = (float)ViewportHeight / (float)ViewportWidth;

            if (ViewportHeight > ViewportWidth)
            {
                ar = 1.0f / ar;
            }
            float h1 = h, w1 = w;

            if (ar < h / w)
            {
                h1 = w * ar;
            }
            else
            {
                w1 = h / ar;
            }

            float a = 0f, b = 0f;

            switch (Angle)
            {
            case 0:
                a = 1f; b = 0f;
                break;

            case 90:
                a = 0f; b = 1f;
                break;

            case 180:
                a = -1f; b = 0f;
                break;

            case 270:
                a = 0f; b = -1f;
                break;

            default: break;
            }

            float[] angleMatrix = new float[16];

            angleMatrix[0]  = a; angleMatrix[1] = b; angleMatrix[2] = 0.0f; angleMatrix[3] = 0.0f;
            angleMatrix[4]  = -b; angleMatrix[5] = a; angleMatrix[6] = 0.0f; angleMatrix[7] = 0.0f;
            angleMatrix[8]  = 0.0f; angleMatrix[9] = 0.0f; angleMatrix[10] = 1.0f; angleMatrix[11] = 0.0f;
            angleMatrix[12] = 0.0f; angleMatrix[13] = 0.0f; angleMatrix[14] = 0.0f; angleMatrix[15] = 1.0f;

            float[] projectionMatrix = (float[])RecognitionFragment.GetCurrentProjectionMatrix().Clone();
            projectionMatrix[5] = projectionMatrix[5] * (h / h1);

            RenderUtils.matrixMultiply(4, 4, angleMatrix, 4, 4, projectionMatrix, pMatrix);

            return true;
        }
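A note on the switch over Angle used in both matrix methods: the (a, b) pairs are the cosine and sine of the screen rotation, so angleMatrix is a rotation about the Z axis that gets pre-multiplied onto the SDK's projection matrix. A hypothetical helper (not part of the sample) produces the same coefficients for the four cardinal angles:

        using System;

        // Hypothetical helper class, not part of the sample: for Angle values of
        // 0, 90, 180 and 270 this yields exactly the (a, b) pairs of the switch above.
        internal static class RotationUtils
        {
            public static void RotationCoefficients(int angleDegrees, out float a, out float b)
            {
                double radians = angleDegrees * Math.PI / 180.0;
                a = (float)Math.Cos(radians);   //   0 -> 1,  90 -> 0, 180 -> -1, 270 ->  0
                b = (float)Math.Sin(radians);   //   0 -> 0,  90 -> 1, 180 ->  0, 270 -> -1
            }
        }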