// Example #1
        /** Called to draw the current frame. */
        public void onDrawFrame(IGL10 gl)
        {
            // Nothing to render while the renderer is inactive.
            if (!IsActive)
            {
                return;
            }

            gl.GlClear(GL10.GlColorBufferBit | GL10.GlDepthBufferBit);

            // Native call: paint the camera preview as the scene background.
            RecognitionFragment.RenderCamera(ViewportWidth, ViewportHeight, Angle);

            float[] mvp = new float[16];
            // Overlay the monkey model only when a marker pose is available
            // and the mesh data has finished loading.
            if (computeModelViewProjectionMatrix(mvp) && monkeyMesh != null && monkeyMesh.MeshLoaded)
            {
                if (!monkeyMesh.GLLoaded)
                {
                    // First use: create the GL-side resources for the mesh.
                    monkeyMesh.InitMeshGL();
                }
                else
                {
                    //draw our 3d mesh on top of the marker
                    monkeyMesh.DrawMesh(mvp);
                }

                RenderUtils.CheckGLError("completed Monkey head Render");
            }

            gl.GlFinish();
        }
// Example #2
        /**
         * \brief Draw this mesh (in OpenGL).
         *
         * Uploads vertex positions and texture coordinates from the member
         * buffers, binds the mesh texture on texture unit 0, uploads the MVP
         * matrix, and issues an indexed triangle draw. Disables the vertex
         * attrib arrays it enabled before returning.
         * @param modelViewProjection this mesh model-view-projection matrix.
         */
        public void DrawMesh(float[] modelViewProjection)
        {
            //set up gl state: depth test on, face culling off
            //NOTE(review): GlCullFaceMode looks like the Xamarin binding name for
            //the GL_CULL_FACE enable cap (GlCullFace is taken by the method) — confirm
            GLES20.GlEnable(GLES20.GlDepthTest);
            GLES20.GlDisable(GLES20.GlCullFaceMode);
            //GLES20.glCullFace(GLES20.GL_BACK);
            //GLES20.glFrontFace(GLES20.GL_CCW);

            //set shader program to use
            GLES20.GlUseProgram(mProgram_GL_ID);
            RenderUtils.CheckGLError("DrawMesh:glUseProgram");

            //find attribs and uniforms in the shader program
            int vertexHandle = GLES20.GlGetAttribLocation(mProgram_GL_ID, "vertexPosition");
            //int normalHandle = GLES20.GlGetAttribLocation(Program_GL_ID, "vertexNormal");
            int textureCoordHandle = GLES20.GlGetAttribLocation(mProgram_GL_ID, "vertexTexCoord");
            int mvpMatrixHandle    = GLES20.GlGetUniformLocation(mProgram_GL_ID, "modelViewProjectionMatrix");
            int texSampler2DHandle = GLES20.GlGetUniformLocation(mProgram_GL_ID, "texSampler2D");

            RenderUtils.CheckGLError("DrawMesh:get attribs and uniforms");

            //upload mesh data to OpenGL attribs (tightly packed, hence stride 0)
            GLES20.GlVertexAttribPointer(vertexHandle, 3, GLES20.GlFloat, false, 0, mVertices_Buffer);
            //GLES20.GlVertexAttribPointer(normalHandle, 3, GLES20.GlFloat, false, 0, Normals_Buffer);
            GLES20.GlVertexAttribPointer(textureCoordHandle, 2, GLES20.GlFloat, false, 0, mTexCoords_Buffer);
            RenderUtils.CheckGLError("DrawMesh:put attrib pointers");

            //enable gl attribs to use
            GLES20.GlEnableVertexAttribArray(vertexHandle);
            //GLES20.GlEnableVertexAttribArray(normalHandle);
            GLES20.GlEnableVertexAttribArray(textureCoordHandle);
            RenderUtils.CheckGLError("DrawMesh:enable attrib arrays");

            // activate texture unit 0, bind the mesh texture, and point the sampler at unit 0
            GLES20.GlActiveTexture(GLES20.GlTexture0);
            GLES20.GlBindTexture(GLES20.GlTexture2d, mTexture_GL_ID);
            GLES20.GlUniform1i(texSampler2DHandle, 0);
            RenderUtils.CheckGLError("DrawMesh:activate texturing");

            // pass the model-view-projection matrix to the shader
            GLES20.GlUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
            RenderUtils.CheckGLError("DrawMesh:upload matrix");

            // draw the mesh as indexed triangles (16-bit indices)
            GLES20.GlDrawElements(GLES20.GlTriangles, mIndices_Number, GLES20.GlUnsignedShort, mIndex_Buffer);
            RenderUtils.CheckGLError("DrawMesh:draw elements");

            // disable the enabled arrays so GL attrib state is clean for other renderers
            GLES20.GlDisableVertexAttribArray(vertexHandle);
            //GLES20.GlDisableVertexAttribArray(normalHandle);
            GLES20.GlDisableVertexAttribArray(textureCoordHandle);
            RenderUtils.CheckGLError("DrawMesh:disable attrib arrays");
        }
// Example #3
        /**
         * \brief Draw the video mesh (with keyframe and icons too) (in OpenGL).
         *
         * Depending on the player state this draws either the keyframe quad
         * (video not running) or the video quad, plus a small status icon in
         * every state except PLAYING.
         * @param modelView the model-view matrix.
         * @param projection the projection matrix.
         */
        public void DrawMesh(float[] modelView, float[] projection)
        {
            PikkartVideoPlayer.VideoSate.VIDEO_STATE currentStatus = PikkartVideoPlayer.VideoSate.VIDEO_STATE.NOT_READY;
            if (mPikkartVideoPlayer != null)
            {
                currentStatus = mPikkartVideoPlayer.getVideoStatus();
                if (!mPikkartVideoPlayer.isFullscreen())
                {
                    // Pull the latest decoded frame only while actually playing.
                    if (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.PLAYING)
                    {
                        mPikkartVideoPlayer.updateVideoData();
                    }
                    // Refresh texture coordinates: the surface texture transform
                    // may crop/flip the video frame.
                    mPikkartVideoPlayer.getSurfaceTextureTransformMatrix(mTexCoordTransformationMatrix);
                    SetVideoDimensions(mPikkartVideoPlayer.getVideoWidth(), mPikkartVideoPlayer.getVideoHeight(), mTexCoordTransformationMatrix);
                    mVideoTexCoords_Buffer = FillBuffer(videoTextureCoordsTransformed);
                }
            }

            Marker currentMarker = RecognitionFragment.CurrentMarker;
            if (currentMarker == null)
            {
                // No tracked marker: nothing to anchor the mesh to.
                return;
            }

            float markerWidth = currentMarker.Width;

            GLES20.GlEnable(GLES20.GlDepthTest);
            GLES20.GlCullFace(GLES20.GlBack);
            GLES20.GlFrontFace(GLES20.GlCw);

            // In any non-running state we show the keyframe instead of the video.
            bool showKeyframe =
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.READY) ||
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.END) ||
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.NOT_READY) ||
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.ERROR);

            if (showKeyframe)
            {
                // Scale the unit quad to the marker size, preserving the keyframe aspect.
                float[] mvpMatrix = ComputeMvp(modelView, projection,
                    MakeScaleMatrix(markerWidth, markerWidth * keyframeAspectRatio, markerWidth));
                DrawKeyFrame(mvpMatrix);
            }
            else
            {
                // Scale the unit quad to the marker size, preserving the video aspect.
                float[] mvpMatrix = ComputeMvp(modelView, projection,
                    MakeScaleMatrix(markerWidth, markerWidth * videoAspectRatio, markerWidth));
                DrawVideo(mvpMatrix);
            }

            // The status icon is drawn in every state except PLAYING.
            if (showKeyframe || currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.PAUSED)
            {
                // Shrink the icon and nudge it toward the top, slightly in front.
                float[] iconTransform = MakeScaleMatrix(0.4f, 0.4f, 0.4f);
                iconTransform[3]  = 0.0f;
                iconTransform[7]  = 0.45f;
                iconTransform[11] = -0.05f;

                DrawIcon(ComputeMvp(modelView, projection, iconTransform), currentStatus);
            }
            RenderUtils.CheckGLError("VideoMesh:end video renderer");
        }

        /** \brief Build a row-major 4x4 identity matrix with the given diagonal scale factors. */
        private static float[] MakeScaleMatrix(float sx, float sy, float sz)
        {
            float[] m = new float[16];
            RenderUtils.matrix44Identity(m);
            m[0]  = sx;
            m[5]  = sy;
            m[10] = sz;
            return m;
        }

        /** \brief Compute transpose(projection * modelView * transform) into a new 16-float matrix. */
        private static float[] ComputeMvp(float[] modelView, float[] projection, float[] transform)
        {
            float[] tempMv = new float[16];
            RenderUtils.matrixMultiply(4, 4, modelView, 4, 4, transform, tempMv);
            float[] tempMvp = new float[16];
            RenderUtils.matrixMultiply(4, 4, projection, 4, 4, tempMv, tempMvp);
            float[] mvpMatrix = new float[16];
            RenderUtils.matrix44Transpose(tempMvp, mvpMatrix);
            return mvpMatrix;
        }
        /** Called to draw the current frame. */
        public void OnDrawFrame(IGL10 gl)
        {
            if (!IsActive)
            {
                return;
            }

            gl.GlClear(GL10.GlColorBufferBit | GL10.GlDepthBufferBit);

            // Call our native function to render camera content
            RecognitionFragment.RenderCamera(ViewportWidth, ViewportHeight, Angle);

            if (RecognitionFragment.IsTracking)
            {
                Marker currentMarker = RecognitionFragment.CurrentMarker;
                // Guard: tracking may report true while the marker reference is
                // (still or already) null — avoid an NRE on Id below.
                if (currentMarker != null)
                {
                    // Here we decide which 3d object to draw and we draw it.
                    // "3_522" is the marker id bound to the video content;
                    // ordinal equality replaces the culture-sensitive CompareTo.
                    if (currentMarker.Id == "3_522")
                    {
                        DrawTrackedVideoMesh();
                    }
                    else
                    {
                        DrawTrackedMonkeyMesh();
                    }
                }
            }

            // if the video is still playing and we have lost tracking, we still
            // draw the video, but in a fixed frontal position
            if (!RecognitionFragment.IsTracking && videoMesh != null && videoMesh.IsPlaying())
            {
                DrawUntrackedVideoMesh();
            }

            gl.GlFinish();
        }

        /** Draw the video mesh anchored to the currently tracked marker. */
        private void DrawTrackedVideoMesh()
        {
            float[] mvMatrix = new float[16];
            float[] pMatrix  = new float[16];
            if (!ComputeModelViewProjectionMatrix(mvMatrix, pMatrix))
            {
                return;
            }
            if (videoMesh == null || !videoMesh.MeshLoaded)
            {
                return;
            }

            if (videoMesh.GLLoaded)
            {
                videoMesh.DrawMesh(mvMatrix, pMatrix);
            }
            else
            {
                // GL resources must be created on the GL thread; do it lazily here.
                videoMesh.InitMeshGL();
            }

            RenderUtils.CheckGLError("completed video mesh Render");
        }

        /** Draw the monkey head mesh on top of the tracked marker. */
        private void DrawTrackedMonkeyMesh()
        {
            float[] mvpMatrix = new float[16];
            if (!ComputeModelViewProjectionMatrix(mvpMatrix))
            {
                return;
            }
            if (monkeyMesh == null || !monkeyMesh.MeshLoaded)
            {
                return;
            }

            if (monkeyMesh.GLLoaded)
            {
                //draw our 3d mesh on top of the marker
                monkeyMesh.DrawMesh(mvpMatrix);
            }
            else
            {
                monkeyMesh.InitMeshGL();
            }

            RenderUtils.CheckGLError("completed Monkey head Render");
        }

        /** Draw the still-playing video in a fixed frontal pose (tracking lost). */
        private void DrawUntrackedVideoMesh()
        {
            float[] mvMatrix = new float[16];
            float[] pMatrix  = new float[16];
            ComputeProjectionMatrix(pMatrix);
            FillFrontalModelView(mvMatrix);

            videoMesh.DrawMesh(mvMatrix, pMatrix);
            RenderUtils.CheckGLError("completed video mesh Render");
        }

        /**
         * Fill mvMatrix (row-major 4x4) with a fixed frontal model-view pose
         * matching the current screen rotation. Unrecognized angles leave the
         * matrix zeroed (mesh degenerates to nothing), matching prior behavior.
         */
        private void FillFrontalModelView(float[] mvMatrix)
        {
            // The first two rows (in-plane rotation plus x/y offset) depend on the angle.
            float[] upperRows;
            switch (Angle)
            {
                case 0:
                    upperRows = new float[] { 1.0f, 0.0f, 0.0f, -0.5f, 0.0f, -1.0f, 0.0f, 0.4f };
                    break;
                case 90:
                    upperRows = new float[] { 0.0f, 1.0f, 0.0f, -0.5f, 1.0f, 0.0f, 0.0f, -0.5f };
                    break;
                case 180:
                    upperRows = new float[] { -1.0f, 0.0f, 0.0f, 0.5f, 0.0f, 1.0f, 0.0f, -0.4f };
                    break;
                case 270:
                    upperRows = new float[] { 0.0f, -1.0f, 0.0f, 0.5f, -1.0f, 0.0f, 0.0f, 0.5f };
                    break;
                default:
                    return;
            }

            for (int i = 0; i < 8; i++)
            {
                mvMatrix[i] = upperRows[i];
            }
            // The last two rows are shared: push the mesh 1.3 units in front of the camera.
            mvMatrix[8]  = 0.0f; mvMatrix[9]  = 0.0f; mvMatrix[10] = -1.0f; mvMatrix[11] = -1.3f;
            mvMatrix[12] = 0.0f; mvMatrix[13] = 0.0f; mvMatrix[14] = 0.0f;  mvMatrix[15] = 1.0f;
        }