/**
         * \brief Load mesh geometry from a json asset and its texture from an image asset.
         * @param am the application AssetManager.
         * @param mesh_file path of the json mesh asset to load.
         * @param texture_file path of the texture image asset to load.
         * @return always true.
         */
        public bool InitMesh(AssetManager am, String mesh_file, String texture_file)
        {
            // GL-side resources are not valid yet; they must be (re)created on the GL thread.
            _glLoaded = false;

            LoadMesh(am, mesh_file);
            texture = RenderUtils.loadTexture(am, texture_file, textureDims);

            _meshLoaded = true;
            return true;
        }
// Example #2
        /**
         * \brief Draw this mesh (in OpenGL).
         *
         * Binds the shader program, uploads vertex and texture-coordinate buffers,
         * binds the texture on unit 0, uploads the MVP matrix and issues an
         * indexed triangle draw; GL errors are checked after each stage.
         * @param modelViewProjection this mesh model-view-projection matrix (16 floats).
         */
        public void DrawMesh(float[] modelViewProjection)
        {
            //set up gl state: depth test on, face culling off (mesh is rendered two-sided)
            GLES20.GlEnable(GLES20.GlDepthTest);
            //NOTE(review): GlCullFaceMode maps to GL_CULL_FACE_MODE, which is a glGet
            //parameter; glDisable normally takes GL_CULL_FACE — confirm the intended constant.
            GLES20.GlDisable(GLES20.GlCullFaceMode);
            //GLES20.glCullFace(GLES20.GL_BACK);
            //GLES20.glFrontFace(GLES20.GL_CCW);

            //set shader program to use
            GLES20.GlUseProgram(mProgram_GL_ID);
            RenderUtils.CheckGLError("DrawMesh:glUseProgram");

            //find attribs and uniforms in the shader program (normals currently unused)
            int vertexHandle = GLES20.GlGetAttribLocation(mProgram_GL_ID, "vertexPosition");
            //int normalHandle = GLES20.GlGetAttribLocation(Program_GL_ID, "vertexNormal");
            int textureCoordHandle = GLES20.GlGetAttribLocation(mProgram_GL_ID, "vertexTexCoord");
            int mvpMatrixHandle    = GLES20.GlGetUniformLocation(mProgram_GL_ID, "modelViewProjectionMatrix");
            int texSampler2DHandle = GLES20.GlGetUniformLocation(mProgram_GL_ID, "texSampler2D");

            RenderUtils.CheckGLError("DrawMesh:get attribs and uniforms");

            //upload mesh data to OpenGL attribs (tightly packed: stride 0)
            GLES20.GlVertexAttribPointer(vertexHandle, 3, GLES20.GlFloat, false, 0, mVertices_Buffer);
            //GLES20.GlVertexAttribPointer(normalHandle, 3, GLES20.GlFloat, false, 0, Normals_Buffer);
            GLES20.GlVertexAttribPointer(textureCoordHandle, 2, GLES20.GlFloat, false, 0, mTexCoords_Buffer);
            RenderUtils.CheckGLError("DrawMesh:put attrib pointers");

            //enable gl attribs to use
            GLES20.GlEnableVertexAttribArray(vertexHandle);
            //GLES20.GlEnableVertexAttribArray(normalHandle);
            GLES20.GlEnableVertexAttribArray(textureCoordHandle);
            RenderUtils.CheckGLError("DrawMesh:enable attrib arrays");

            // activate texture unit 0, bind our texture to it, and point the sampler at unit 0
            GLES20.GlActiveTexture(GLES20.GlTexture0);
            GLES20.GlBindTexture(GLES20.GlTexture2d, mTexture_GL_ID);
            GLES20.GlUniform1i(texSampler2DHandle, 0);
            RenderUtils.CheckGLError("DrawMesh:activate texturing");

            // pass the model-view-projection matrix to the shader (no transpose on upload)
            GLES20.GlUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
            RenderUtils.CheckGLError("DrawMesh:upload matrix");

            // finally draw the mesh as indexed triangles
            GLES20.GlDrawElements(GLES20.GlTriangles, mIndices_Number, GLES20.GlUnsignedShort, mIndex_Buffer);
            RenderUtils.CheckGLError("DrawMesh:draw elements");

            // disable the enabled arrays so state does not leak into other renderers
            GLES20.GlDisableVertexAttribArray(vertexHandle);
            //GLES20.GlDisableVertexAttribArray(normalHandle);
            GLES20.GlDisableVertexAttribArray(textureCoordHandle);
            RenderUtils.CheckGLError("DrawMesh:disable attrib arrays");
        }
// Example #3
        /** Called to draw the current frame: camera background plus the 3d mesh when tracking. */
        public void onDrawFrame(IGL10 gl)
        {
            if (!IsActive)
            {
                return;
            }

            gl.GlClear(GL10.GlColorBufferBit | GL10.GlDepthBufferBit);

            // The camera image is rendered natively behind any augmentation.
            RecognitionFragment.RenderCamera(ViewportWidth, ViewportHeight, Angle);

            float[] mvp = new float[16];
            if (computeModelViewProjectionMatrix(mvp))
            {
                // A valid matrix means the marker is tracked: overlay the 3d mesh on it.
                monkeyMesh.DrawMesh(mvp);
                RenderUtils.CheckGLError("completed Monkey head Render");
            }

            gl.GlFinish();
        }
// Example #4
        /**
         * \brief Regenerate and transform video texture coordinates.
         *
         * Updates videoAspectRatio and maps every (u, v) pair of the video quad
         * through the SurfaceTexture transform matrix into
         * videoTextureCoordsTransformed.
         * @param videoWidth video screen width.
         * @param videoHeight video screen height.
         * @param textureCoordMatrix the transform matrix for the video texture coordinates.
         */
        private void SetVideoDimensions(float videoWidth, float videoHeight, float[] textureCoordMatrix)
        {
            videoAspectRatio = videoHeight / videoWidth;

            // Transform each (u, v) pair instead of repeating the same code per corner.
            for (int i = 0; i + 1 < videoTextureCoords.Length; i += 2)
            {
                float[] uv = RenderUtils.uvMultMat4f(videoTextureCoords[i],
                                                     videoTextureCoords[i + 1],
                                                     textureCoordMatrix);
                videoTextureCoordsTransformed[i]     = uv[0];
                videoTextureCoordsTransformed[i + 1] = uv[1];
            }
        }
// Example #5
        /**
         * \brief Initialize a VideoMesh object.
         *
         * Initializes a VideoMesh object; if necessary creates its own PikkartVideoPlayer.
         * @param am the app AssetManager.
         * @param movieUrl the video to be played, URL or file path
         * @param keyframeUrl the video keyframe image to be displayed
         * @param seekPosition the starting position (in milliseconds)
         * @param autostart whether the video should autostart on detection
         * @param pikkartVideoPlayer an external PikkartVideoPlayer to use. If null, an internal one will be created.
         * @return true on success.
         */
        public bool InitMesh(AssetManager am,
                             String movieUrl,
                             String keyframeUrl,
                             int seekPosition,
                             bool autostart,
                             PikkartVideoPlayer pikkartVideoPlayer)
        {
            _glLoaded = false;

            GenerateMesh();

            if (pikkartVideoPlayer != null)
            {
                mPikkartVideoPlayer = pikkartVideoPlayer;
            }
            else
            {
                // No external player supplied: own a private one bound to the parent activity.
                mPikkartVideoPlayer = new PikkartVideoPlayer();
                mPikkartVideoPlayer.init();
                mPikkartVideoPlayer.setActivity(mParentActivity);
            }

            mMovieUrl     = movieUrl;
            mSeekPosition = seekPosition;
            mAutostart    = autostart;

            // Keyframe shown while the video is not playing; remember its aspect ratio.
            mKeyframeTextureByteBuffer = RenderUtils.loadTexture(am, keyframeUrl, mKeyframeTextureDims);
            keyframeAspectRatio        = (float)mKeyframeTextureDims[1] / (float)mKeyframeTextureDims[0];

            mTexCoordTransformationMatrix = new float[16];

            // Status icons overlaid on the video depending on the player state.
            mIconBusyTextureByteBuffer  = RenderUtils.loadTexture(am, "media/busy.png", mIconBusyTextureDims);
            mIconPlayTextureByteBuffer  = RenderUtils.loadTexture(am, "media/play.png", mIconPlayTextureDims);
            mIconErrorTextureByteBuffer = RenderUtils.loadTexture(am, "media/error.png", mIconErrorTextureDims);

            _meshLoaded = true;
            return true;
        }
// Example #6
        /** Here we compute the model-view-projection matrix for OpenGL rendering
         * from the model-view and projection matrix computed by Pikkart's AR SDK.
         * The projection matrix is rotated according to the screen orientation.
         * @param mvpMatrix output 16-float matrix; left as identity when not tracking.
         * @return true when a marker is tracked and mvpMatrix was filled, false otherwise.
         */
        public bool computeModelViewProjectionMatrix(float[] mvpMatrix)
        {
            RenderUtils.matrix44Identity(mvpMatrix);

            // Reference camera frame size used by the SDK projection.
            float w = 640.0f;
            float h = 480.0f;

            // Viewport aspect ratio, normalized so that ar <= 1 maps to landscape.
            float ar = (float)ViewportHeight / (float)ViewportWidth;
            if (ViewportHeight > ViewportWidth)
            {
                ar = 1.0f / ar;
            }

            // Effective frame height after fitting the camera frame into the viewport
            // (the matching width is not needed for the correction below).
            float h1 = (ar < h / w) ? (w * ar) : h;

            // (a, b) = (cos, sin) of the screen rotation angle.
            float a = 0f, b = 0f;
            switch (Angle)
            {
            case 0:
                a = 1f; b = 0f;
                break;

            case 90:
                a = 0f; b = 1f;
                break;

            case 180:
                a = -1f; b = 0f;
                break;

            case 270:
                a = 0f; b = -1f;
                break;

            default: break;
            }

            // Rotation about the z axis matching the screen orientation.
            float[] angleMatrix = new float[16];
            angleMatrix[0]  = a;    angleMatrix[1]  = b;    angleMatrix[2]  = 0.0f; angleMatrix[3]  = 0.0f;
            angleMatrix[4]  = -b;   angleMatrix[5]  = a;    angleMatrix[6]  = 0.0f; angleMatrix[7]  = 0.0f;
            angleMatrix[8]  = 0.0f; angleMatrix[9]  = 0.0f; angleMatrix[10] = 1.0f; angleMatrix[11] = 0.0f;
            angleMatrix[12] = 0.0f; angleMatrix[13] = 0.0f; angleMatrix[14] = 0.0f; angleMatrix[15] = 1.0f;

            // Clone so the SDK's matrix is not modified, then correct the vertical scale.
            float[] projectionMatrix = (float[])RecognitionFragment.GetCurrentProjectionMatrix().Clone();
            projectionMatrix[5] = projectionMatrix[5] * (h / h1);

            float[] correctedProjection = new float[16];
            RenderUtils.matrixMultiply(4, 4, angleMatrix, 4, 4, projectionMatrix, correctedProjection);

            if (!RecognitionFragment.IsTracking)
            {
                // No marker tracked: leave mvpMatrix as identity.
                return false;
            }

            float[] modelviewMatrix = RecognitionFragment.GetCurrentModelViewMatrix();
            float[] temp_mvp        = new float[16];
            RenderUtils.matrixMultiply(4, 4, correctedProjection, 4, 4, modelviewMatrix, temp_mvp);
            // Transpose into the layout expected by the GL upload.
            RenderUtils.matrix44Transpose(temp_mvp, mvpMatrix);
            return true;
        }
// Example #7
        /**
         * \brief Build the final column-major MVP matrix as transpose(projection * (modelView * localTransform)).
         * @param modelView the marker model-view matrix.
         * @param projection the (corrected) projection matrix.
         * @param localTransform local scale/translate applied to the quad before the model-view.
         * @return a newly allocated 16-float matrix ready for the shader.
         */
        private static float[] ComposeMvpMatrix(float[] modelView, float[] projection, float[] localTransform)
        {
            float[] temp_mv = new float[16];
            RenderUtils.matrixMultiply(4, 4, modelView, 4, 4, localTransform, temp_mv);

            float[] temp_mvp = new float[16];
            RenderUtils.matrixMultiply(4, 4, projection, 4, 4, temp_mv, temp_mvp);

            float[] mvpMatrix = new float[16];
            RenderUtils.matrix44Transpose(temp_mvp, mvpMatrix);
            return mvpMatrix;
        }

        /**
         * \brief Draw the video mesh (with keyframe and icons too) (in OpenGL).
         *
         * Depending on the player state this draws either the keyframe image or the
         * current video frame, plus a status icon (play/busy/error) when relevant.
         * @param modelView the model-view matrix.
         * @param projection the projection matrix.
         */
        public void DrawMesh(float[] modelView, float[] projection)
        {
            PikkartVideoPlayer.VideoSate.VIDEO_STATE currentStatus = PikkartVideoPlayer.VideoSate.VIDEO_STATE.NOT_READY;
            if (mPikkartVideoPlayer != null)
            {
                currentStatus = mPikkartVideoPlayer.getVideoStatus();
                if (!mPikkartVideoPlayer.isFullscreen())
                {
                    if (mPikkartVideoPlayer.getVideoStatus() == PikkartVideoPlayer.VideoSate.VIDEO_STATE.PLAYING)
                    {
                        mPikkartVideoPlayer.updateVideoData();
                    }
                    // Re-project the video texture coordinates with the current SurfaceTexture transform.
                    mPikkartVideoPlayer.getSurfaceTextureTransformMatrix(mTexCoordTransformationMatrix);
                    SetVideoDimensions(mPikkartVideoPlayer.getVideoWidth(), mPikkartVideoPlayer.getVideoHeight(), mTexCoordTransformationMatrix);
                    mVideoTexCoords_Buffer = FillBuffer(videoTextureCoordsTransformed);
                }
            }

            Marker currentMarker = RecognitionFragment.CurrentMarker;
            if (currentMarker == null)
            {
                // Nothing to anchor the video quad to.
                return;
            }

            float markerWidth = currentMarker.Width;

            GLES20.GlEnable(GLES20.GlDepthTest);
            //GLES20.GlDisable(GLES20.GlCullFaceMode);
            GLES20.GlCullFace(GLES20.GlBack);
            GLES20.GlFrontFace(GLES20.GlCw);

            // While the video is not actually playing (or failed) show the keyframe image.
            bool showKeyframe =
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.READY) ||
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.END) ||
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.NOT_READY) ||
                (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.ERROR);

            // Quad is scaled to the marker width; height follows the image/video aspect ratio.
            float[] scaleMatrix = new float[16];
            RenderUtils.matrix44Identity(scaleMatrix);
            scaleMatrix[0]  = markerWidth;
            scaleMatrix[5]  = markerWidth * (showKeyframe ? keyframeAspectRatio : videoAspectRatio);
            scaleMatrix[10] = markerWidth;

            float[] quadMvp = ComposeMvpMatrix(modelView, projection, scaleMatrix);
            if (showKeyframe)
            {
                DrawKeyFrame(quadMvp);
            }
            else
            {
                DrawVideo(quadMvp);
            }

            // Icon states are the keyframe states plus PAUSED.
            if (showKeyframe || (currentStatus == PikkartVideoPlayer.VideoSate.VIDEO_STATE.PAUSED))
            {
                float[] translateMatrix = new float[16];
                RenderUtils.matrix44Identity(translateMatrix);
                // Scale the icon down...
                translateMatrix[0]  = 0.4f;
                translateMatrix[5]  = 0.4f;
                translateMatrix[10] = 0.4f;
                // ...and shift it up and slightly towards the camera.
                translateMatrix[3]  = 0.0f;
                translateMatrix[7]  = 0.45f;
                translateMatrix[11] = -0.05f;

                DrawIcon(ComposeMvpMatrix(modelView, projection, translateMatrix), currentStatus);
            }
            RenderUtils.CheckGLError("VideoMesh:end video renderer");
        }
        /** \brief Fill mvMatrix with the fixed frontal pose used when tracking is lost,
         * matching the current screen rotation. Unknown angles leave the matrix untouched. */
        private void BuildFrontalModelView(float[] mvMatrix)
        {
            // (a, b) = in-plane rotation terms, (tx, ty) = screen-space offset per orientation.
            float a, b, tx, ty;
            switch (Angle)
            {
            case 0:   a = 1.0f;  b = 0.0f;  tx = -0.5f; ty = 0.4f;  break;
            case 90:  a = 0.0f;  b = 1.0f;  tx = -0.5f; ty = -0.5f; break;
            case 180: a = -1.0f; b = 0.0f;  tx = 0.5f;  ty = -0.4f; break;
            case 270: a = 0.0f;  b = -1.0f; tx = 0.5f;  ty = 0.5f;  break;
            default:  return;
            }

            mvMatrix[0]  = a;    mvMatrix[1]  = b;    mvMatrix[2]  = 0.0f;  mvMatrix[3]  = tx;
            mvMatrix[4]  = b;    mvMatrix[5]  = -a;   mvMatrix[6]  = 0.0f;  mvMatrix[7]  = ty;
            mvMatrix[8]  = 0.0f; mvMatrix[9]  = 0.0f; mvMatrix[10] = -1.0f; mvMatrix[11] = -1.3f;
            mvMatrix[12] = 0.0f; mvMatrix[13] = 0.0f; mvMatrix[14] = 0.0f;  mvMatrix[15] = 1.0f;
        }

        /** \brief Draw the video mesh on the tracked marker, lazily creating its GL resources. */
        private void DrawTrackedVideoMesh()
        {
            float[] mvMatrix = new float[16];
            float[] pMatrix  = new float[16];
            if (!ComputeModelViewProjectionMatrix(mvMatrix, pMatrix))
            {
                return;
            }
            if (videoMesh == null || !videoMesh.MeshLoaded)
            {
                return;
            }
            if (videoMesh.GLLoaded)
            {
                videoMesh.DrawMesh(mvMatrix, pMatrix);
            }
            else
            {
                // GL resources are created on the GL thread the first time we get here.
                videoMesh.InitMeshGL();
            }
            RenderUtils.CheckGLError("completed video mesh Render");
        }

        /** \brief Draw the monkey mesh on the tracked marker, lazily creating its GL resources. */
        private void DrawTrackedMonkeyMesh()
        {
            float[] mvpMatrix = new float[16];
            if (!ComputeModelViewProjectionMatrix(mvpMatrix))
            {
                return;
            }
            if (monkeyMesh == null || !monkeyMesh.MeshLoaded)
            {
                return;
            }
            if (monkeyMesh.GLLoaded)
            {
                monkeyMesh.DrawMesh(mvpMatrix);
            }
            else
            {
                // GL resources are created on the GL thread the first time we get here.
                monkeyMesh.InitMeshGL();
            }
            RenderUtils.CheckGLError("completed Monkey head Render");
        }

        /** Called to draw the current frame. */
        public void OnDrawFrame(IGL10 gl)
        {
            if (!IsActive)
            {
                return;
            }

            gl.GlClear(GL10.GlColorBufferBit | GL10.GlDepthBufferBit);

            // Call our native function to render camera content
            RecognitionFragment.RenderCamera(ViewportWidth, ViewportHeight, Angle);

            if (RecognitionFragment.IsTracking)
            {
                Marker currentMarker = RecognitionFragment.CurrentMarker;
                // Guard against the marker disappearing between IsTracking and CurrentMarker;
                // the original code dereferenced it unchecked.
                if (currentMarker != null)
                {
                    // Here we decide which 3d object to draw (ordinal compare: marker ids are opaque keys).
                    if (string.Equals(currentMarker.Id, "3_522", StringComparison.Ordinal))
                    {
                        DrawTrackedVideoMesh();
                    }
                    else
                    {
                        DrawTrackedMonkeyMesh();
                    }
                }
            }

            //if the video is still playing and we have lost tracking, we still draw the video,
            //but in a fixed frontal position
            if (!RecognitionFragment.IsTracking && videoMesh != null && videoMesh.IsPlaying())
            {
                float[] mvMatrix = new float[16];
                float[] pMatrix  = new float[16];
                ComputeProjectionMatrix(pMatrix);
                BuildFrontalModelView(mvMatrix);

                videoMesh.DrawMesh(mvMatrix, pMatrix);
                RenderUtils.CheckGLError("completed video mesh Render");
            }

            gl.GlFinish();
        }
        /** \brief Compute the orientation-corrected projection matrix from the SDK's
         * current projection, rotated to match the screen orientation.
         * @param pMatrix output 16-float projection matrix.
         * @return always true.
         */
        public bool ComputeProjectionMatrix(float[] pMatrix)
        {
            RenderUtils.matrix44Identity(pMatrix);

            // Reference camera frame size used by the SDK projection.
            float w = 640.0f;
            float h = 480.0f;

            // Viewport aspect ratio, normalized so that ar <= 1 maps to landscape.
            float ar = (float)ViewportHeight / (float)ViewportWidth;
            if (ViewportHeight > ViewportWidth)
            {
                ar = 1.0f / ar;
            }

            // Effective frame height after fitting the camera frame into the viewport
            // (the matching width is not needed for the correction below).
            float h1 = (ar < h / w) ? (w * ar) : h;

            // (a, b) = (cos, sin) of the screen rotation angle.
            float a = 0f, b = 0f;
            switch (Angle)
            {
            case 0:
                a = 1f; b = 0f;
                break;

            case 90:
                a = 0f; b = 1f;
                break;

            case 180:
                a = -1f; b = 0f;
                break;

            case 270:
                a = 0f; b = -1f;
                break;

            default: break;
            }

            // Rotation about the z axis matching the screen orientation.
            float[] angleMatrix = new float[16];
            angleMatrix[0]  = a;    angleMatrix[1]  = b;    angleMatrix[2]  = 0.0f; angleMatrix[3]  = 0.0f;
            angleMatrix[4]  = -b;   angleMatrix[5]  = a;    angleMatrix[6]  = 0.0f; angleMatrix[7]  = 0.0f;
            angleMatrix[8]  = 0.0f; angleMatrix[9]  = 0.0f; angleMatrix[10] = 1.0f; angleMatrix[11] = 0.0f;
            angleMatrix[12] = 0.0f; angleMatrix[13] = 0.0f; angleMatrix[14] = 0.0f; angleMatrix[15] = 1.0f;

            // Clone so the SDK's matrix is not modified, then correct the vertical scale.
            float[] projectionMatrix = (float[])RecognitionFragment.GetCurrentProjectionMatrix().Clone();
            projectionMatrix[5] = projectionMatrix[5] * (h / h1);

            RenderUtils.matrixMultiply(4, 4, angleMatrix, 4, 4, projectionMatrix, pMatrix);

            return true;
        }