Example No. 1
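 // Draws the current frame of the external SurfaceTexture onto the active EGL surface;
 // the color buffer is cleared to green first so missing pixels are easy to spot.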
 public void DrawFrame(SurfaceTexture st)
 {
     CheckGlError("onDrawFrame start");
     st.GetTransformMatrix(mSTMatrix);
     GLES20.GlClearColor(0.0f, 1.0f, 0.0f, 1.0f);
     GLES20.GlClear(GLES20.GlDepthBufferBit | GLES20.GlColorBufferBit);
     GLES20.GlUseProgram(mProgram);
     CheckGlError("glUseProgram");
     GLES20.GlActiveTexture(GLES20.GlTexture0);
     GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, _textureID);
     mTriangleVertices.Position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
     GLES20.GlVertexAttribPointer(maPositionHandle, 3, GLES20.GlFloat, false,
                                  TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
     CheckGlError("glVertexAttribPointer maPosition");
     GLES20.GlEnableVertexAttribArray(maPositionHandle);
     CheckGlError("glEnableVertexAttribArray maPositionHandle");
     mTriangleVertices.Position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
     GLES20.GlVertexAttribPointer(maTextureHandle, 2, GLES20.GlFloat, false,
                                  TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
     CheckGlError("glVertexAttribPointer maTextureHandle");
     GLES20.GlEnableVertexAttribArray(maTextureHandle);
     CheckGlError("glEnableVertexAttribArray maTextureHandle");
     Android.Opengl.Matrix.SetIdentityM(mMVPMatrix, 0);
     GLES20.GlUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
     GLES20.GlUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
     GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);
     CheckGlError("glDrawArrays");
     GLES20.GlFinish();
 }
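The snippet relies on a CheckGlError helper that is not shown. A minimal sketch, assuming the usual pattern of draining the GL error queue and failing fast (TAG and the thrown exception type are assumptions, not taken from the original class):

 private static void CheckGlError(string op)
 {
     int error;
     // Report and throw on the first GL error recorded since the previous check.
     while ((error = GLES20.GlGetError()) != GLES20.GlNoError)
     {
         Android.Util.Log.Error(TAG, op + ": glError " + error);
         throw new Java.Lang.RuntimeException(op + ": glError " + error);
     }
 }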
Example No. 2
 /**
  * \brief Get the surface-texture transformation matrix (used to transform texture coordinates in OpenGL).
  * @param mtx the float array in which to store the matrix data
  */
 public void getSurfaceTextureTransformMatrix(float[] mtx)
 {
     mSurfaceTextureLock.Lock();
     if (mSurfaceTexture != null)
     {
         mSurfaceTexture.GetTransformMatrix(mtx);
     }
     mSurfaceTextureLock.Unlock();
 }
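A caller is expected to pass a preallocated float[16], since the transform is a 4x4 column-major matrix. A hypothetical usage sketch:

 float[] texMatrix = new float[16];
 getSurfaceTextureTransformMatrix(texMatrix);
 // The matrix can then be uploaded as a shader uniform, e.g.
 // GLES20.GlUniformMatrix4fv(muSTMatrixHandle, 1, false, texMatrix, 0);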
Example No. 3
            public void OnDrawFrame(Javax.Microedition.Khronos.Opengles.IGL10 glUnused)
            {
                if (_updateSurface)
                {
                    _surfaceTexture.UpdateTexImage();
                    _surfaceTexture.GetTransformMatrix(_STMatrix);
                    _updateSurface = false;
                }

                GLES20.GlUseProgram(0);
                GLES20.GlUseProgram(_glProgram);
                GLES20.GlActiveTexture(GLES20.GlTexture2);
                var tWidth  = _width;
                var tHeight = _height;

                funnyGhostEffectBuffer = ByteBuffer.AllocateDirect(tWidth * tHeight * 4);
                funnyGhostEffectBuffer.Order(ByteOrder.NativeOrder());
                funnyGhostEffectBuffer.Position(0);

                // Note: GlReadPixels returns rows bottom-up (lower-left origin) rather than
                // top-left to bottom-right, so passing the buffer straight to TexImage2D
                // intentionally adds the reversed/mirrored "ghost" effect.
                GLES20.GlReadPixels(0, 0, tWidth - 1, tHeight - 1, GLES20.GlRgba, GLES20.GlUnsignedByte, funnyGhostEffectBuffer);
                updateTargetTexture(tWidth, tHeight);
                GLES20.GlBindTexture(GLES20.GlTexture2d, _otherTextureId);
                GLES20.GlUniform1i(_otherTextureUniform, 2);

                GLES20.GlUseProgram(0);
                GLES20.GlUseProgram(_glProgram);
                GLES20.GlActiveTexture(GLES20.GlTexture1);
                GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, _OESTextureId);
                GLES20.GlUniform1i(_OESTextureUniform, 1);

                _triangleVertices.Position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.GlVertexAttribPointer(_aPositionHandle, 3, GLES20.GlFloat, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, _triangleVertices);
                GLES20.GlEnableVertexAttribArray(_aPositionHandle);

                _textureVertices.Position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.GlVertexAttribPointer(_aTextureCoord, 2, GLES20.GlFloat, false, TEXTURE_VERTICES_DATA_STRIDE_BYTES, _textureVertices);
                GLES20.GlEnableVertexAttribArray(_aTextureCoord);

                Android.Opengl.Matrix.SetIdentityM(_MVPMatrix, 0);
                GLES20.GlUniformMatrix4fv(_uMVPMatrixHandle, 1, false, _MVPMatrix, 0);
                GLES20.GlUniformMatrix4fv(_uSTMatrixHandle, 1, false, _STMatrix, 0);

                GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);

                GLES20.GlFinish();
            }
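updateTargetTexture is not shown above; presumably it uploads the buffer captured by GlReadPixels into _otherTextureId. A sketch under that assumption:

            private void updateTargetTexture(int width, int height)
            {
                // Upload the read-back pixels into the ordinary 2D texture that the
                // shader samples for the ghost effect.
                GLES20.GlBindTexture(GLES20.GlTexture2d, _otherTextureId);
                GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlRgba, width, height, 0,
                                    GLES20.GlRgba, GLES20.GlUnsignedByte, funnyGhostEffectBuffer);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlLinear);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlLinear);
            }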
Example No. 4
            /**
             * Draws the external texture in SurfaceTexture onto the current EGL surface.
             */
            public void drawFrame(SurfaceTexture st, bool invert)
            {
                checkGlError("onDrawFrame start");
                st.GetTransformMatrix(mSTMatrix);
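                // Negating the Y scale (element 5) and mirroring the Y translation (element 13)
                // of the column-major transform matrix flips the texture vertically.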
                if (invert)
                {
                    mSTMatrix[5]  = -mSTMatrix[5];
                    mSTMatrix[13] = 1.0f - mSTMatrix[13];
                }

                // (optional) clear to green so we can see if we're failing to set pixels
                GLES20.GlClearColor(0.0f, 1.0f, 0.0f, 1.0f);
                GLES20.GlClear(GLES20.GlColorBufferBit);

                GLES20.GlUseProgram(mProgram);
                checkGlError("glUseProgram");

                GLES20.GlActiveTexture(GLES20.GlTexture0);
                GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, mTextureID);

                mTriangleVertices.Position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.GlVertexAttribPointer(maPositionHandle, 3, GLES20.GlFloat, false,
                                             TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
                checkGlError("glVertexAttribPointer maPosition");
                GLES20.GlEnableVertexAttribArray(maPositionHandle);
                checkGlError("glEnableVertexAttribArray maPositionHandle");

                mTriangleVertices.Position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.GlVertexAttribPointer(maTextureHandle, 2, GLES20.GlFloat, false,
                                             TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
                checkGlError("glVertexAttribPointer maTextureHandle");
                GLES20.GlEnableVertexAttribArray(maTextureHandle);
                checkGlError("glEnableVertexAttribArray maTextureHandle");

                Android.Opengl.Matrix.SetIdentityM(mMVPMatrix, 0);
                GLES20.GlUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
                GLES20.GlUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

                GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);
                checkGlError("glDrawArrays");

                GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, 0);
            }
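The attribute and uniform handles used above (maPositionHandle, maTextureHandle, muMVPMatrixHandle, muSTMatrixHandle) match the usual external-texture shader pair. A plausible sketch of the shader sources, given as an assumption rather than the original program's code:

            private const string VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

            private const string FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";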
Example No. 5
            public override void Run()
            {
                Prepare();

                while (!mStopped)
                {
                    if (_externalVideoInputManager._curVideoInput != _externalVideoInputManager._newVideoInput)
                    {
                        Log.Info(TAG, "New video input selected");
                        // The current video input is running, but a new input
                        // has just been selected. The new input may be null,
                        // meaning no video source will be used from now on.
                        if (_externalVideoInputManager._curVideoInput != null)
                        {
                            _externalVideoInputManager._curVideoInput.OnVideoStopped(mThreadContext);
                            Log.Info(TAG, "recycle stopped input");
                        }

                        _externalVideoInputManager._curVideoInput = _externalVideoInputManager._newVideoInput;
                        if (_externalVideoInputManager._curVideoInput != null)
                        {
                            _externalVideoInputManager._curVideoInput.OnVideoInitialized(mSurface);
                            Log.Info(TAG, "initialize new input");
                        }

                        if (_externalVideoInputManager._curVideoInput == null)
                        {
                            continue;
                        }

                        Size size = _externalVideoInputManager._curVideoInput.OnGetFrameSize();
                        mVideoWidth  = size.Width;
                        mVideoHeight = size.Height;
                        mSurfaceTexture.SetDefaultBufferSize(mVideoWidth, mVideoHeight);

                        if (mPaused)
                        {
                            // If the thread is paused at this point, it was paused
                            // while switching external video sources, so resume it.
                            mPaused = false;
                        }
                    }
                    else if (_externalVideoInputManager._curVideoInput != null && !_externalVideoInputManager._curVideoInput.IsRunning)
                    {
                        // The current video source has stopped on its own
                        // (e.g. playback has completed). Invoke the callback so
                        // it can clean up or release resources, and clear the
                        // new video source as well, since no new input is being
                        // introduced at this point.
                        Log.Info(TAG, "current video input is not running");
                        _externalVideoInputManager._curVideoInput.OnVideoStopped(mThreadContext);
                        _externalVideoInputManager._curVideoInput = null;
                        _externalVideoInputManager._newVideoInput = null;
                    }

                    if (mPaused || _externalVideoInputManager._curVideoInput == null)
                    {
                        WaitForTime(DEFAULT_WAIT_TIME);
                        continue;
                    }

                    try
                    {
                        mSurfaceTexture.UpdateTexImage();
                        mSurfaceTexture.GetTransformMatrix(mTransform);
                    }
                    catch (Java.Lang.Exception e)
                    {
                        e.PrintStackTrace();
                    }

                    if (_externalVideoInputManager._curVideoInput != null)
                    {
                        _externalVideoInputManager._curVideoInput.OnFrameAvailable(mThreadContext, mTextureId, mTransform);
                    }

                    mEglCore.MakeCurrent(mEglSurface);
                    GLES20.GlViewport(0, 0, mVideoWidth, mVideoHeight);

                    if (_externalVideoInputManager._consumer != null)
                    {
                        Log.Error(TAG, "publish stream with ->width:" + mVideoWidth + ",height:" + mVideoHeight);

                        /** Receives the video frame as a texture and pushes it out.
                         * @param textureId ID of the texture
                         * @param format Pixel format of the video frame
                         * @param width Width of the video frame
                         * @param height Height of the video frame
                         * @param rotation Clockwise rotation angle (0, 90, 180, or 270 degrees) of the video frame
                         * @param timestamp Timestamp of the video frame; each frame must carry a timestamp
                         * @param matrix Transform matrix of the texture; the float values lie between 0 and 1 (e.g. 0.1, 0.2, and so on) */
                        _externalVideoInputManager._textureTransformer.Copy(mTextureId, MediaIO.PixelFormat.TextureOes.IntValue(), mVideoWidth, mVideoHeight);
                        // Note: DateTime.Now.Millisecond only yields 0-999, so a wall-clock
                        // millisecond timestamp is used for the frame instead.
                        _externalVideoInputManager._consumer.ConsumeTextureFrame(mTextureId,
                                                                                 MediaIO.PixelFormat.TextureOes.IntValue(),
                                                                                 mVideoWidth, mVideoHeight, 0,
                                                                                 Java.Lang.JavaSystem.CurrentTimeMillis(), mTransform);
                    }

                    // The pace at which the output Surface is sampled
                    // for video frames is controlled by the waiting
                    // time returned from the external video source.
                    WaitForNextFrame();
                }

                if (_externalVideoInputManager._curVideoInput != null)
                {
                    // The manager will cause the current
                    // video source to be stopped.
                    _externalVideoInputManager._curVideoInput.OnVideoStopped(mThreadContext);
                }
                Release();
            }
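WaitForTime and WaitForNextFrame are pacing helpers that are not shown. A minimal sketch, assuming a plain sleep-based wait (the real implementation presumably asks the current video input for its preferred interval):

            private void WaitForTime(long waitTimeMs)
            {
                try
                {
                    // Block the capture thread for the requested interval.
                    Java.Lang.Thread.Sleep(waitTimeMs);
                }
                catch (Java.Lang.InterruptedException e)
                {
                    e.PrintStackTrace();
                }
            }

            private void WaitForNextFrame()
            {
                // Fall back to the default interval; the original presumably queries the input.
                WaitForTime(DEFAULT_WAIT_TIME);
            }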