Example #1
        public Square()
        {
            // initialize vertex byte buffer for shape coordinates
            ByteBuffer bb = ByteBuffer.AllocateDirect(
                // (# of coordinate values * 4 bytes per float)
                squareCoords.Length * 4);

            bb.Order(ByteOrder.NativeOrder());
            vertexBuffer = bb.AsFloatBuffer();
            vertexBuffer.Put(squareCoords);
            vertexBuffer.Position(0);

            // initialize byte buffer for the draw list
            ByteBuffer dlb = ByteBuffer.AllocateDirect(
                // (# of index values * 2 bytes per short)
                drawOrder.Length * 2);

            dlb.Order(ByteOrder.NativeOrder());
            drawListBuffer = dlb.AsShortBuffer();
            drawListBuffer.Put(drawOrder);
            drawListBuffer.Position(0);

            // prepare shaders and OpenGL program
            int vertexShader = MyGLRenderer.LoadShader(GLES30.GlVertexShader,
                                                       vertexShaderCode);
            int fragmentShader = MyGLRenderer.LoadShader(GLES30.GlFragmentShader,
                                                         fragmentShaderCode);

            mProgram = GLES30.GlCreateProgram();                          // create empty OpenGL Program
            GLES30.GlAttachShader(mProgram, vertexShader);                // add the vertex shader to program
            GLES30.GlAttachShader(mProgram, fragmentShader);              // add the fragment shader to program
            GLES30.GlLinkProgram(mProgram);                               // create OpenGL program executables
        }
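The constructor above only fills the vertex and index buffers and links the program; rendering happens in a separate draw call. Below is a minimal sketch of such a call, assuming three coordinates per vertex and the shader attribute/uniform names "vPosition" and "uMVPMatrix" (both assumptions, not taken from this example):

        public void Draw(float[] mvpMatrix)
        {
            GLES30.GlUseProgram(mProgram);

            // Feed the vertex positions from the direct FloatBuffer prepared above
            // (assumes 3 coordinates per vertex, tightly packed).
            int positionHandle = GLES30.GlGetAttribLocation(mProgram, "vPosition");
            GLES30.GlEnableVertexAttribArray(positionHandle);
            GLES30.GlVertexAttribPointer(positionHandle, 3, GLES30.GlFloat, false,
                                         3 * 4, vertexBuffer);

            // Apply the combined model-view-projection matrix.
            int mvpHandle = GLES30.GlGetUniformLocation(mProgram, "uMVPMatrix");
            GLES30.GlUniformMatrix4fv(mvpHandle, 1, false, mvpMatrix, 0);

            // Draw the square as two triangles via the short index buffer.
            GLES30.GlDrawElements(GLES30.GlTriangles, drawOrder.Length,
                                  GLES30.GlUnsignedShort, drawListBuffer);

            GLES30.GlDisableVertexAttribArray(positionHandle);
        }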
        /// <summary>
        /// Read and parse the Wavefront (.obj) file in the Assets folder.
        /// </summary>
        /// <param name="context">Context.</param>
        /// <returns>An Optional wrapping the parsed ObjectData.</returns>
        private Optional ReadObject(Context context)
        {
            LoadResult   obj    = null;
            AssetManager assets = context.Assets;

            using (StreamReader sr = new StreamReader(assets.Open(objFileName)))
            {
                obj = Load(sr.ReadToEnd());
            }

            int       numVerticesPerFace = 3;
            IntBuffer objectIndices      = ObjDataBufferHelper.GetFaceVerticesIntBuffer(obj.Groups, numVerticesPerFace);

            FloatBuffer objectVertices = ObjDataBufferHelper.GetVerticesFloatBuffer(obj.Vertices);

            CalculateBoundingBox(objectVertices);

            // Allocate 2 bytes per short per index.
            ShortBuffer indices = ByteBuffer.AllocateDirect(2 * objectIndices.Limit())
                                  .Order(ByteOrder.NativeOrder()).AsShortBuffer();

            while (objectIndices.HasRemaining)
            {
                indices.Put((short)objectIndices.Get());
            }
            indices.Rewind();

            int         dimensionOfTextures = 2;
            FloatBuffer texCoordinates      = ObjDataBufferHelper.GetTexturesFloatBuffer(obj.Textures, dimensionOfTextures);
            FloatBuffer normals             = ObjDataBufferHelper.GetNormalsFloatBuffer(obj.Normals);

            return(Optional.Of(new ObjectData(objectIndices, objectVertices, indices, texCoordinates, normals)));
        }
        private ByteBuffer GetPhotoAsByteBuffer(byte[] bytes, int width, int height)
        {
            var modelInputSize = FloatSize * height * width * PixelSize;

            var bitmap = BitmapFactory.DecodeByteArray(bytes, 0, bytes.Length);
            var resizedBitmap = Bitmap.CreateScaledBitmap(bitmap, width, height, true);

            var byteBuffer = ByteBuffer.AllocateDirect(modelInputSize);
            byteBuffer.Order(ByteOrder.NativeOrder());

            var pixels = new int[width * height];
            resizedBitmap.GetPixels(pixels, 0, resizedBitmap.Width, 0, 0, resizedBitmap.Width, resizedBitmap.Height);

            var pixel = 0;

            for (var i = 0; i < width; i++)
            {
                for (var j = 0; j < height; j++)
                {
                    var pixelVal = pixels[pixel++];

                    byteBuffer.PutFloat(pixelVal >> 16 & 0xFF);
                    byteBuffer.PutFloat(pixelVal >> 8 & 0xFF);
                    byteBuffer.PutFloat(pixelVal & 0xFF);
                }
            }

            bitmap.Recycle();

            return byteBuffer;
        }
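The helper above writes raw 0-255 channel values as floats. Float models are often trained on inputs normalized to the 0-1 range instead; the sketch below is a hedged variant under that assumption, with the FloatSize and PixelSize constants replaced by their likely literal values (4 bytes per float, 3 RGB channels per pixel):

        private ByteBuffer GetNormalizedPhotoAsByteBuffer(byte[] bytes, int width, int height)
        {
            var bitmap = BitmapFactory.DecodeByteArray(bytes, 0, bytes.Length);
            var resizedBitmap = Bitmap.CreateScaledBitmap(bitmap, width, height, true);

            // 4 bytes per float * 3 channels (RGB) per pixel.
            var byteBuffer = ByteBuffer.AllocateDirect(4 * 3 * width * height);
            byteBuffer.Order(ByteOrder.NativeOrder());

            var pixels = new int[width * height];
            resizedBitmap.GetPixels(pixels, 0, width, 0, 0, width, height);

            foreach (var pixelVal in pixels)
            {
                // Scale each channel from 0-255 to 0.0-1.0 (assumes the model
                // expects normalized inputs).
                byteBuffer.PutFloat(((pixelVal >> 16) & 0xFF) / 255.0f);
                byteBuffer.PutFloat(((pixelVal >> 8) & 0xFF) / 255.0f);
                byteBuffer.PutFloat((pixelVal & 0xFF) / 255.0f);
            }

            bitmap.Recycle();
            resizedBitmap.Recycle();
            return byteBuffer;
        }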
Example #4
        public Triangle()
        {
            // initialize vertex byte buffer for shape coordinates
            ByteBuffer bb = ByteBuffer.AllocateDirect(
                // (number of coordinate values * 4 bytes per float)
                triangleCoords.Length * 4);

            // use the device hardware's native byte order
            bb.Order(ByteOrder.NativeOrder());

            // create a floating point buffer from the ByteBuffer
            vertexBuffer = bb.AsFloatBuffer();
            // add the coordinates to the FloatBuffer
            vertexBuffer.Put(triangleCoords);
            // set the buffer to read the first coordinate
            vertexBuffer.Position(0);

            // prepare shaders and OpenGL program
            int vertexShader = MyGLRenderer.LoadShader(GLES20.GL_VERTEX_SHADER,
                                                       vertexShaderCode);
            int fragmentShader = MyGLRenderer.LoadShader(GLES20.GL_FRAGMENT_SHADER,
                                                         fragmentShaderCode);

            mProgram = GLES20.GlCreateProgram();             // create empty OpenGL Program
            GLES20.GlAttachShader(mProgram, vertexShader);   // add the vertex shader to program
            GLES20.GlAttachShader(mProgram, fragmentShader); // add the fragment shader to program
            GLES20.GlLinkProgram(mProgram);                  // create OpenGL program executables
        }
Example #5
        public virtual void clearPath()
        {
            ByteBuffer vertexByteBuffer = ByteBuffer.AllocateDirect(MAX_VERTICES * BYTES_PER_FLOAT);

            vertexByteBuffer.Order(ByteOrder.NativeOrder());
            mVertexBuffer = vertexByteBuffer.AsFloatBuffer();
        }
Example #6
        private ByteBuffer ConvertImageByteArrayToByteBuffer(byte[] image, int width, int height)
        {
            var bitmap        = BitmapFactory.DecodeByteArray(image, 0, image.Length);
            var resizedBitmap = Bitmap.CreateScaledBitmap(bitmap, width, height, true);

            var modelInputSize = FLOAT_SIZE * height * width * PIXEL_SIZE;
            var byteBuffer     = ByteBuffer.AllocateDirect(modelInputSize);

            byteBuffer.Order(ByteOrder.NativeOrder());

            var pixels = new int[width * height];

            resizedBitmap.GetPixels(pixels, 0, resizedBitmap.Width, 0, 0, resizedBitmap.Width, resizedBitmap.Height);

            var pixel = 0;

            // Loop through each pixel and write its RGB channels into the Java.Nio.ByteBuffer
            for (var i = 0; i < width; i++)
            {
                for (var j = 0; j < height; j++)
                {
                    var pixelVal = pixels[pixel++];

                    byteBuffer.PutFloat(pixelVal >> 16 & 0xFF);
                    byteBuffer.PutFloat(pixelVal >> 8 & 0xFF);
                    byteBuffer.PutFloat(pixelVal & 0xFF);
                }
            }

            bitmap.Recycle();

            return(byteBuffer);
        }
Example #7
        /*
         * Process an image and identify what is in it. When done, the method
         * {@link #onPhotoRecognitionReady(Collection)} must be called with the results of
         * the image recognition process.
         *
         * @param image Bitmap containing the image to be classified. The image can be
         *              of any size, but preprocessing might occur to resize it to the
         *              format expected by the classification process, which can be
         *              time- and power-consuming.
         */
        List <Recognition> DoRecognize(Bitmap image)
        {
            // Allocate space for the inference results
            var count = mLabels.Count;

            // Allocate buffer for image pixels.
            int[]      intValues = new int[TF_INPUT_IMAGE_WIDTH * TF_INPUT_IMAGE_HEIGHT];
            ByteBuffer imgData   = ByteBuffer.AllocateDirect(
                4 * DIM_BATCH_SIZE * TF_INPUT_IMAGE_WIDTH * TF_INPUT_IMAGE_HEIGHT * DIM_PIXEL_SIZE);

            imgData.Order(ByteOrder.NativeOrder());

            // Read image data into buffer formatted for the TensorFlow model
            TensorFlowHelper.ConvertBitmapToByteBuffer(image, intValues, imgData);

            // Run inference on the network with the image bytes in imgData as input,
            // storing the results in the confidence array, which is initialized below.
            float[][] confidence = new float[1][];
            confidence[0] = new float[count];

            // Wrap the managed array in a Java object so it can be passed to mTensorFlowLite.Run
            var conf = FromArray <float[]>(confidence);

            mTensorFlowLite.Run(imgData, conf);

            // Convert the result back to a managed array
            confidence = conf.ToArray <float[]>();
            List <Recognition> results = TensorFlowHelper.GetBestResults(confidence[0], mLabels);

            return(results);
        }
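TensorFlowHelper.ConvertBitmapToByteBuffer is not shown in this listing. The sketch below is a plausible reconstruction based on the pixel loops in the other examples here, not the actual helper; it assumes the bitmap has already been scaled to TF_INPUT_IMAGE_WIDTH x TF_INPUT_IMAGE_HEIGHT:

        // Hypothetical sketch of the helper used above: copies the bitmap pixels
        // into intValues, then writes the RGB channels into imgData as floats.
        public static void ConvertBitmapToByteBuffer(Bitmap bitmap, int[] intValues, ByteBuffer imgData)
        {
            imgData.Rewind();
            bitmap.GetPixels(intValues, 0, bitmap.Width, 0, 0, bitmap.Width, bitmap.Height);

            foreach (var pixelVal in intValues)
            {
                imgData.PutFloat((pixelVal >> 16) & 0xFF);  // red
                imgData.PutFloat((pixelVal >> 8) & 0xFF);   // green
                imgData.PutFloat(pixelVal & 0xFF);          // blue
            }
        }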
        public CameraFrustumAndAxis()
        {
            // Set model matrix to the identity
            Matrix.SetIdentityM(ModelMatrix, 0);

            // Put vertices into a vertex buffer
            ByteBuffer byteBuf = ByteBuffer.AllocateDirect(mVertices.Length * 4);

            byteBuf.Order(ByteOrder.NativeOrder());
            mVertexBuffer = byteBuf.AsFloatBuffer();
            mVertexBuffer.Put(mVertices);
            mVertexBuffer.Position(0);

            // Put colors into a color buffer
            ByteBuffer cByteBuff = ByteBuffer.AllocateDirect(mColors.Length * 4);

            cByteBuff.Order(ByteOrder.NativeOrder());
            mColorBuffer = cByteBuff.AsFloatBuffer();
            mColorBuffer.Put(mColors);
            mColorBuffer.Position(0);

            // Load the vertex and fragment shaders, then link the program
            int vertexShader = RenderUtils.loadShader(GLES20.GlVertexShader, sVertexShaderCode);
            int fragShader   = RenderUtils.loadShader(GLES20.GlFragmentShader, sFragmentShaderCode);

            mProgram = GLES20.GlCreateProgram();
            GLES20.GlAttachShader(mProgram, vertexShader);
            GLES20.GlAttachShader(mProgram, fragShader);
            GLES20.GlLinkProgram(mProgram);
        }
Example #9
        private void allocateWeightBuffer(RenderList mat, Dictionary <int, int> rename)
        {
            ByteBuffer rbb = ByteBuffer.AllocateDirect(Weight.Length);

            rbb.Order(ByteOrder.NativeOrder());
            mat.weight = rbb;
        }
        /**
         * Allocates and initializes OpenGL resources needed by the background renderer.  Must be
         * called on the OpenGL thread, typically in
         * {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}.
         *
         * @param context Needed to access shader source.
         */
        public void CreateOnGlThread(Context context)
        {
            // Generate the background texture.
            var textures = new int[1];

            // GLES20 = OpenGL ES 2.0
            // Returns the requested number of free texture names.
            GLES20.GlGenTextures(1, textures, 0);

            TextureId = textures[0];
            GLES20.GlBindTexture(mTextureTarget, TextureId);
            GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
            GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
            GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureMinFilter, GLES20.GlNearest);
            GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureMagFilter, GLES20.GlNearest);

            int numVertices = 4;

            if (numVertices != QUAD_COORDS.Length / COORDS_PER_VERTEX)
            {
                throw new Exception("Unexpected number of vertices in BackgroundRenderer.");
            }

            var bbVertices = ByteBuffer.AllocateDirect(QUAD_COORDS.Length * FLOAT_SIZE);

            bbVertices.Order(ByteOrder.NativeOrder());
            mQuadVertices = bbVertices.AsFloatBuffer();
            mQuadVertices.Put(QUAD_COORDS);
            mQuadVertices.Position(0);

            var bbTexCoords = ByteBuffer.AllocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);

            bbTexCoords.Order(ByteOrder.NativeOrder());
            mQuadTexCoord = bbTexCoords.AsFloatBuffer();
            mQuadTexCoord.Put(QUAD_TEXCOORDS);
            mQuadTexCoord.Position(0);

            var bbTexCoordsTransformed = ByteBuffer.AllocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);

            bbTexCoordsTransformed.Order(ByteOrder.NativeOrder());
            mQuadTexCoordTransformed = bbTexCoordsTransformed.AsFloatBuffer();

            int vertexShader = ShaderUtil.LoadGLShader(TAG, context,
                                                       GLES20.GlVertexShader, Resource.Raw.screenquad_vertex);
            int fragmentShader = ShaderUtil.LoadGLShader(TAG, context,
                                                         GLES20.GlFragmentShader, Resource.Raw.screenquad_fragment_oes);

            mQuadProgram = GLES20.GlCreateProgram();
            GLES20.GlAttachShader(mQuadProgram, vertexShader);
            GLES20.GlAttachShader(mQuadProgram, fragmentShader);
            GLES20.GlLinkProgram(mQuadProgram);
            GLES20.GlUseProgram(mQuadProgram);

            ShaderUtil.CheckGLError(TAG, "Program creation");

            mQuadPositionParam = GLES20.GlGetAttribLocation(mQuadProgram, "a_Position");
            mQuadTexCoordParam = GLES20.GlGetAttribLocation(mQuadProgram, "a_TexCoord");

            ShaderUtil.CheckGLError(TAG, "Program parameters");
        }
Example #11
        // Wrap a float[] in a direct FloatBuffer using native byte order.
        private static FloatBuffer directNativeFloatBuffer(float[] array)
        {
            FloatBuffer buffer = ByteBuffer.AllocateDirect(array.Length * 4).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            buffer.Put(array);
            buffer.Flip();
            return(buffer);
        }
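Unlike the other helpers in this listing, this one calls Flip() rather than Position(0); for a buffer that has just been filled the effect is the same (the limit moves to the end of the written data and the position returns to 0). A short usage sketch with illustrative coordinates:

            // Usage sketch (coordinates are illustrative only).
            float[] triangleCoords = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f };
            FloatBuffer vertexData = directNativeFloatBuffer(triangleCoords);
            // vertexData is direct, in native byte order, and ready to be handed
            // to GlVertexAttribPointer or a similar call.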
Example #12
        public TextureManager()
        {
            mTriangleVertices = ByteBuffer.AllocateDirect(
                mTriangleVerticesData.Length * FLOAT_SIZE_BYTES)
                .Order(ByteOrder.NativeOrder()).AsFloatBuffer();
            mTriangleVertices.Put(mTriangleVerticesData).Position(0);

            Android.Opengl.Matrix.SetIdentityM(mSTMatrix, 0);
        }
Example #13
        /*
         * Process an image and identify what is in it. When done, the method
         * {@link #onPhotoRecognitionReady(Collection)} must be called with the results of
         * the image recognition process.
         *
         * @param image Bitmap containing the image to be classified. The image can be
         *              of any size, but preprocessing might occur to resize it to the
         *              format expected by the classification process, which can be
         *              time- and power-consuming.
         */
        void DoRecognize(Bitmap image)
        {
            // Allocate space for the inference results
            var count = mLabels.Count;

            // Allocate buffer for image pixels.
            int[] intValues = new int[TF_INPUT_IMAGE_WIDTH * TF_INPUT_IMAGE_HEIGHT];
            //ByteBuffer imgData = ByteBuffer.AllocateDirect(
            //        4 * DIM_BATCH_SIZE * TF_INPUT_IMAGE_WIDTH * TF_INPUT_IMAGE_HEIGHT * DIM_PIXEL_SIZE);
            ByteBuffer imgData = ByteBuffer.AllocateDirect(
                DIM_BATCH_SIZE * TF_INPUT_IMAGE_WIDTH * TF_INPUT_IMAGE_HEIGHT * DIM_PIXEL_SIZE);

            imgData.Order(ByteOrder.NativeOrder());

            // Read image data into buffer formatted for the TensorFlow model
            TensorFlowHelper.ConvertBitmapToByteBuffer(image, intValues, imgData);

            // Run inference on the network with the image bytes in imgData as input,
            // storing the results in the confidence array.
            //ByteBuffer confidenceByteBuffer = ByteBuffer.Allocate(count);
            //mTensorFlowLite.Run(imgData, confidenceByteBuffer);
            //byte[] confidenceByteArray = ConvertResults(confidenceByteBuffer);

            //var confidenceBuffer = FloatBuffer.Allocate(4 * count);


            byte[][] confidence = new byte[1][];
            confidence[0] = new byte[count];

            var conf = Java.Lang.Object.FromArray <byte[]>(confidence);

            mTensorFlowLite.Run(imgData, conf);

            confidence = conf.ToArray <byte[]>();
            List <Recognition> results = TensorFlowHelper.GetBestResults(confidence[0], mLabels);


            /*float[][] confidence = new float[1][];
             * confidence[0] = new float[count];
             *
             * var conf = Java.Lang.Object.FromArray<float[]>(confidence);
             * mTensorFlowLite.Run(imgData, conf);
             *
             * confidence = conf.ToArray<float[]>();
             * List<Recognition> results = TensorFlowHelper.GetBestResults(confidence[0], mLabels);
             */
            //float[] confidenceArray = ConvertResults(confidenceBuffer.AsFloatBuffer());
            //float[] confidenceByteArray = ConvertResultsFloat(confidenceBuffer, count);

            // Get the results with the highest confidence and map them to their labels
            //List<Recognition> results = TensorFlowHelper.GetBestResults(confidenceArray, mLabels);
            //List<Recognition> results = TensorFlowHelper.GetBestResults(confidenceByteArray, mLabels);
            //List<Recognition> results = TensorFlowHelper.GetBestResults(confidencePerLabel, mLabels);

            // Report the results with the highest confidence
            OnPhotoRecognitionReady(results);
        }
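This quantized variant receives the confidences as raw bytes rather than floats. The fragment below is illustrative only and could follow the Run call inside DoRecognize; it assumes the model's output uses standard 0-255 quantization:

            // Convert quantized byte confidences into approximate probabilities
            // (assumes a standard uint8-quantized softmax output).
            float[] probabilities = new float[count];
            for (int i = 0; i < count; i++)
            {
                probabilities[i] = confidence[0][i] / 255.0f;
            }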
Example #14
        public FirstTriangleScreen(IGame g) : base(g)
        {
            _glGraphics = ((AndroidGLGame)g).GLGraphics;
            ByteBuffer bb = ByteBuffer.AllocateDirect(3 * _vertexSize);

            bb.Order(ByteOrder.NativeOrder());
            _vertices = bb.AsFloatBuffer();
            _vertices.Put(new float[] { 0.0f, 0.0f, 539.0f, 0.0f, 270.0f, 959.0f });
            _vertices.Flip();
        }
Example #15
        public TexturedTriangleScreen(IGame g) : base(g)
        {
            _glGraphics = ((AndroidGLGame)g).GLGraphics;
            ByteBuffer bb = ByteBuffer.AllocateDirect(_vertexSize * 3);

            bb.Order(ByteOrder.NativeOrder());
            _vertices = bb.AsFloatBuffer();
            _vertices.Put(new float[] { 0f, 0f, 0f, 1f, 540f, 0f, 1f, 1f, 270f, 960f, 0.5f, 0f });
            _vertices.Flip();
            _textureId = LoadTexture("bobargb8888.png");
        }
        public void TestWriteObjectWithIdentifiedDataSerializable()
        {
            var serializationService = CreateSerializationService(1, ByteOrder.NativeOrder());

            var serializable            = new SampleIdentifiedDataSerializable('c', 2);
            var objectCarryingPortable1 = new ObjectCarryingPortable(serializable);
            var data = serializationService.ToData(objectCarryingPortable1);
            var objectCarryingPortable2 = serializationService.ToObject <ObjectCarryingPortable>(data);

            Assert.AreEqual(objectCarryingPortable1, objectCarryingPortable2);
        }
Example #17
        public virtual void Pixels(int w, int h, int[] pixels)
        {
            Bind();

            var imageBuffer = ByteBuffer.AllocateDirect(w * h * 4).Order(ByteOrder.NativeOrder()).AsIntBuffer();

            imageBuffer.Put(pixels);
            imageBuffer.Position(0);

            GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlRgba, w, h, 0, GLES20.GlRgba, GLES20.GlUnsignedByte, imageBuffer);
        }
Example #18
        FloatBuffer PrepareBuffer(float[] data)
        {
            ByteBuffer buffer = ByteBuffer.AllocateDirect(data.Length * 4);

            buffer.Order(ByteOrder.NativeOrder());
            var result = buffer.AsFloatBuffer();

            result.Put(data);
            result.Position(0);

            return(result);
        }
Example #19
        public Flare(int nRays, float radius)
            : base(0, 0, 0, 0)
        {
            // FIXME
            // Texture is recreated every time we need
            // to show the effect; this should be refactored

            var gradient = new[] {
                Android.Graphics.Color.Argb(0xFF, 0xFF, 0xFF, 0xFF),
                Android.Graphics.Color.Argb(0x00, 0xFF, 0xFF, 0xFF)
            };

            _texture = new Gradient(gradient);

            _nRays = nRays;

            Angle        = 45;
            AngularSpeed = 180;

            _vertices = ByteBuffer.AllocateDirect((nRays * 2 + 1) * 4 * (sizeof(float))).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            _indices = ByteBuffer.AllocateDirect(nRays * 3 * sizeof(short)).Order(ByteOrder.NativeOrder()).AsShortBuffer();

            var v = new float[4];

            v[0] = 0;
            v[1] = 0;
            v[2] = 0.25f;
            v[3] = 0;
            _vertices.Put(v);

            v[2] = 0.75f;
            v[3] = 0;

            for (var i = 0; i < nRays; i++)
            {
                var a = i * 3.1415926f * 2 / nRays;
                v[0] = FloatMath.Cos(a) * radius;
                v[1] = FloatMath.Sin(a) * radius;
                _vertices.Put(v);

                a   += 3.1415926f * 2 / nRays / 2;
                v[0] = FloatMath.Cos(a) * radius;
                v[1] = FloatMath.Sin(a) * radius;
                _vertices.Put(v);

                _indices.Put(0);
                _indices.Put((short)(1 + i * 2));
                _indices.Put((short)(2 + i * 2));
            }

            _indices.Position(0);
        }
Example #20
 private IInputOutputFactory CreateInputOutputFactory()
 {
     if (_byteOrder == null)
     {
         _byteOrder = ByteOrder.BigEndian;
     }
     if (_useNativeByteOrder || _byteOrder == ByteOrder.NativeOrder())
     {
         _byteOrder = ByteOrder.NativeOrder();
     }
     return(new ByteArrayInputOutputFactory(_byteOrder));
 }
Example #21
        public ColoredTriangleScreen(IGame g) : base(g)
        {
            _glGraphics = ((AndroidGLGame)g).GLGraphics;
            ByteBuffer bb = ByteBuffer.AllocateDirect(3 * _vertexSize);

            bb.Order(ByteOrder.NativeOrder());
            _vertices = bb.AsFloatBuffer();
            _vertices.Put(new float[] { 0, 0, 1, 0, 0, 1,
                                        540, 0, 0, 1, 0, 1,
                                        270, 960, 0, 0, 1, 1 });
            _vertices.Flip();
        }
Example #22
 public MainRenderer(MainView view)
 {
     mView = view;
     float[] vtmp = { 1.0f, -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f };
     float[] ttmp = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
     pVertex = ByteBuffer.AllocateDirect(8 * 4).Order(ByteOrder.NativeOrder()).AsFloatBuffer();
     pVertex.Put(vtmp);
     pVertex.Position(0);
     pTexCoord = ByteBuffer.AllocateDirect(8 * 4).Order(ByteOrder.NativeOrder()).AsFloatBuffer();
     pTexCoord.Put(ttmp);
     pTexCoord.Position(0);
 }
Example #23
            /// <summary>
            /// Utility method that converts a float[] array into the FloatBuffer required by OpenGL ES.
            /// </summary>
            /// <param name="arr">Source float array.</param>
            /// <returns>A FloatBuffer containing the array data, positioned at 0.</returns>
            private FloatBuffer floatBufferUtil(float[] arr)
            {
                FloatBuffer mBuffer;
                // Initialize the ByteBuffer with a capacity of arr.Length * 4, since each float occupies 4 bytes
                ByteBuffer qbb = ByteBuffer.AllocateDirect(arr.Length * 4);

                // Use the platform's native byte order
                qbb.Order(ByteOrder.NativeOrder());
                mBuffer = qbb.AsFloatBuffer();
                mBuffer.Put(arr);
                mBuffer.Position(0);
                return(mBuffer);
            }
Example #24
        public virtual void Pixels(int w, int h, byte[] pixels)
        {
            Bind();

            ByteBuffer imageBuffer = ByteBuffer.AllocateDirect(w * h).Order(ByteOrder.NativeOrder());

            imageBuffer.Put(pixels);
            imageBuffer.Position(0);

            GLES20.GlPixelStorei(GLES20.GlUnpackAlignment, 1);

            GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlAlpha, w, h, 0, GLES20.GlAlpha, GLES20.GlUnsignedByte, imageBuffer);
        }
        private void DrawLabel(float[] cameraViews, float[] cameraProjection)
        {
            ShaderUtil.CheckGlError(TAG, "Draw label start.");
            Matrix.MultiplyMM(modelViewMatrix, 0, cameraViews, 0, modelMatrix, 0);
            Matrix.MultiplyMM(modelViewProjectionMatrix, 0, cameraProjection, 0, modelViewMatrix, 0);

            float halfWidth  = LABEL_WIDTH / 2.0f;
            float halfHeight = LABEL_HEIGHT / 2.0f;

            float[] vertices =
            {
                -halfWidth, -halfHeight, 1,
                -halfWidth, halfHeight,  1,
                halfWidth,  halfHeight,  1,
                halfWidth,  -halfHeight, 1,
            };

            // Each float occupies 4 bytes.
            FloatBuffer vetBuffer = ByteBuffer.AllocateDirect(4 * vertices.Length)
                                    .Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            vetBuffer.Rewind();
            for (int i = 0; i < vertices.Length; ++i)
            {
                vetBuffer.Put(vertices[i]);
            }
            vetBuffer.Rewind();

            // Each float occupies 4 bytes, so the stride is 4 * COORDS_PER_VERTEX bytes.
            GLES20.GlVertexAttribPointer(glPositionParameter, COORDS_PER_VERTEX, GLES20.GlFloat,
                                         false, 4 * COORDS_PER_VERTEX, vetBuffer);

            // Set the sequence of OpenGL drawing points to generate two triangles that form a plane.
            short[] indices = { 0, 1, 2, 0, 2, 3 };

            // Allocate 2 bytes per short per index.
            ShortBuffer idxBuffer = ByteBuffer.AllocateDirect(2 * indices.Length)
                                    .Order(ByteOrder.NativeOrder()).AsShortBuffer();

            idxBuffer.Rewind();
            for (int i = 0; i < indices.Length; ++i)
            {
                idxBuffer.Put(indices[i]);
            }
            idxBuffer.Rewind();

            GLES20.GlUniformMatrix4fv(glModelViewProjectionMatrix, 1, false, modelViewProjectionMatrix, 0);

            GLES20.GlDrawElements(GLES20.GlTriangleStrip, idxBuffer.Limit(), GLES20.GlUnsignedShort, idxBuffer);
            ShaderUtil.CheckGlError(TAG, "Draw label end.");
        }
        private void Init()
        {
            var fullQuadCoords = new float[] { -1, 1, -1, -1, 1, 1, 1, -1 };

            _fullQuadVertices = ByteBuffer.AllocateDirect(fullQuadCoords.Length * 4)
                                .Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            _fullQuadVertices.Put(fullQuadCoords).Position(0);

            PreserveEGLContextOnPause = true;
            SetEGLContextClientVersion(2);
            SetRenderer(this);
            RenderMode = Android.Opengl.Rendermode.WhenDirty;
        }
Example #27
        private ByteBuffer AllocateByteBuffer()
        {
            var bufferSize = BatchSize
                             * ImageWidth
                             * ImageHeight
                             * PixelSize
                             * NumBytesPerChannel
                             * (NormalizeMobileNetInputs ? 4 : 1);

            var imgData = ByteBuffer.AllocateDirect(bufferSize);

            imgData.Order(ByteOrder.NativeOrder());
            return(imgData);
        }
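For reference, plugging typical MobileNet values into the formula above (all assumptions: batch size 1, a 224x224 RGB input, 1 byte per channel before normalization) gives roughly 600 KB for the normalized float input and about 150 KB for the quantized one:

            // Worked example with assumed constants:
            //   normalized (float) input: 1 * 224 * 224 * 3 * 1 * 4 = 602,112 bytes
            //   quantized (byte) input:   1 * 224 * 224 * 3 * 1 * 1 = 150,528 bytes
            var floatInputBuffer = ByteBuffer.AllocateDirect(1 * 224 * 224 * 3 * 1 * 4);
            floatInputBuffer.Order(ByteOrder.NativeOrder());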
Example #28
            public void OnDrawFrame(Javax.Microedition.Khronos.Opengles.IGL10 glUnused)
            {
                if (_updateSurface)
                {
                    _surfaceTexture.UpdateTexImage();
                    _surfaceTexture.GetTransformMatrix(_STMatrix);
                    _updateSurface = false;
                }

                GLES20.GlUseProgram(0);
                GLES20.GlUseProgram(_glProgram);
                GLES20.GlActiveTexture(GLES20.GlTexture2);
                var tWidth  = _width;
                var tHeight = _height;

                funnyGhostEffectBuffer = ByteBuffer.AllocateDirect(tWidth * tHeight * 4);
                funnyGhostEffectBuffer.Order(ByteOrder.NativeOrder());
                funnyGhostEffectBuffer.Position(0);

                // Note: GlReadPixels returns rows starting from the lower-left corner rather than the top-left,
                // so the data picks up a flipped/mirrored effect when passed to TexImage2D to create the texture.
                GLES20.GlReadPixels(0, 0, tWidth - 1, tHeight - 1, GLES20.GlRgba, GLES20.GlUnsignedByte, funnyGhostEffectBuffer);
                updateTargetTexture(tWidth, tHeight);
                GLES20.GlBindTexture(GLES20.GlTexture2d, _otherTextureId);
                GLES20.GlUniform1i(_otherTextureUniform, 2);

                GLES20.GlUseProgram(0);
                GLES20.GlUseProgram(_glProgram);
                GLES20.GlActiveTexture(GLES20.GlTexture1);
                GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, _OESTextureId);
                GLES20.GlUniform1i(_OESTextureUniform, 1);

                _triangleVertices.Position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.GlVertexAttribPointer(_aPositionHandle, 3, GLES20.GlFloat, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, _triangleVertices);
                GLES20.GlEnableVertexAttribArray(_aPositionHandle);

                _textureVertices.Position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.GlVertexAttribPointer(_aTextureCoord, 2, GLES20.GlFloat, false, TEXTURE_VERTICES_DATA_STRIDE_BYTES, _textureVertices);
                GLES20.GlEnableVertexAttribArray(_aTextureCoord);

                Android.Opengl.Matrix.SetIdentityM(_MVPMatrix, 0);
                GLES20.GlUniformMatrix4fv(_uMVPMatrixHandle, 1, false, _MVPMatrix, 0);
                GLES20.GlUniformMatrix4fv(_uSTMatrixHandle, 1, false, _STMatrix, 0);

                GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);

                GLES20.GlFinish();
            }
Example #29
            public VideoPreviewer(Context context, GlVideoView videoView)
            {
                _mediaPlayer      = new MediaPlayer();
                _videoView        = videoView;
                _triangleVertices = ByteBuffer.AllocateDirect(_triangleVerticesData.Length * FLOAT_SIZE_BYTES)
                                    .Order(ByteOrder.NativeOrder()).AsFloatBuffer();

                _triangleVertices.Put(_triangleVerticesData).Position(0);

                _textureVertices = ByteBuffer.AllocateDirect(_textureVerticesData.Length * FLOAT_SIZE_BYTES)
                                   .Order(ByteOrder.NativeOrder()).AsFloatBuffer();

                _textureVertices.Put(_textureVerticesData).Position(0);

                Android.Opengl.Matrix.SetIdentityM(_STMatrix, 0);
            }
Example #30
        /**
         * Serializes the current chunk instance to a byte array. The resulting array will pass this check: NinePatch.isNinePatchChunk(byte[] chunk)
         *
         * @return The 9-patch data chunk describing how the underlying bitmap is split apart and drawn.
         */
        public byte[] toBytes()
        {
            int capacity   = 4 + (7 * 4) + xDivs.Count * 2 * 4 + yDivs.Count * 2 * 4 + colors.Length * 4;
            var byteBuffer = ByteBuffer.Allocate(capacity).Order(ByteOrder.NativeOrder());

            byteBuffer.Put(Integer.ValueOf(1).ByteValue());
            byteBuffer.Put(Integer.ValueOf(xDivs.Count * 2).ByteValue());
            byteBuffer.Put(Integer.ValueOf(yDivs.Count * 2).ByteValue());
            byteBuffer.Put(Integer.ValueOf(colors.Length).ByteValue());
            //Skip
            byteBuffer.PutInt(0);
            byteBuffer.PutInt(0);

            if (padding == null)
            {
                padding = new Rect();
            }
            byteBuffer.PutInt(padding.Left);
            byteBuffer.PutInt(padding.Right);
            byteBuffer.PutInt(padding.Top);
            byteBuffer.PutInt(padding.Bottom);

            //Skip
            byteBuffer.PutInt(0);

            foreach (Div div in xDivs)
            {
                byteBuffer.PutInt(div.start);
                byteBuffer.PutInt(div.stop);
            }
            foreach (Div div in yDivs)
            {
                byteBuffer.PutInt(div.start);
                byteBuffer.PutInt(div.stop);
            }
            foreach (int color in colors)
            {
                byteBuffer.PutInt(color);
            }

            var bytes = new byte[capacity];

            byteBuffer.Rewind();
            byteBuffer.Get(bytes);
            return(bytes);
        }
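A hedged companion helper (the name ToNinePatch and the bitmap parameter are illustrative, not part of the original class) showing how the serialized chunk can be validated and wrapped with the Android.Graphics.NinePatch API:

        // Hypothetical helper: validate the generated chunk with the framework
        // check mentioned above, then wrap it around a caller-supplied bitmap.
        public NinePatch ToNinePatch(Bitmap bitmap)
        {
            byte[] chunk = toBytes();
            if (!NinePatch.IsNinePatchChunk(chunk))
            {
                throw new InvalidOperationException("Generated chunk failed NinePatch.IsNinePatchChunk.");
            }
            return new NinePatch(bitmap, chunk, null);
        }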