public Square ()
{
    // Vertex buffer: direct, native-order buffer sized for the float
    // coordinates (4 bytes per float), loaded and rewound for reading.
    ByteBuffer vertexBytes = ByteBuffer.AllocateDirect (squareCoords.Length * 4);
    vertexBytes.Order (ByteOrder.NativeOrder ());
    vertexBuffer = vertexBytes.AsFloatBuffer ();
    vertexBuffer.Put (squareCoords);
    vertexBuffer.Position (0);

    // Draw-list buffer: 2 bytes per short index.
    ByteBuffer drawListBytes = ByteBuffer.AllocateDirect (drawOrder.Length * 2);
    drawListBytes.Order (ByteOrder.NativeOrder ());
    drawListBuffer = drawListBytes.AsShortBuffer ();
    drawListBuffer.Put (drawOrder);
    drawListBuffer.Position (0);

    // Compile both shaders, then create and link the GL program.
    int vertexShader = MyGLRenderer.LoadShader (GLES20.GlVertexShader, vertexShaderCode);
    int fragmentShader = MyGLRenderer.LoadShader (GLES20.GlFragmentShader, fragmentShaderCode);
    mProgram = GLES20.GlCreateProgram ();
    GLES20.GlAttachShader (mProgram, vertexShader);
    GLES20.GlAttachShader (mProgram, fragmentShader);
    GLES20.GlLinkProgram (mProgram);
}
public MITCRender()
{
    // Wrap each source array in a float buffer for GL rendering, then
    // reset the model transform to the identity matrix.
    mTriangleBuffer = FloatbufferUtil(mTriangleArray);
    mColorBuffer = FloatbufferUtil(mColorArray);
    quateBuffer = FloatbufferUtil(mQuateArray);
    CMatrixMath.Matrix4fSetIdentity(ref m_Transform);
}
public Triangle ()
{
    // Vertex buffer: direct buffer in the device's native byte order,
    // 4 bytes per float coordinate, loaded and rewound for reading.
    ByteBuffer vertexBytes = ByteBuffer.AllocateDirect (triangleCoords.Length * 4);
    vertexBytes.Order (ByteOrder.NativeOrder ());
    vertexBuffer = vertexBytes.AsFloatBuffer ();
    vertexBuffer.Put (triangleCoords);
    vertexBuffer.Position (0);

    // Compile both shaders, then create and link the GL ES 3.0 program.
    int vertexShader = MyGLRenderer.LoadShader (GLES30.GlVertexShader, vertexShaderCode);
    int fragmentShader = MyGLRenderer.LoadShader (GLES30.GlFragmentShader, fragmentShaderCode);
    mProgram = GLES30.GlCreateProgram ();
    GLES30.GlAttachShader (mProgram, vertexShader);
    GLES30.GlAttachShader (mProgram, fragmentShader);
    GLES30.GlLinkProgram (mProgram);
}
public MainRenderer(MainView view)
{
    mView = view;

    // Full-screen quad: four (x, y) vertex positions and the matching
    // (u, v) texture coordinates.
    float[] vertexData = { 1.0f, -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f };
    float[] texCoordData = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };

    // 8 floats * 4 bytes each, in the device's native byte order.
    pVertex = ByteBuffer.AllocateDirect(8 * 4).Order(ByteOrder.NativeOrder()).AsFloatBuffer();
    pVertex.Put(vertexData);
    pVertex.Position(0);

    pTexCoord = ByteBuffer.AllocateDirect(8 * 4).Order(ByteOrder.NativeOrder()).AsFloatBuffer();
    pTexCoord.Put(ttmpData());
    pTexCoord.Position(0);

    // Local helper keeps the two buffer setups symmetric.
    float[] ttmpData() => texCoordData;
}
public Cube()
{
    // Vertex positions: direct native-order float buffer (4 bytes/float).
    ByteBuffer backing = ByteBuffer.AllocateDirect(vertices.Length * 4);
    backing.Order(ByteOrder.NativeOrder());
    mVertexBuffer = backing.AsFloatBuffer();
    mVertexBuffer.Put(vertices);
    mVertexBuffer.Position(0);

    // Per-vertex colors, same layout.
    backing = ByteBuffer.AllocateDirect(colors.Length * 4);
    backing.Order(ByteOrder.NativeOrder());
    mColorBuffer = backing.AsFloatBuffer();
    mColorBuffer.Put(colors);
    mColorBuffer.Position(0);

    // Index buffer stays a plain byte buffer: one byte per index.
    mIndexBuffer = ByteBuffer.AllocateDirect(indices.Length);
    mIndexBuffer.Put(indices);
    mIndexBuffer.Position(0);
}
public MyRenderer()
{
    // Vertex positions (XYZ): 4 bytes per float, native byte order.
    ByteBuffer positionBytes = ByteBuffer.allocateDirect(mXYZCoords.Length * 4);
    positionBytes.order(ByteOrder.nativeOrder());
    mVertexBuffer = positionBytes.asFloatBuffer();
    mVertexBuffer.put(mXYZCoords);
    mVertexBuffer.position(0);

    // Texture coordinates (UV).
    ByteBuffer texBytes = ByteBuffer.allocateDirect(mUVCoords.Length * 4);
    texBytes.order(ByteOrder.nativeOrder());
    mTextureBuffer = texBytes.asFloatBuffer();
    mTextureBuffer.put(mUVCoords);
    mTextureBuffer.position(0);

    // Draw-order indices: 2 bytes per short.
    ByteBuffer indexBytes = ByteBuffer.allocateDirect(mVertexIndex.Length * 2);
    indexBytes.order(ByteOrder.nativeOrder());
    mDrawListBuffer = indexBytes.asShortBuffer();
    mDrawListBuffer.put(mVertexIndex);
    mDrawListBuffer.position(0);
}
public Trajectory(int lineWidth)
{
    mLineWidth = lineWidth;

    // Start from an identity model matrix.
    Matrix.SetIdentityM(ModelMatrix, 0);

    // Pre-allocate the maximum-size vertex buffer; trajectory vertices
    // are streamed into it later.
    ByteBuffer backing = ByteBuffer.AllocateDirect(MAX_VERTICES * BYTES_PER_FLOAT);
    backing.Order(ByteOrder.NativeOrder());
    mVertexBuffer = backing.AsFloatBuffer();

    // Compile the shaders, then create and link the program.
    int vertexShader = RenderUtils.loadShader(GLES20.GlVertexShader, mVertexShaderCode);
    int fragmentShader = RenderUtils.loadShader(GLES20.GlFragmentShader, mFragmentShaderCode);
    mProgram = GLES20.GlCreateProgram();
    GLES20.GlAttachShader(mProgram, vertexShader);
    GLES20.GlAttachShader(mProgram, fragmentShader);
    GLES20.GlLinkProgram(mProgram);
}
private void DrawTriangle(FloatBuffer aTriangleBuffer)
{
    // Position attribute: the buffer is interleaved, so seek to the
    // position offset before binding.
    aTriangleBuffer.Position(mPositionOffset);
    GLES20.GlVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GlFloat,
        false, mStrideBytes, aTriangleBuffer);
    GLES20.GlEnableVertexAttribArray(mPositionHandle);

    // Color attribute from the same interleaved buffer.
    aTriangleBuffer.Position(mColorOffset);
    GLES20.GlVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GlFloat,
        false, mStrideBytes, aTriangleBuffer);
    GLES20.GlEnableVertexAttribArray(mColorHandle);

    // MVP = projection * (view * model); mMVPMatrix doubles as scratch
    // space for the intermediate model-view product.
    Matrix.MultiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
    Matrix.MultiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
    GLES20.GlUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

    GLES20.GlDrawArrays(GLES20.GlTriangles, 0, 3);
}
/// <summary>
/// Converts a plane polygon from ARCore into a <see cref="Vector3"/> array.
/// </summary>
/// <param name="buffer">The float buffer containing the 2D (x, z) vertices of the polygon.</param>
/// <param name="waveVectorArray">The <see cref="Vector3"/> array receiving the 3D vertices of the
/// polygon; allocated or resized as needed to match the vertex count.</param>
public static void ToWave(this FloatBuffer buffer, ref Vector3[] waveVectorArray)
{
    buffer.Rewind();

    // Each boundary vertex is stored as two consecutive floats (x, z).
    var boundaryVertices = buffer.Limit() / 2;

    if (waveVectorArray == null)
    {
        waveVectorArray = new Vector3[boundaryVertices];
    }
    else if (waveVectorArray.Length != boundaryVertices)
    {
        Array.Resize(ref waveVectorArray, boundaryVertices);
    }

    for (int i = 0; i < boundaryVertices; i++)
    {
        waveVectorArray[i].X = buffer.Get();
        // The polygon lies in the plane's local X/Z plane. Reset Y explicitly:
        // a reused array could otherwise carry stale Y values from the caller.
        waveVectorArray[i].Y = 0f;
        waveVectorArray[i].Z = buffer.Get();
    }
}
public static FloatBuffer getDirectBuffer(int size, FloatBuffer buffer)
{
    // A null buffer is passed straight through.
    if (buffer == null)
    {
        return(buffer);
    }

    // Work with a 4-byte-aligned byte size; size >> 2 is the float count.
    size = Round4(size);

    // Already direct: just clamp the limit to the requested float count,
    // measured from the current position.
    if (buffer.Direct)
    {
        buffer.limit((size >> 2) + buffer.position());
        return(buffer);
    }

    // Non-direct: copy the requested floats into a fresh direct buffer
    // and rewind it so the caller reads from the start.
    FloatBuffer copy = allocateDirectBuffer(size).asFloatBuffer();
    copy.put((FloatBuffer)((FloatBuffer)buffer).slice().limit(size >> 2));
    copy.rewind();
    return(copy);
}
/// <summary>
/// Adds a line built from the given vertex coordinates.
/// </summary>
/// <param name="points">Vertex coordinates, three floats (x, y, z) per vertex.</param>
/// <param name="width">The line width.</param>
/// <param name="argb">The line color as a packed ARGB value.</param>
public void AddLine(float[] points, float width, int argb)
{
    // A float is 4 bytes, so the backing store needs 4 bytes per coordinate.
    ByteBuffer backing = ByteBuffer.AllocateDirect(points.Length * 4);
    backing.Order(ByteOrder.NativeOrder());

    FloatBuffer vertexBuffer = backing.AsFloatBuffer();
    vertexBuffer.Put(points);
    vertexBuffer.Position(0);

    _lines.Add(new LineProcessed()
    {
        Vertices = vertexBuffer,
        Color = argb,
        Width = width,
        Count = points.Length / 3   // three coordinates per vertex
    });
}
/**
 * Compute the intersections of a line with a collection of triangles.
 *
 * @param line     the line to intersect.
 * @param vertices the triangles, arranged in a buffer as GL_TRIANGLES (9 floats per triangle).
 *
 * @return the list of intersections with the line and the triangles, or null if there are no intersections.
 *
 * @throws ArgumentException if the line or vertex buffer is null.
 */
public static List <Intersection> intersectTriangles(Line line, FloatBuffer vertices)
{
    if (line == null)
    {
        string msg = Logging.getMessage("nullValue.LineIsNull");
        Logging.logger().severe(msg);
        throw new ArgumentException(msg);
    }

    if (vertices == null)
    {
        string msg = Logging.getMessage("nullValue.BufferIsNull");
        Logging.logger().severe(msg);
        throw new ArgumentException(msg);
    }

    List <Intersection> intersections = null;

    vertices.rewind();

    // Consume the buffer nine floats — one triangle — at a time.
    // C# evaluates arguments left to right, so the nine get() calls read
    // the triangle's vertices in buffer order.
    while (vertices.limit() - vertices.position() >= 9)
    {
        Intersection hit = intersect(line,
            vertices.get(), vertices.get(), vertices.get(),
            vertices.get(), vertices.get(), vertices.get(),
            vertices.get(), vertices.get(), vertices.get());

        if (hit == null)
        {
            continue;
        }

        // Lazily create the result list on the first hit.
        if (intersections == null)
        {
            intersections = new List <Intersection>();
        }
        intersections.Add(hit);
    }

    return(intersections);
}
private void FindValidSkeletonPoints(ARBody arBody)
{
    int[] isExists;
    float[] skeletonPoints;

    // Pick the 3D or 2D data set according to the body's coordinate system
    // and obtain the valid-point flags alongside the raw coordinates.
    if (arBody.CoordinateSystemType == ARCoordinateSystemType.CoordinateSystemType3dCamera)
    {
        isExists = arBody.GetSkeletonPointIsExist3D();
        skeletonPoints = arBody.GetSkeletonPoint3D();
    }
    else
    {
        isExists = arBody.GetSkeletonPointIsExist2D();
        skeletonPoints = arBody.GetSkeletonPoint2D();
    }

    // Each joint contributes three coordinates; copy only the joints
    // flagged as present, packed contiguously at the front of the array.
    float[] points = new float[isExists.Length * 3];
    int writeIndex = 0;
    int validPointNum = 0;
    for (int i = 0; i < isExists.Length; i++)
    {
        if (isExists[i] != 0)
        {
            points[writeIndex++] = skeletonPoints[3 * i];
            points[writeIndex++] = skeletonPoints[3 * i + 1];
            points[writeIndex++] = skeletonPoints[3 * i + 2];
            validPointNum++;
        }
    }

    mSkeletonPoints = FloatBuffer.Wrap(points);
    mPointsNum = validPointNum;
}
private void FindValidConnectionSkeletonLines(ARBody arBody)
{
    mPointsLineNum = 0;
    int[] connections = arBody.GetBodySkeletonConnection();
    float[] linePoints = new float[LINE_POINT_RATIO * connections.Length];

    float[] coors;
    int[] isExists;

    // Choose the 3D or 2D coordinate set to match the body's coordinate system.
    if (arBody.CoordinateSystemType == ARCoordinateSystemType.CoordinateSystemType3dCamera)
    {
        coors = arBody.GetSkeletonPoint3D();
        isExists = arBody.GetSkeletonPointIsExist3D();
    }
    else
    {
        coors = arBody.GetSkeletonPoint2D();
        isExists = arBody.GetSkeletonPointIsExist2D();
    }

    // connections holds endpoint-index pairs, e.g. [p0,p1; p0,p3; p0,p5; p1,p2].
    // For each pair whose endpoints both exist, append the two endpoints'
    // (x, y, z) coordinates — six floats per line — to linePoints.
    for (int j = 0; j < connections.Length; j += 2)
    {
        int a = connections[j];
        int b = connections[j + 1];
        if (isExists[a] == 0 || isExists[b] == 0)
        {
            continue;
        }

        int baseIndex = mPointsLineNum * 3;
        linePoints[baseIndex] = coors[3 * a];
        linePoints[baseIndex + 1] = coors[3 * a + 1];
        linePoints[baseIndex + 2] = coors[3 * a + 2];
        linePoints[baseIndex + 3] = coors[3 * b];
        linePoints[baseIndex + 4] = coors[3 * b + 1];
        linePoints[baseIndex + 5] = coors[3 * b + 2];

        // Two endpoints (= one line) were written.
        mPointsLineNum += 2;
    }

    mLinePoints = FloatBuffer.Wrap(linePoints);
}
public NinePatch(object tx, int x, int y, int w, int h, int Left, int Top, int Right, int Bottom) : base(0, 0, 0, 0)
{
    texture = TextureCache.Get(tx);

    // Zero width/height means "use the full texture dimension".
    w = w == 0 ? texture.Width : w;
    h = h == 0 ? texture.Height : h;

    nWidth = _Width = w;
    nHeight = _Height = h;

    // Scratch vertex data plus one quad set per patch cell (3x3 = 9).
    vertices = new float[16];
    verticesBuffer = Quad.CreateSet(9);

    marginLeft = Left;
    marginRight = Right;
    marginTop = Top;
    marginBottom = Bottom;

    // Outer rect covers the whole patch; inner rect is inset by the margins.
    outterF = texture.UvRect(x, y, x + w, y + h);
    innerF = texture.UvRect(x + Left, y + Top, x + w - Right, y + h - Bottom);

    UpdateVertices();
}
public void Init()
{
    try
    {
        // Compile and link the shader program.
        MProgram = GlToolbox.CreateProgram(VertexShader, FragmentShader);

        // Resolve shader handles by name.
        MTexSamplerHandle = GLES20.GlGetUniformLocation(MProgram, "tex_sampler");
        MTexCoordHandle = GLES20.GlGetAttribLocation(MProgram, "a_texcoord");
        MPosCoordHandle = GLES20.GlGetAttribLocation(MProgram, "a_position");

        // Direct, native-order buffers for the texture and position quads.
        MTexVertices = ByteBuffer.AllocateDirect(TexVertices.Length * FloatSizeBytes)
            .Order(ByteOrder.NativeOrder())
            .AsFloatBuffer();
        MTexVertices.Put(TexVertices).Position(0);

        MPosVertices = ByteBuffer.AllocateDirect(PosVertices.Length * FloatSizeBytes)
            .Order(ByteOrder.NativeOrder())
            .AsFloatBuffer();
        MPosVertices.Put(PosVertices).Position(0);
    }
    catch (Exception e)
    {
        // Report rather than crash; callers treat a failed Init as a no-op filter.
        Methods.DisplayReportResultTrack(e);
    }
}
/**
 * Expands a buffer of indexed triangle strip vertices to a buffer of non-indexed general-triangle vertices.
 *
 * @param indices the triangle indices.
 * @param inBuf   the vertex buffer the indices refer to, in the order x, y, z, x, y, z, ...
 * @param outBuf  the buffer in which to place the expanded triangle vertices. The buffer must have a limit
 *                sufficient to hold the output vertices.
 *
 * @throws ArgumentException if the index list or the input or output buffer is null, or if the output buffer
 *                           size is insufficient.
 */
public static void expandTriangleStrip(List <int> indices, FloatBuffer inBuf, FloatBuffer outBuf)
{
    if (indices == null)
    {
        string msg = Logging.getMessage("nullValue.ListIsNull");
        Logging.logger().severe(msg);
        throw new ArgumentException(msg);
    }

    if (inBuf == null || outBuf == null)
    {
        string msg = Logging.getMessage("nullValue.BufferIsNull");
        Logging.logger().severe(msg);
        throw new ArgumentException(msg);
    }

    // A strip of n indices produces n - 2 triangles, 9 floats each.
    int numTriangles = indices.Count - 2;
    if (numTriangles * 3 * 3 > outBuf.limit() - outBuf.position())
    {
        string msg = Logging.getMessage("generic.BufferSize", outBuf.limit() - outBuf.position());
        Logging.logger().severe(msg);
        throw new ArgumentException(msg);
    }

    for (int i = 2; i < indices.Count; i++)
    {
        // First vertex of the triangle.
        int k = indices[i - 2] * 3;
        outBuf.put(inBuf.get(k)).put(inBuf.get(k + 1)).put(inBuf.get(k + 2));

        // Remaining two vertices: swap their order on alternating triangles,
        // mirroring how GL_TRIANGLE_STRIP keeps a consistent winding.
        k = indices[i % 2 == 0 ? i : i - 1] * 3;
        outBuf.put(inBuf.get(k)).put(inBuf.get(k + 1)).put(inBuf.get(k + 2));

        k = indices[i % 2 == 0 ? i - 1 : i] * 3;
        outBuf.put(inBuf.get(k)).put(inBuf.get(k + 1)).put(inBuf.get(k + 2));
    }
}
public void OnSurfaceCreated(Javax.Microedition.Khronos.Egl.EGLConfig config)
{
    Log.Info(Tag, "onSurfaceCreated");

    // Dark background so text shows up well.
    GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 0.5f);

    // Upload all static geometry into GPU-ready buffers.
    cubeVertices = PrepareBuffer(WorldLayoutData.CubeCoords);
    cubeColors = PrepareBuffer(WorldLayoutData.CubeColors);
    cubeFoundColors = PrepareBuffer(WorldLayoutData.CubeFoundColors);
    cubeNormals = PrepareBuffer(WorldLayoutData.CubeNormals);
    cubeTextureCoords = PrepareBuffer(WorldLayoutData.CubeTexCoords);
    floorVertices = PrepareBuffer(WorldLayoutData.FloorCoords);
    floorNormals = PrepareBuffer(WorldLayoutData.FloorNormals);
    floorColors = PrepareBuffer(WorldLayoutData.FloorColors);

    // Textures for the two cube states.
    monkeyFound = LoadGlTexture(Resource.Drawable.texture2);
    monkeyNotFound = LoadGlTexture(Resource.Drawable.texture1);

    // Compile shaders, then create and link the program.
    int vertexShader = LoadGlShader(GLES20.GlVertexShader, Resource.Raw.vertex);
    int gridShader = LoadGlShader(GLES20.GlFragmentShader, Resource.Raw.fragment);
    glProgram = GLES20.GlCreateProgram();
    GLES20.GlAttachShader(glProgram, vertexShader);
    GLES20.GlAttachShader(glProgram, gridShader);
    GLES20.GlLinkProgram(glProgram);

    GLES20.GlEnable(GLES20.GlDepthTest);

    // Object first appears directly in front of the user.
    Matrix.SetIdentityM(modelCube, 0);
    Matrix.TranslateM(modelCube, 0, 0, 0, -mObjectDistance);

    // Floor appears below the user.
    Matrix.SetIdentityM(modelFloor, 0);
    Matrix.TranslateM(modelFloor, 0, 0, -mFloorDepth, 0);

    CheckGlError("onSurfaceCreated");
}
private void InitBuffers()
{
    // Vertex buffer: sized from the POS array (4 bytes per float) instead of
    // a hard-coded byte count, so it stays correct if POS ever changes size.
    ByteBuffer byteBufferForVer = ByteBuffer.AllocateDirect(POS.Length * 4);
    byteBufferForVer.Order(ByteOrder.NativeOrder());
    mVerBuffer = byteBufferForVer.AsFloatBuffer();
    mVerBuffer.Put(POS);
    mVerBuffer.Position(0);

    // Texture-coordinate buffer, sized from COORD.
    ByteBuffer byteBufferForTex = ByteBuffer.AllocateDirect(COORD.Length * 4);
    byteBufferForTex.Order(ByteOrder.NativeOrder());
    mTexBuffer = byteBufferForTex.AsFloatBuffer();
    mTexBuffer.Put(COORD);
    mTexBuffer.Position(0);

    // Transformed texture-coordinate buffer: same capacity as COORD; it is
    // filled later with display-transformed coordinates.
    ByteBuffer byteBufferForTransformedTex = ByteBuffer.AllocateDirect(COORD.Length * 4);
    byteBufferForTransformedTex.Order(ByteOrder.NativeOrder());
    mTexTransformedBuffer = byteBufferForTransformedTex.AsFloatBuffer();
}
public void Init()
{
    // Compile and link the filter program.
    _program = GLToolbox.CreateProgram (VertexShader, FragmentShader);

    // Resolve shader handles by name.
    _texSamplerHandle = GLES20.GlGetUniformLocation (_program, "tex_sampler");
    _texCoordHandle = GLES20.GlGetAttribLocation (_program, "a_texcoord");
    _posCoordHandle = GLES20.GlGetAttribLocation (_program, "a_position");

    // Direct, native-order coordinate buffers, rewound after loading.
    _texVertices = ByteBuffer.AllocateDirect (TexVertices.Length * FloatSizeBytes)
        .Order (ByteOrder.NativeOrder ())
        .AsFloatBuffer ();
    _texVertices.Put (TexVertices).Position (0);

    _posVertices = ByteBuffer.AllocateDirect (PosVertices.Length * FloatSizeBytes)
        .Order (ByteOrder.NativeOrder ())
        .AsFloatBuffer ();
    _posVertices.Put (PosVertices).Position (0);
}
/**
 * Updates the plane model transform matrix and extents.
 */
private void updatePlaneParameters(float[] planeMatrix, float extentX, float extentZ, FloatBuffer boundary)
{
    Array.Copy(planeMatrix, 0, mModelMatrix, 0, 16);
    if (boundary == null)
    {
        // No polygon: draw nothing.
        mVertexBuffer.Limit(0);
        mIndexBuffer.Limit(0);
        return;
    }

    // Generate a new set of vertices and a corresponding triangle strip index set so that
    // the plane boundary polygon has a fading edge. This is done by making a copy of the
    // boundary polygon vertices and scaling it down around center to push it inwards. Then
    // the index buffer is setup accordingly.
    boundary.Rewind();
    int boundaryVertices = boundary.Limit() / 2;
    int numVertices = boundaryVertices * VERTS_PER_BOUNDARY_VERT;
    // drawn as GL_TRIANGLE_STRIP with 3n-2 triangles (n-2 for fill, 2n for perimeter).
    int numIndices = boundaryVertices * INDICES_PER_BOUNDARY_VERT;

    // Grow the vertex buffer by doubling when it is too small. Seeding the
    // doubling with Math.Max(capacity, 1) avoids an infinite loop when the
    // current capacity is 0 (0 * 2 stays 0 forever).
    if (mVertexBuffer.Capacity() < numVertices * COORDS_PER_VERTEX)
    {
        int size = Math.Max(mVertexBuffer.Capacity(), 1);
        while (size < numVertices * COORDS_PER_VERTEX)
        {
            size *= 2;
        }
        mVertexBuffer = ByteBuffer.AllocateDirect(BYTES_PER_FLOAT * size)
            .Order(ByteOrder.NativeOrder()).AsFloatBuffer();
    }
    mVertexBuffer.Rewind();
    mVertexBuffer.Limit(numVertices * COORDS_PER_VERTEX);

    // Grow the index buffer the same way.
    if (mIndexBuffer.Capacity() < numIndices)
    {
        int size = Math.Max(mIndexBuffer.Capacity(), 1);
        while (size < numIndices)
        {
            size *= 2;
        }
        mIndexBuffer = ByteBuffer.AllocateDirect(BYTES_PER_SHORT * size)
            .Order(ByteOrder.NativeOrder()).AsShortBuffer();
    }
    mIndexBuffer.Rewind();
    mIndexBuffer.Limit(numIndices);

    // Note: when either dimension of the bounding box is smaller than 2*FADE_RADIUS_M we
    // generate a bunch of 0-area triangles. These don't get rendered though so it works
    // out ok.
    float xScale = Math.Max((extentX - 2 * FADE_RADIUS_M) / extentX, 0.0f);
    float zScale = Math.Max((extentZ - 2 * FADE_RADIUS_M) / extentZ, 0.0f);

    // Each boundary vertex yields two output vertices: the original rim
    // point (third attribute 0) and an inset, scaled copy (third attribute 1).
    while (boundary.HasRemaining)
    {
        float x = boundary.Get();
        float z = boundary.Get();
        mVertexBuffer.Put(x);
        mVertexBuffer.Put(z);
        mVertexBuffer.Put(0.0f);
        mVertexBuffer.Put(x * xScale);
        mVertexBuffer.Put(z * zScale);
        mVertexBuffer.Put(1.0f);
    }

    // step 1, perimeter: triangle strip around the rim.
    mIndexBuffer.Put((short)((boundaryVertices - 1) * 2));
    for (int i = 0; i < boundaryVertices; ++i)
    {
        mIndexBuffer.Put((short)(i * 2));
        mIndexBuffer.Put((short)(i * 2 + 1));
    }
    mIndexBuffer.Put((short)1);
    // This leaves us on the interior edge of the perimeter between the inset vertices
    // for boundary verts n-1 and 0.

    // step 2, interior: strip alternating between the two ends of the inset ring.
    for (int i = 1; i < boundaryVertices / 2; ++i)
    {
        mIndexBuffer.Put((short)((boundaryVertices - 1 - i) * 2 + 1));
        mIndexBuffer.Put((short)(i * 2 + 1));
    }
    if (boundaryVertices % 2 != 0)
    {
        mIndexBuffer.Put((short)((boundaryVertices / 2) * 2 + 1));
    }
}
public void onSurfaceCreated(javax.microedition.khronos.egl.EGLConfig value) { Console.WriteLine("enter AndroidCardboardExperiment onSurfaceCreated"); GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well. ByteBuffer bbVertices = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COORDS.Length * 4); bbVertices.order(ByteOrder.nativeOrder()); cubeVertices = bbVertices.asFloatBuffer(); cubeVertices.put(WorldLayoutData.CUBE_COORDS); cubeVertices.position(0); ByteBuffer bbColors = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COLORS.Length * 4); bbColors.order(ByteOrder.nativeOrder()); cubeColors = bbColors.asFloatBuffer(); cubeColors.put(WorldLayoutData.CUBE_COLORS); cubeColors.position(0); ByteBuffer bbFoundColors = ByteBuffer.allocateDirect( WorldLayoutData.CUBE_FOUND_COLORS.Length * 4); bbFoundColors.order(ByteOrder.nativeOrder()); cubeFoundColors = bbFoundColors.asFloatBuffer(); cubeFoundColors.put(WorldLayoutData.CUBE_FOUND_COLORS); cubeFoundColors.position(0); ByteBuffer bbNormals = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_NORMALS.Length * 4); bbNormals.order(ByteOrder.nativeOrder()); cubeNormals = bbNormals.asFloatBuffer(); cubeNormals.put(WorldLayoutData.CUBE_NORMALS); cubeNormals.position(0); // make a floor ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COORDS.Length * 4); bbFloorVertices.order(ByteOrder.nativeOrder()); floorVertices = bbFloorVertices.asFloatBuffer(); floorVertices.put(WorldLayoutData.FLOOR_COORDS); floorVertices.position(0); ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_NORMALS.Length * 4); bbFloorNormals.order(ByteOrder.nativeOrder()); floorNormals = bbFloorNormals.asFloatBuffer(); floorNormals.put(WorldLayoutData.FLOOR_NORMALS); floorNormals.position(0); var fcolors = 0xA26D41; // rgb to float //[javac] return __Enumerable.<Float>AsEnumerable(__SZArrayEnumerator_1.<Float>Of(x)); //[javac] ^ //[javac] required: T#1[] //[javac] found: float[] 
//[javac] reason: actual argument float[] cannot be converted to Float[] by method invocation conversion // var FLOOR_COLORS = ( // from i in Enumerable.Range(0, 6) // select new float[] { 0xA2 / 1.0f, 0x6D / 1.0f, 0x41 / 1.0f, 1.0f } //).SelectMany(x => x).ToArray(); #region floorColors var FLOOR_COLORS = new float[4 * 6]; for (int i = 0; i < FLOOR_COLORS.Length; i += 4) { FLOOR_COLORS[i + 0] = 0xA2 / 100.0f; FLOOR_COLORS[i + 1] = 0x6D / 100.0f; FLOOR_COLORS[i + 2] = 0x41 / 100.0f; FLOOR_COLORS[i + 3] = 1.0f; } FloatBuffer floorColors; ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COLORS.Length * 4); bbFloorColors.order(ByteOrder.nativeOrder()); floorColors = bbFloorColors.asFloatBuffer(); //floorColors.put(WorldLayoutData.FLOOR_COLORS); floorColors.put(FLOOR_COLORS); floorColors.position(0); #endregion #region loadGLShader Func <int, ScriptCoreLib.GLSL.Shader, int> loadGLShader = (type, xshader) => { var code = xshader.ToString(); int shader = GLES20.glCreateShader(type); GLES20.glShaderSource(shader, code); GLES20.glCompileShader(shader); // Get the compilation status. int[] compileStatus = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); // If the compilation failed, delete the shader. 
if (compileStatus[0] == 0) { Console.WriteLine("Error compiling shader: " + GLES20.glGetShaderInfoLog(shader)); GLES20.glDeleteShader(shader); shader = 0; } if (shader == 0) { throw new Exception("Error creating shader."); } return(shader); }; #endregion int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, new AndroidCardboardExperiment.Shaders.light_vertexVertexShader()); int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new Shaders.xgrid_fragmentFragmentShader()); int passthroughShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new AndroidCardboardExperiment.Shaders.passthrough_fragmentFragmentShader()); cubeProgram = GLES20.glCreateProgram(); GLES20.glAttachShader(cubeProgram, vertexShader); GLES20.glAttachShader(cubeProgram, passthroughShader); GLES20.glLinkProgram(cubeProgram); GLES20.glUseProgram(cubeProgram); checkGLError("Cube program"); cubePositionParam = GLES20.glGetAttribLocation(cubeProgram, "a_Position"); cubeNormalParam = GLES20.glGetAttribLocation(cubeProgram, "a_Normal"); cubeColorParam = GLES20.glGetAttribLocation(cubeProgram, "a_Color"); cubeModelParam = GLES20.glGetUniformLocation(cubeProgram, "u_Model"); cubeModelViewParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVMatrix"); cubeModelViewProjectionParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVP"); cubeLightPosParam = GLES20.glGetUniformLocation(cubeProgram, "u_LightPos"); GLES20.glEnableVertexAttribArray(cubePositionParam); GLES20.glEnableVertexAttribArray(cubeNormalParam); GLES20.glEnableVertexAttribArray(cubeColorParam); checkGLError("Cube program params"); floorProgram = GLES20.glCreateProgram(); GLES20.glAttachShader(floorProgram, vertexShader); GLES20.glAttachShader(floorProgram, gridShader); GLES20.glLinkProgram(floorProgram); GLES20.glUseProgram(floorProgram); checkGLError("Floor program"); floorModelParam = GLES20.glGetUniformLocation(floorProgram, "u_Model"); floorModelViewParam = GLES20.glGetUniformLocation(floorProgram, "u_MVMatrix"); 
floorModelViewProjectionParam = GLES20.glGetUniformLocation(floorProgram, "u_MVP"); floorLightPosParam = GLES20.glGetUniformLocation(floorProgram, "u_LightPos"); floorPositionParam = GLES20.glGetAttribLocation(floorProgram, "a_Position"); floorNormalParam = GLES20.glGetAttribLocation(floorProgram, "a_Normal"); floorColorParam = GLES20.glGetAttribLocation(floorProgram, "a_Color"); GLES20.glEnableVertexAttribArray(floorPositionParam); GLES20.glEnableVertexAttribArray(floorNormalParam); GLES20.glEnableVertexAttribArray(floorColorParam); checkGLError("Floor program params"); GLES20.glEnable(GLES20.GL_DEPTH_TEST); //GLES20.glEnable(GLES20.GL_FOG); checkGLError("onSurfaceCreated"); Console.WriteLine("exit AndroidCardboardExperiment onSurfaceCreated"); vFinishFrame = (com.google.vrtoolkit.cardboard.Viewport v) => { // GPU thread stops now.. FrameOne.Stop(); }; // I/System.Console(28103): CardboardForEdgeExperiment { ProcessorCount = 8, MODEL = SM-G925F, CurrentManagedThreadId = 11305, FrameCounter = 28, LastFrameMilliseconds = 40, codeFPS = 25.0, pitch = 1.579644, yaw = 1.6225219 } #region vNewFrame vNewFrame = (com.google.vrtoolkit.cardboard.HeadTransform headTransform) => { // http://stackoverflow.com/questions/11851343/raise-fps-on-android-tablet-above-60-for-opengl-game // http://gafferongames.com/game-physics/fix-your-timestep/ #region FrameWatch if (FrameWatch.ElapsedMilliseconds >= 1000) { var codeFPS = 1000.0 / FrameOne.ElapsedMilliseconds; // we now know how many frames did fit into it // need 60 or more! 
Console.WriteLine("CardboardForEdgeExperiment " + new { // static System.Environment.ProcessorCount, android.os.Build.MODEL, System.Environment.CurrentManagedThreadId, FrameCounter, // dynamic LastFrameMilliseconds = FrameOne.ElapsedMilliseconds, codeFPS, // very dynamic pitch, yaw }); // I/System.Console(28117): CardboardForEdgeExperiment { ProcessorCount = 2, MODEL = Nexus 9, CurrentManagedThreadId = 1647, FrameCounter = 60, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, pitch = 1.5978987, yaw = -2.0770574 } FrameWatch.Restart(); FrameCounter = 0; } #endregion // GPU thread starts now.. FrameOne.Restart(); FrameCounter++; //Console.WriteLine("AndroidCardboardExperiment onNewFrame"); headTransform.getHeadView(headView, 0); checkGLError("onReadyToDraw"); // I/System.Console(27769): CardboardForEdgeExperiment { FrameCounter = 61, LastFrameMilliseconds = 0, codeFPS = Infinity, CurrentManagedThreadId = 1637, ProcessorCount = 2, MODEL = Nexus 9 } // add placeholder slowdown //System.Threading.Thread.Sleep(5); // I/System.Console(27840): CardboardForEdgeExperiment { FrameCounter = 60, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, CurrentManagedThreadId = 1642, ProcessorCount = 2, MODEL = Nexus 9 } }; #endregion // if we define it here, we get to see it in vr... var modelCube = new float[16]; // I/System.Console(19917): CardboardForEdgeExperiment { ProcessorCount = 8, MODEL = SM-G925F, CurrentManagedThreadId = 9959, FrameCounter = 46, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, pitch = 0.9070491, yaw = -0.3660261 } #region vDrawEye vDrawEye = (com.google.vrtoolkit.cardboard.Eye eye) => { // VIDEO via "X:\util\android-sdk-windows\tools\ddms.bat" var camera = new float[16]; // static void setLookAtM(float[] rm, int rmOffset, float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ) // Build the camera matrix and apply it to the ModelView. 
Matrix.setLookAtM(camera, 0, 0.0f, 0.0f, CAMERA_Z, 0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); #region glClearColor // skybox/video instead? GLES20.glClearColor( 0x87 / 255f, 0xCE / 255f, 0xEB / 255f, 1.0f ); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); #endregion var view = new float[16]; // can we strafe? // Apply the eye transformation to the camera. Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0); // we tapped into it. this strafes ius! Matrix.translateM(view, 0, (float)Math.Sin(TotalTime.ElapsedMilliseconds * 0.0001f) * objectDistance * 2.5f, // up down //(float)Math.Sin(TotalTime.ElapsedMilliseconds * 0.001f) * floorDepth * 0.5f, (float)Math.Cos(TotalTime.ElapsedMilliseconds * 0.001f) * floorDepth * 0.1f, 0 ); // Set the position of the light Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0); // Build the ModelView and ModelViewProjection matrices // for calculating cube position and light. float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR); // just a buffer? var modelView = new float[16]; #region drawCube() Action <float, float, float> drawCube = (tx, ty, tz) => { #region isLookingAtObject Func <bool> isLookingAtObject = () => { float[] initVec = { 0, 0, 0, 1.0f }; float[] objPositionVec = new float[4]; // Convert object space to camera space. Use the headView from onNewFrame. Matrix.multiplyMM(modelView, 0, headView, 0, modelCube, 0); Matrix.multiplyMV(objPositionVec, 0, modelView, 0, initVec, 0); pitch = (float)Math.Atan2(objPositionVec[1], -objPositionVec[2]); yaw = (float)Math.Atan2(objPositionVec[0], -objPositionVec[2]); if (Math.Abs(pitch) < PITCH_LIMIT) { if (Math.Abs(yaw) < YAW_LIMIT) { return(true); } } return(false); }; #endregion // Object first appears directly in front of user. Matrix.setIdentityM(modelCube, 0); // cant see it? 
var scale = 5.0f; //Matrix.scaleM(modelCube, 0, scale, scale, scale); Matrix.translateM(modelCube, 0, tx, ty, tz); Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0); Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0); // public static void scaleM (float[] m, int mOffset, float x, float y, float z) // Build the Model part of the ModelView matrix. //Matrix.rotateM(modelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f); // cant see rotation? Matrix.rotateM(modelCube, 0, TotalTime.ElapsedMilliseconds * 0.01f, // upwards rot. //0.5f, 0f, // sideways, left to right 0.5f , 0.0f); // http://developer.android.com/reference/android/opengl/Matrix.html#translateM(float[], int, float, float, float) // the cube rotates in front of us. // do we need to use a special program to draw a cube? // how can we make it bigger? GLES20.glUseProgram(cubeProgram); GLES20.glUniform3fv(cubeLightPosParam, 1, lightPosInEyeSpace, 0); // Set the Model in the shader, used to calculate lighting GLES20.glUniformMatrix4fv(cubeModelParam, 1, false, modelCube, 0); // Set the ModelView in the shader, used to calculate lighting GLES20.glUniformMatrix4fv(cubeModelViewParam, 1, false, modelView, 0); // Set the position of the cube GLES20.glVertexAttribPointer(cubePositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, cubeVertices); // Set the ModelViewProjection matrix in the shader. 
GLES20.glUniformMatrix4fv(cubeModelViewProjectionParam, 1, false, modelViewProjection, 0); // Set the normal positions of the cube, again for shading GLES20.glVertexAttribPointer(cubeNormalParam, 3, GLES20.GL_FLOAT, false, 0, cubeNormals); #region cubeColors var cc = cubeColors; if (!isLookingAtObject()) { cc = cubeFoundColors; } GLES20.glVertexAttribPointer(cubeColorParam, 4, GLES20.GL_FLOAT, false, 0, cc); #endregion GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36); checkGLError("Drawing cube"); }; #endregion #region drawCube drawCube(0, objectDistance, objectDistance * -1.0f); drawCube(0, 0, objectDistance * -2.0f); // looks like an airstrip // low fps? //var endOfMatrix = 64; var endOfMatrix = 20; for (int i = -endOfMatrix; i < endOfMatrix; i++) { drawCube(objectDistance, -floorDepth, objectDistance * -2.0f * i); drawCube(-objectDistance, -floorDepth, objectDistance * -2.0f * i); drawCube(objectDistance * 0.5f, 0, objectDistance * -2.0f * i); drawCube(objectDistance * -0.5f, 0, objectDistance * -2.0f * i); } #endregion var modelFloor = new float[16]; Matrix.setIdentityM(modelFloor, 0); Matrix.translateM(modelFloor, 0, // the floor escapes! //TotalTime.ElapsedMilliseconds * 0.01f, 0, -floorDepth, 0); // Floor appears below user. // Set modelView for the floor, so we draw floor in the correct location Matrix.multiplyMM(modelView, 0, view, 0, modelFloor, 0); Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0); #region drawFloor // called by onDrawEye Action drawFloor = delegate { GLES20.glUseProgram(floorProgram); // Set ModelView, MVP, position, normals, and color. 
GLES20.glUniform3fv(floorLightPosParam, 1, lightPosInEyeSpace, 0); GLES20.glUniformMatrix4fv(floorModelParam, 1, false, modelFloor, 0); GLES20.glUniformMatrix4fv(floorModelViewParam, 1, false, modelView, 0); GLES20.glUniformMatrix4fv(floorModelViewProjectionParam, 1, false, modelViewProjection, 0); GLES20.glVertexAttribPointer(floorPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, floorVertices); GLES20.glVertexAttribPointer(floorNormalParam, 3, GLES20.GL_FLOAT, false, 0, floorNormals); GLES20.glVertexAttribPointer(floorColorParam, 4, GLES20.GL_FLOAT, false, 0, floorColors); GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6); checkGLError("drawing floor"); }; drawFloor(); #endregion }; #endregion }
/**
 * Creates the buffers we use to store information about the 3D world.
 *
 * OpenGL doesn't use Java arrays, but rather needs data in a format it can understand.
 * Hence we use ByteBuffers.
 */
public void OnSurfaceCreated (Javax.Microedition.Khronos.Egl.EGLConfig config)
{
	Android.Util.Log.Info (TAG, "onSurfaceCreated");
	GLES20.GlClearColor (0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.

	// Cube geometry: coordinates, idle colors, "found" colors, normals.
	cubeVertices = CreateDirectFloatBuffer (WorldLayoutData.CUBE_COORDS);
	cubeColors = CreateDirectFloatBuffer (WorldLayoutData.CUBE_COLORS);
	cubeFoundColors = CreateDirectFloatBuffer (WorldLayoutData.CUBE_FOUND_COLORS);
	cubeNormals = CreateDirectFloatBuffer (WorldLayoutData.CUBE_NORMALS);

	// Floor geometry.
	floorVertices = CreateDirectFloatBuffer (WorldLayoutData.FLOOR_COORDS);
	floorNormals = CreateDirectFloatBuffer (WorldLayoutData.FLOOR_NORMALS);
	floorColors = CreateDirectFloatBuffer (WorldLayoutData.FLOOR_COLORS);

	// Compile the shaders once; the vertex shader is shared by both programs.
	int vertexShader = loadGLShader (GLES20.GlVertexShader, Resource.Raw.light_vertex);
	int gridShader = loadGLShader (GLES20.GlFragmentShader, Resource.Raw.grid_fragment);
	int passthroughShader = loadGLShader (GLES20.GlFragmentShader, Resource.Raw.passthrough_fragment);

	// Cube program: vertex + passthrough fragment shader.
	cubeProgram = GLES20.GlCreateProgram ();
	GLES20.GlAttachShader (cubeProgram, vertexShader);
	GLES20.GlAttachShader (cubeProgram, passthroughShader);
	GLES20.GlLinkProgram (cubeProgram);
	GLES20.GlUseProgram (cubeProgram);
	CheckGLError ("Cube program");

	cubePositionParam = GLES20.GlGetAttribLocation (cubeProgram, "a_Position");
	cubeNormalParam = GLES20.GlGetAttribLocation (cubeProgram, "a_Normal");
	cubeColorParam = GLES20.GlGetAttribLocation (cubeProgram, "a_Color");
	cubeModelParam = GLES20.GlGetUniformLocation (cubeProgram, "u_Model");
	cubeModelViewParam = GLES20.GlGetUniformLocation (cubeProgram, "u_MVMatrix");
	cubeModelViewProjectionParam = GLES20.GlGetUniformLocation (cubeProgram, "u_MVP");
	cubeLightPosParam = GLES20.GlGetUniformLocation (cubeProgram, "u_LightPos");
	CheckGLError ("Cube program params");

	// Floor program: vertex + grid fragment shader.
	floorProgram = GLES20.GlCreateProgram ();
	GLES20.GlAttachShader (floorProgram, vertexShader);
	GLES20.GlAttachShader (floorProgram, gridShader);
	GLES20.GlLinkProgram (floorProgram);
	GLES20.GlUseProgram (floorProgram);
	CheckGLError ("Floor program");

	floorModelParam = GLES20.GlGetUniformLocation (floorProgram, "u_Model");
	floorModelViewParam = GLES20.GlGetUniformLocation (floorProgram, "u_MVMatrix");
	floorModelViewProjectionParam = GLES20.GlGetUniformLocation (floorProgram, "u_MVP");
	floorLightPosParam = GLES20.GlGetUniformLocation (floorProgram, "u_LightPos");
	floorPositionParam = GLES20.GlGetAttribLocation (floorProgram, "a_Position");
	floorNormalParam = GLES20.GlGetAttribLocation (floorProgram, "a_Normal");
	floorColorParam = GLES20.GlGetAttribLocation (floorProgram, "a_Color");
	CheckGLError ("Floor program params");

	Matrix.SetIdentityM (modelFloor, 0);
	Matrix.TranslateM (modelFloor, 0, 0, -floorDepth, 0); // Floor appears below user.

	// Avoid any delays during start-up due to decoding of sound files.
	System.Threading.Tasks.Task.Run (() => {
		// Start spatial audio playback of SOUND_FILE at the model postion. The returned
		// soundId handle is stored and allows for repositioning the sound object whenever
		// the cube position changes.
		gvrAudioEngine.PreloadSoundFile (SOUND_FILE);
		soundId = gvrAudioEngine.CreateSoundObject (SOUND_FILE);
		gvrAudioEngine.SetSoundObjectPosition (
			soundId, modelPosition [0], modelPosition [1], modelPosition [2]);
		gvrAudioEngine.PlaySound (soundId, true /* looped playback */);
	});

	UpdateModelPosition ();
	CheckGLError ("onSurfaceCreated");
}

// Wraps |data| in a native-order direct FloatBuffer rewound to position 0,
// ready to hand to GlVertexAttribPointer. (4 bytes per float.)
static FloatBuffer CreateDirectFloatBuffer (float[] data)
{
	ByteBuffer bb = ByteBuffer.AllocateDirect (data.Length * 4);
	bb.Order (ByteOrder.NativeOrder ());
	FloatBuffer fb = bb.AsFloatBuffer ();
	fb.Put (data);
	fb.Position (0);
	return fb;
}
public LessonOneRenderer() { this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object) __gl; #region Define points for equilateral triangles. // This triangle is red, green, and blue. float[] triangle1VerticesData = { // X, Y, Z, // R, G, B, A -0.5f, -0.25f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.5f, -0.25f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.559016994f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f }; // This triangle is yellow, cyan, and magenta. float[] triangle2VerticesData = { // X, Y, Z, // R, G, B, A -0.5f, -0.25f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 0.5f, -0.25f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.559016994f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f }; // This triangle is white, gray, and black. float[] triangle3VerticesData = { // X, Y, Z, // R, G, B, A -0.5f, -0.25f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.5f, -0.25f, 0.0f, 0.5f, 0.5f, 0.5f, 1.0f, 0.0f, 0.559016994f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f }; #endregion // Initialize the buffers. mTriangle1Vertices = ByteBuffer.allocateDirect(triangle1VerticesData.Length * mBytesPerFloat) .order(ByteOrder.nativeOrder()).asFloatBuffer(); mTriangle2Vertices = ByteBuffer.allocateDirect(triangle2VerticesData.Length * mBytesPerFloat) .order(ByteOrder.nativeOrder()).asFloatBuffer(); mTriangle3Vertices = ByteBuffer.allocateDirect(triangle3VerticesData.Length * mBytesPerFloat) .order(ByteOrder.nativeOrder()).asFloatBuffer(); mTriangle1Vertices.put(triangle1VerticesData).position(0); mTriangle2Vertices.put(triangle2VerticesData).position(0); mTriangle3Vertices.put(triangle3VerticesData).position(0); }
/// <summary>
/// Maximum number curl can be divided into. The bigger the value
/// the smoother curl will be.With the cost of having more
/// polygons for drawing.
/// </summary>
/// <param name="maxCurlSplits"></param>
public CurlMesh(int maxCurlSplits)
{
    // There really is no use for 0 splits.
    mMaxCurlSplits = maxCurlSplits < 1 ? 1 : maxCurlSplits;

    // FIX: size from the clamped mMaxCurlSplits, not the raw parameter,
    // so a caller passing 0 still gets the capacity the code below assumes.
    mArrScanLines = new Array <Double>(mMaxCurlSplits + 2);
    mArrOutputVertices = new Array <Vertex>(7);
    mArrRotatedVertices = new Array <Vertex>(4);
    mArrIntersections = new Array <Vertex>(2);
    mArrTempVertices = new Array <Vertex>(7 + 4);
    for (int i = 0; i < 7 + 4; ++i)
    {
        mArrTempVertices.Add(new Vertex());
    }

    if (DRAW_SHADOW)
    {
        // Shadow vertex pools: (splits + 2) scan lines, 2 vertices each.
        mArrSelfShadowVertices = new Array <ShadowVertex>(
            (mMaxCurlSplits + 2) * 2);
        mArrDropShadowVertices = new Array <ShadowVertex>(
            (mMaxCurlSplits + 2) * 2);
        mArrTempShadowVertices = new Array <ShadowVertex>(
            (mMaxCurlSplits + 2) * 2);
        for (int i = 0; i < (mMaxCurlSplits + 2) * 2; ++i)
        {
            mArrTempShadowVertices.Add(new ShadowVertex());
        }
    }

    // Rectangle consists of 4 vertices. Index 0 = top-left, index 1 =
    // bottom-left, index 2 = top-right and index 3 = bottom-right.
    for (int i = 0; i < 4; ++i)
    {
        mRectangle[i] = new Vertex();
    }
    // Set up shadow penumbra direction to each vertex. We do fake 'self
    // shadow' calculations based on this information.
    mRectangle[0].mPenumbraX = mRectangle[1].mPenumbraX = mRectangle[1].mPenumbraY = mRectangle[3].mPenumbraY = -1;
    mRectangle[0].mPenumbraY = mRectangle[2].mPenumbraX = mRectangle[2].mPenumbraY = mRectangle[3].mPenumbraX = 1;

    if (DRAW_CURL_POSITION)
    {
        // 3 debug lines, 2 endpoints each, 2 floats per point, 4 bytes per float.
        mCurlPositionLinesCount = 3;
        ByteBuffer hvbb = ByteBuffer
            .AllocateDirect(mCurlPositionLinesCount * 2 * 2 * 4);
        hvbb.Order(ByteOrder.NativeOrder());
        mBufCurlPositionLines = hvbb.AsFloatBuffer();
        mBufCurlPositionLines.Position(0);
    }

    // There are 4 vertices from bounding rect, max 2 from adding split line
    // to two corners and curl consists of max mMaxCurlSplits lines each
    // outputting 2 vertices.
    int maxVerticesCount = 4 + 2 + (2 * mMaxCurlSplits);

    // Vertex positions: 3 floats per vertex.
    ByteBuffer vbb = ByteBuffer.AllocateDirect(maxVerticesCount * 3 * 4);
    vbb.Order(ByteOrder.NativeOrder());
    mBufVertices = vbb.AsFloatBuffer();
    mBufVertices.Position(0);

    if (DRAW_TEXTURE)
    {
        // Texture coordinates: 2 floats per vertex.
        ByteBuffer tbb = ByteBuffer
            .AllocateDirect(maxVerticesCount * 2 * 4);
        tbb.Order(ByteOrder.NativeOrder());
        mBufTexCoords = tbb.AsFloatBuffer();
        mBufTexCoords.Position(0);
    }

    // Vertex colors: 4 floats (RGBA) per vertex.
    ByteBuffer cbb = ByteBuffer.AllocateDirect(maxVerticesCount * 4 * 4);
    cbb.Order(ByteOrder.NativeOrder());
    mBufColors = cbb.AsFloatBuffer();
    mBufColors.Position(0);

    if (DRAW_SHADOW)
    {
        int maxShadowVerticesCount = (mMaxCurlSplits + 2) * 2 * 2;

        // Shadow colors (RGBA) and shadow vertex positions (XYZ).
        ByteBuffer scbb = ByteBuffer
            .AllocateDirect(maxShadowVerticesCount * 4 * 4);
        scbb.Order(ByteOrder.NativeOrder());
        mBufShadowColors = scbb.AsFloatBuffer();
        mBufShadowColors.Position(0);

        ByteBuffer sibb = ByteBuffer
            .AllocateDirect(maxShadowVerticesCount * 3 * 4);
        sibb.Order(ByteOrder.NativeOrder());
        mBufShadowVertices = sibb.AsFloatBuffer();
        mBufShadowVertices.Position(0);

        mDropShadowCount = mSelfShadowCount = 0;
    }
}
public void Initialize () { GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well cubeVertices = PrepareBuffer (WorldLayoutData.CubeCoords); cubeNormals = PrepareBuffer (WorldLayoutData.CubeNormals); cubeTextureCoords = PrepareBuffer (WorldLayoutData.CubeTexCoords); floorVertices = PrepareBuffer (WorldLayoutData.FloorCoords); floorNormals = PrepareBuffer (WorldLayoutData.FloorNormals); floorColors = PrepareBuffer (WorldLayoutData.FloorColors); monkeyFound = DrawingUtils.LoadGlTexture (resources, Resource.Drawable.texture2); monkeyNotFound = DrawingUtils.LoadGlTexture (resources, Resource.Drawable.texture1); int vertexShader = DrawingUtils.LoadGlShader(GLES20.GlVertexShader, resources, Resource.Raw.vertex); int gridShader = DrawingUtils.LoadGlShader(GLES20.GlFragmentShader, resources, Resource.Raw.fragment); glProgram = GLES20.GlCreateProgram(); GLES20.GlAttachShader(glProgram, vertexShader); GLES20.GlAttachShader(glProgram, gridShader); GLES20.GlLinkProgram(glProgram); GLES20.GlEnable(GLES20.GlDepthTest); // Object first appears directly in front of user Matrix.SetIdentityM(modelCube, 0); Matrix.TranslateM(modelCube, 0, 0, 0, -mObjectDistance); Matrix.SetIdentityM(modelFloor, 0); Matrix.TranslateM(modelFloor, 0, 0, -mFloorDepth, 0); // Floor appears below user CheckGlError("onSurfaceCreated"); }
//script: error JSC1000: Java : Opcode not implemented: stelem.r4 at AndroidOpenGLESLesson5Activity.Activities.AndroidOpenGLESLesson5Activity+LessonFiveRenderer+<>c.<.ctor>b__17_0
// NOTE(review): the cross-compiler note above reports a failure on float element
// stores inside the lambda below; this block is therefore sensitive to the exact
// code emitted and is left byte-identical — comments only.
//
// Builds interleaved-per-attribute cube data (positions and colors) by expanding
// 8 corner points into 36 vertices (6 faces x 2 triangles x 3 vertices), then
// copies each array into a native-order direct FloatBuffer.
public LessonFiveRenderer(Context activityContext)
{
    this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object) __gl;
    mActivityContext = activityContext;

    #region generateCubeData
    // Expands 8 cube corner points (elementsPerPoint floats each) into a flat
    // array covering 6 faces, 2 triangles per face, 3 vertices per triangle.
    Func <f[], f[], f[], f[], f[], f[], f[], f[], int, f[]> generateCubeData =
        (f[] point1, f[] point2, f[] point3, f[] point4, f[] point5, f[] point6, f[] point7, f[] point8, int elementsPerPoint) =>
    {
        // Given a cube with the points defined as follows:
        // front left top, front right top, front left bottom, front right bottom,
        // back left top, back right top, back left bottom, back right bottom,
        // return an array of 6 sides, 2 triangles per side, 3 vertices per triangle, and 4 floats per vertex.
        int FRONT = 0;
        int RIGHT = 1;
        int BACK = 2;
        int LEFT = 3;
        int TOP = 4;
        int BOTTOM = 5;

        int size = elementsPerPoint * 6 * 6; // 6 faces x 6 vertices per face
        float[] cubeData = new float[size];

        for (int face = 0; face < 6; face++)
        {
            // Relative to the side, p1 = top left, p2 = top right, p3 = bottom left, p4 = bottom right
            float[] p1, p2, p3, p4;

            // Select the points for this face
            if (face == FRONT)
            {
                p1 = point1; p2 = point2; p3 = point3; p4 = point4;
            }
            else if (face == RIGHT)
            {
                p1 = point2; p2 = point6; p3 = point4; p4 = point8;
            }
            else if (face == BACK)
            {
                p1 = point6; p2 = point5; p3 = point8; p4 = point7;
            }
            else if (face == LEFT)
            {
                p1 = point5; p2 = point1; p3 = point7; p4 = point3;
            }
            else if (face == TOP)
            {
                p1 = point5; p2 = point6; p3 = point1; p4 = point2;
            }
            else // if (side == BOTTOM)
            {
                p1 = point8; p2 = point7; p3 = point4; p4 = point3;
            }

            // In OpenGL counter-clockwise winding is default. This means that when we look at a triangle,
            // if the points are counter-clockwise we are looking at the "front". If not we are looking at
            // the back. OpenGL has an optimization where all back-facing triangles are culled, since they
            // usually represent the backside of an object and aren't visible anyways.

            // Build the triangles
            //  1---3,6
            //  | / |
            // 2,4--5
            int offset = face * elementsPerPoint * 6;

            // Triangle 1: p1, p3, p2. Triangle 2: p3, p4, p2 (shared edge).
            for (int i = 0; i < elementsPerPoint; i++)
            {
                cubeData[offset++] = p1[i];
            }
            for (int i = 0; i < elementsPerPoint; i++)
            {
                cubeData[offset++] = p3[i];
            }
            for (int i = 0; i < elementsPerPoint; i++)
            {
                cubeData[offset++] = p2[i];
            }
            for (int i = 0; i < elementsPerPoint; i++)
            {
                cubeData[offset++] = p3[i];
            }
            for (int i = 0; i < elementsPerPoint; i++)
            {
                cubeData[offset++] = p4[i];
            }
            for (int i = 0; i < elementsPerPoint; i++)
            {
                cubeData[offset++] = p2[i];
            }
        }

        return(cubeData);
    };
    #endregion

    // Define points for a cube.
    // X, Y, Z
    float[] p1p = { -1.0f, 1.0f, 1.0f };
    float[] p2p = { 1.0f, 1.0f, 1.0f };
    float[] p3p = { -1.0f, -1.0f, 1.0f };
    float[] p4p = { 1.0f, -1.0f, 1.0f };
    float[] p5p = { -1.0f, 1.0f, -1.0f };
    float[] p6p = { 1.0f, 1.0f, -1.0f };
    float[] p7p = { -1.0f, -1.0f, -1.0f };
    float[] p8p = { 1.0f, -1.0f, -1.0f };

    float[] cubePositionData = generateCubeData(p1p, p2p, p3p, p4p, p5p, p6p, p7p, p8p, p1p.Length);

    // Points of the cube: color information
    // R, G, B, A
    float[] p1c = { 1.0f, 0.0f, 0.0f, 1.0f };        // red
    float[] p2c = { 1.0f, 0.0f, 1.0f, 1.0f };        // magenta
    float[] p3c = { 0.0f, 0.0f, 0.0f, 1.0f };        // black
    float[] p4c = { 0.0f, 0.0f, 1.0f, 1.0f };        // blue
    float[] p5c = { 1.0f, 1.0f, 0.0f, 1.0f };        // yellow
    float[] p6c = { 1.0f, 1.0f, 1.0f, 1.0f };        // white
    float[] p7c = { 0.0f, 1.0f, 0.0f, 1.0f };        // green
    float[] p8c = { 0.0f, 1.0f, 1.0f, 1.0f };        // cyan

    float[] cubeColorData = generateCubeData(p1c, p2c, p3c, p4c, p5c, p6c, p7c, p8c, p1c.Length);

    // Initialize the buffers.
    mCubePositions = ByteBuffer.allocateDirect(cubePositionData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubePositions.put(cubePositionData).position(0);

    mCubeColors = ByteBuffer.allocateDirect(cubeColorData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeColors.put(cubeColorData).position(0);
}
/** * Allocates a new direct {@link java.nio.FloatBuffer} of the specified size, in chars. * * @param size the new FloatBuffer's size. * @param allocateDirect true to allocate and return a direct buffer, false to allocate and return a non-direct * buffer. * * @return the new buffer. * * @throws ArgumentException if size is negative. */ public static FloatBuffer newFloatBuffer(int size, bool allocateDirect) { if (size < 0) { String message = Logging.getMessage("generic.SizeOutOfRange", size); Logging.logger().severe(message); throw new ArgumentException(message); } return(allocateDirect ? newDirectByteBuffer(SIZEOF_FLOAT * size).asFloatBuffer() : FloatBuffer.allocate(size)); }
public override FloatBuffer Put(FloatBuffer src) { throw new ReadOnlyBufferException(); }
/**
 * Creates the buffers we use to store information about the 3D world.
 *
 * OpenGL doesn't use Java arrays, but rather needs data in a format it can understand.
 * Hence we use ByteBuffers.
 */
public void OnSurfaceCreated(Javax.Microedition.Khronos.Egl.EGLConfig config)
{
    Android.Util.Log.Info(TAG, "onSurfaceCreated");
    GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.

    // Cube geometry: coordinates, idle colors, "found" colors, normals.
    cubeVertices = MakeNativeFloatBuffer(WorldLayoutData.CUBE_COORDS);
    cubeColors = MakeNativeFloatBuffer(WorldLayoutData.CUBE_COLORS);
    cubeFoundColors = MakeNativeFloatBuffer(WorldLayoutData.CUBE_FOUND_COLORS);
    cubeNormals = MakeNativeFloatBuffer(WorldLayoutData.CUBE_NORMALS);

    // make a floor
    floorVertices = MakeNativeFloatBuffer(WorldLayoutData.FLOOR_COORDS);
    floorNormals = MakeNativeFloatBuffer(WorldLayoutData.FLOOR_NORMALS);
    floorColors = MakeNativeFloatBuffer(WorldLayoutData.FLOOR_COLORS);

    // Compile the shaders once; the vertex shader is shared by both programs.
    int vertexShader = loadGLShader(GLES20.GlVertexShader, Resource.Raw.light_vertex);
    int gridShader = loadGLShader(GLES20.GlFragmentShader, Resource.Raw.grid_fragment);
    int passthroughShader = loadGLShader(GLES20.GlFragmentShader, Resource.Raw.passthrough_fragment);

    // Cube program: vertex + passthrough fragment shader.
    cubeProgram = GLES20.GlCreateProgram();
    GLES20.GlAttachShader(cubeProgram, vertexShader);
    GLES20.GlAttachShader(cubeProgram, passthroughShader);
    GLES20.GlLinkProgram(cubeProgram);
    GLES20.GlUseProgram(cubeProgram);
    CheckGLError("Cube program");

    cubePositionParam = GLES20.GlGetAttribLocation(cubeProgram, "a_Position");
    cubeNormalParam = GLES20.GlGetAttribLocation(cubeProgram, "a_Normal");
    cubeColorParam = GLES20.GlGetAttribLocation(cubeProgram, "a_Color");
    cubeModelParam = GLES20.GlGetUniformLocation(cubeProgram, "u_Model");
    cubeModelViewParam = GLES20.GlGetUniformLocation(cubeProgram, "u_MVMatrix");
    cubeModelViewProjectionParam = GLES20.GlGetUniformLocation(cubeProgram, "u_MVP");
    cubeLightPosParam = GLES20.GlGetUniformLocation(cubeProgram, "u_LightPos");
    CheckGLError("Cube program params");

    // Floor program: vertex + grid fragment shader.
    floorProgram = GLES20.GlCreateProgram();
    GLES20.GlAttachShader(floorProgram, vertexShader);
    GLES20.GlAttachShader(floorProgram, gridShader);
    GLES20.GlLinkProgram(floorProgram);
    GLES20.GlUseProgram(floorProgram);
    CheckGLError("Floor program");

    floorModelParam = GLES20.GlGetUniformLocation(floorProgram, "u_Model");
    floorModelViewParam = GLES20.GlGetUniformLocation(floorProgram, "u_MVMatrix");
    floorModelViewProjectionParam = GLES20.GlGetUniformLocation(floorProgram, "u_MVP");
    floorLightPosParam = GLES20.GlGetUniformLocation(floorProgram, "u_LightPos");
    floorPositionParam = GLES20.GlGetAttribLocation(floorProgram, "a_Position");
    floorNormalParam = GLES20.GlGetAttribLocation(floorProgram, "a_Normal");
    floorColorParam = GLES20.GlGetAttribLocation(floorProgram, "a_Color");
    CheckGLError("Floor program params");

    Matrix.SetIdentityM(modelFloor, 0);
    Matrix.TranslateM(modelFloor, 0, 0, -floorDepth, 0); // Floor appears below user.

    // Avoid any delays during start-up due to decoding of sound files.
    System.Threading.Tasks.Task.Run(() =>
    {
        // Start spatial audio playback of SOUND_FILE at the model postion. The returned
        // soundId handle is stored and allows for repositioning the sound object whenever
        // the cube position changes.
        gvrAudioEngine.PreloadSoundFile(SOUND_FILE);
        soundId = gvrAudioEngine.CreateSoundObject(SOUND_FILE);
        gvrAudioEngine.SetSoundObjectPosition(
            soundId, modelPosition [0], modelPosition [1], modelPosition [2]);
        gvrAudioEngine.PlaySound(soundId, true /* looped playback */);
    });

    UpdateModelPosition();

    CheckGLError("onSurfaceCreated");
}

// Wraps |data| in a native-order direct FloatBuffer rewound to position 0,
// ready to hand to GlVertexAttribPointer. (4 bytes per float.)
static FloatBuffer MakeNativeFloatBuffer(float[] data)
{
    ByteBuffer bb = ByteBuffer.AllocateDirect(data.Length * 4);
    bb.Order(ByteOrder.NativeOrder());
    FloatBuffer fb = bb.AsFloatBuffer();
    fb.Put(data);
    fb.Position(0);
    return fb;
}
// Draw |textures| using |vertices| (X,Y coordinates). private void drawRectangle(int[] textures, FloatBuffer vertices) { for (int i = 0; i < 3; ++i) { GLES20.GlActiveTexture(GLES20.GlTexture0 + i); GLES20.GlBindTexture(GLES20.GlTexture2d, textures[i]); } GLES20.GlVertexAttribPointer(posLocation, 2, GLES20.GlFloat, false, 0, vertices); GLES20.GlEnableVertexAttribArray(posLocation); GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4); checkNoGLES2Error(); }
/** * Retrieves the coordinates of vertices in this geometry. * * @param buffer Buffer to receive coordinates. */ public void getVertices(FloatBuffer buffer) { this.getFloatFromAccessor(buffer, this.getVertexAccessor(), "VERTEX", COORDS_PER_VERTEX); }
public override FloatBuffer put(FloatBuffer prm1) { return(default(FloatBuffer)); }
/** * Retrieves normal vectors in this geometry. * * @param buffer Buffer to receive coordinates. */ public void getNormals(FloatBuffer buffer) { this.getFloatFromAccessor(buffer, this.getNormalAccessor(), "NORMAL", COORDS_PER_VERTEX); }
// Builds a unit cube as 36 vertices (6 faces x 2 triangles x 3 vertices) with
// per-face colors and per-face normals, then uploads each attribute array into
// a native-order direct FloatBuffer.
public LessonTwoRenderer()
{
    this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object) __gl;

    #region Define points for a cube.

    // X, Y, Z
    float[] positionData =
    {
        // In OpenGL counter-clockwise winding is default. This means that when we look at a triangle,
        // if the points are counter-clockwise we are looking at the "front". If not we are looking at
        // the back. OpenGL has an optimization where all back-facing triangles are culled, since they
        // usually represent the backside of an object and aren't visible anyways.

        // Front face
        -1.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, 1.0f,
        1.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, 1.0f,
        1.0f, -1.0f, 1.0f,
        1.0f, 1.0f, 1.0f,

        // Right face
        1.0f, 1.0f, 1.0f,
        1.0f, -1.0f, 1.0f,
        1.0f, 1.0f, -1.0f,
        1.0f, -1.0f, 1.0f,
        1.0f, -1.0f, -1.0f,
        1.0f, 1.0f, -1.0f,

        // Back face
        1.0f, 1.0f, -1.0f,
        1.0f, -1.0f, -1.0f,
        -1.0f, 1.0f, -1.0f,
        1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, 1.0f, -1.0f,

        // Left face
        -1.0f, 1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f, 1.0f,
        -1.0f, 1.0f, 1.0f,

        // Top face
        -1.0f, 1.0f, -1.0f,
        -1.0f, 1.0f, 1.0f,
        1.0f, 1.0f, -1.0f,
        -1.0f, 1.0f, 1.0f,
        1.0f, 1.0f, 1.0f,
        1.0f, 1.0f, -1.0f,

        // Bottom face
        1.0f, -1.0f, -1.0f,
        1.0f, -1.0f, 1.0f,
        -1.0f, -1.0f, -1.0f,
        1.0f, -1.0f, 1.0f,
        -1.0f, -1.0f, 1.0f,
        -1.0f, -1.0f, -1.0f,
    };

    // R, G, B, A — one solid color per face, repeated for all 6 of its vertices.
    float[] colorData =
    {
        // Front face (red)
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,

        // Right face (green)
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,

        // Back face (blue)
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,

        // Left face (yellow)
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,

        // Top face (cyan)
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,

        // Bottom face (magenta)
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f
    };

    // X, Y, Z
    // The normal is used in light calculations and is a vector which points
    // orthogonal to the plane of the surface. For a cube model, the normals
    // should be orthogonal to the points of each face.
    float[] normalData =
    {
        // Front face
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,

        // Right face
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,

        // Back face
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,

        // Left face
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,

        // Top face
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,

        // Bottom face
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f
    };
    #endregion

    // Copy each attribute array into a native-order direct FloatBuffer,
    // rewound to position 0 so GL reads from the start.
    mCubePositions = ByteBuffer.allocateDirect(positionData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubePositions.put(positionData);
    mCubePositions.position(0);

    mCubeColors = ByteBuffer.allocateDirect(colorData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeColors.put(colorData);
    mCubeColors.position(0);

    mCubeNormals = ByteBuffer.allocateDirect(normalData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeNormals.put(normalData);
    mCubeNormals.position(0);
}
/**
 * Initialize the model data.
 *
 * Builds position, color, normal and texture-coordinate arrays for a unit cube
 * (6 faces x 2 triangles x 3 vertices) and copies each into a native-order
 * direct FloatBuffer ready for glVertexAttribPointer.
 */
public LessonFourRenderer(Context activityContext)
{
    mActivityContext = activityContext;

    // Define points for a cube.
    // X, Y, Z
    float[] cubePositionData =
    {
        // In OpenGL counter-clockwise winding is default. This means that when we look at a triangle,
        // if the points are counter-clockwise we are looking at the "front". If not we are looking at
        // the back. OpenGL has an optimization where all back-facing triangles are culled, since they
        // usually represent the backside of an object and aren't visible anyways.

        // Front face
        -1.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, 1.0f,
        1.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, 1.0f,
        1.0f, -1.0f, 1.0f,
        1.0f, 1.0f, 1.0f,

        // Right face
        1.0f, 1.0f, 1.0f,
        1.0f, -1.0f, 1.0f,
        1.0f, 1.0f, -1.0f,
        1.0f, -1.0f, 1.0f,
        1.0f, -1.0f, -1.0f,
        1.0f, 1.0f, -1.0f,

        // Back face
        1.0f, 1.0f, -1.0f,
        1.0f, -1.0f, -1.0f,
        -1.0f, 1.0f, -1.0f,
        1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, 1.0f, -1.0f,

        // Left face
        -1.0f, 1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f, 1.0f,
        -1.0f, 1.0f, 1.0f,

        // Top face
        -1.0f, 1.0f, -1.0f,
        -1.0f, 1.0f, 1.0f,
        1.0f, 1.0f, -1.0f,
        -1.0f, 1.0f, 1.0f,
        1.0f, 1.0f, 1.0f,
        1.0f, 1.0f, -1.0f,

        // Bottom face
        1.0f, -1.0f, -1.0f,
        1.0f, -1.0f, 1.0f,
        -1.0f, -1.0f, -1.0f,
        1.0f, -1.0f, 1.0f,
        -1.0f, -1.0f, 1.0f,
        -1.0f, -1.0f, -1.0f,
    };

    // R, G, B, A — one solid color per face, repeated for all 6 of its vertices.
    float[] cubeColorData =
    {
        // Front face (red)
        // FIX: alpha was 0.0f here (fully transparent), inconsistent with every
        // other face and with the identical cube in LessonTwoRenderer; use 1.0f.
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,

        // Right face (green)
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,

        // Back face (blue)
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 1.0f, 1.0f,

        // Left face (yellow)
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f,

        // Top face (cyan)
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 1.0f, 1.0f, 1.0f,

        // Bottom face (magenta)
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 1.0f
    };

    // X, Y, Z
    // The normal is used in light calculations and is a vector which points
    // orthogonal to the plane of the surface. For a cube model, the normals
    // should be orthogonal to the points of each face.
    float[] cubeNormalData =
    {
        // Front face
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,
        0.0f, 0.0f, 1.0f,

        // Right face
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,

        // Back face
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,
        0.0f, 0.0f, -1.0f,

        // Left face
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,
        -1.0f, 0.0f, 0.0f,

        // Top face
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,

        // Bottom face
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f,
        0.0f, -1.0f, 0.0f
    };

    // S, T (or X, Y)
    // Texture coordinate data.
    // Because images have a Y axis pointing downward (values increase as you move down the image) while
    // OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
    // What's more is that the texture coordinates are the same for every face.
    float[] cubeTextureCoordinateData =
    {
        // Front face
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,

        // Right face
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,

        // Back face
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,

        // Left face
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,

        // Top face
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,

        // Bottom face
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f
    };

    // Initialize the buffers: native-order direct FloatBuffers rewound to 0.
    mCubePositions = ByteBuffer.allocateDirect(cubePositionData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubePositions.put(cubePositionData).position(0);

    mCubeColors = ByteBuffer.allocateDirect(cubeColorData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeColors.put(cubeColorData).position(0);

    mCubeNormals = ByteBuffer.allocateDirect(cubeNormalData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeNormals.put(cubeNormalData).position(0);

    mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.Length * mBytesPerFloat)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
}
/**
 * Creates and initializes OpenGL resources needed for rendering the model.
 *
 * Must be called on the GL thread: it issues GLES20 calls and compiles/links the
 * shader program used for drawing.
 *
 * @param context Context for loading the shader and below-named model and texture assets.
 * @param objAssetName Name of the OBJ file containing the model geometry.
 * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
 */
public void CreateOnGlThread(Context context, string objAssetName, string diffuseTextureAssetName)
{
    // Read the texture. FIX: the asset stream was previously opened inline and never
    // disposed; wrap it in a using block so it is closed deterministically.
    using (var textureStream = context.Assets.Open(diffuseTextureAssetName))
    {
        var textureBitmap = BitmapFactory.DecodeStream(textureStream);

        GLES20.GlActiveTexture(GLES20.GlTexture0);
        GLES20.GlGenTextures(mTextures.Length, mTextures, 0);
        GLES20.GlBindTexture(GLES20.GlTexture2d, mTextures[0]);

        // Trilinear minification requires the mipmap chain generated below.
        GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlLinearMipmapLinear);
        GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlLinear);
        GLUtils.TexImage2D(GLES20.GlTexture2d, 0, textureBitmap, 0);
        GLES20.GlGenerateMipmap(GLES20.GlTexture2d);
        GLES20.GlBindTexture(GLES20.GlTexture2d, 0);

        // Pixel data now lives in GL; release the managed bitmap immediately.
        textureBitmap.Recycle();
    }
    ShaderUtil.CheckGLError(TAG, "Texture loading");

    // Read the obj file. FIX: dispose the asset stream once parsing is done
    // (it was previously leaked).
    IntBuffer wideIndices;
    FloatBuffer vertices;
    FloatBuffer texCoords;
    FloatBuffer normals;
    using (var objInputStream = context.Assets.Open(objAssetName))
    {
        var obj = ObjReader.Read(objInputStream);

        // Prepare the Obj so that its structure is suitable for
        // rendering with OpenGL:
        // 1. Triangulate it
        // 2. Make sure that texture coordinates are not ambiguous
        // 3. Make sure that normals are not ambiguous
        // 4. Convert it to single-indexed data
        obj = ObjUtils.ConvertToRenderable(obj);

        // OpenGL does not use Java arrays. ByteBuffers are used instead to provide
        // data in a format that OpenGL understands.
        // Obtain the data from the OBJ, as direct buffers:
        wideIndices = ObjData.GetFaceVertexIndices(obj, 3);
        vertices = ObjData.GetVertices(obj);
        texCoords = ObjData.GetTexCoords(obj, 2);
        normals = ObjData.GetNormals(obj);
    }

    // Convert int indices to shorts for GL ES 2.0 compatibility (2 bytes per index).
    ShortBuffer indices = ByteBuffer.AllocateDirect(2 * wideIndices.Limit())
        .Order(ByteOrder.NativeOrder()).AsShortBuffer();
    while (wideIndices.HasRemaining)
    {
        indices.Put((short)wideIndices.Get());
    }
    indices.Rewind();

    var buffers = new int[2];
    GLES20.GlGenBuffers(2, buffers, 0);
    mVertexBufferId = buffers[0];
    mIndexBufferId = buffers[1];

    // Load vertex buffer: positions, texture coordinates and normals are packed
    // back-to-back in one VBO; the base addresses below are byte offsets into it.
    mVerticesBaseAddress = 0;
    mTexCoordsBaseAddress = mVerticesBaseAddress + 4 * vertices.Limit();
    mNormalsBaseAddress = mTexCoordsBaseAddress + 4 * texCoords.Limit();
    int totalBytes = mNormalsBaseAddress + 4 * normals.Limit();

    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, mVertexBufferId);
    GLES20.GlBufferData(GLES20.GlArrayBuffer, totalBytes, null, GLES20.GlStaticDraw);
    GLES20.GlBufferSubData(GLES20.GlArrayBuffer, mVerticesBaseAddress, 4 * vertices.Limit(), vertices);
    GLES20.GlBufferSubData(GLES20.GlArrayBuffer, mTexCoordsBaseAddress, 4 * texCoords.Limit(), texCoords);
    GLES20.GlBufferSubData(GLES20.GlArrayBuffer, mNormalsBaseAddress, 4 * normals.Limit(), normals);
    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, 0);

    // Load index buffer.
    GLES20.GlBindBuffer(GLES20.GlElementArrayBuffer, mIndexBufferId);
    mIndexCount = indices.Limit();
    GLES20.GlBufferData(GLES20.GlElementArrayBuffer, 2 * mIndexCount, indices, GLES20.GlStaticDraw);
    GLES20.GlBindBuffer(GLES20.GlElementArrayBuffer, 0);
    ShaderUtil.CheckGLError(TAG, "OBJ buffer load");

    int vertexShader = ShaderUtil.LoadGLShader(TAG, context, GLES20.GlVertexShader, Resource.Raw.object_vertex);
    int fragmentShader = ShaderUtil.LoadGLShader(TAG, context, GLES20.GlFragmentShader, Resource.Raw.object_fragment);

    mProgram = GLES20.GlCreateProgram();
    GLES20.GlAttachShader(mProgram, vertexShader);
    GLES20.GlAttachShader(mProgram, fragmentShader);
    GLES20.GlLinkProgram(mProgram);
    GLES20.GlUseProgram(mProgram);
    ShaderUtil.CheckGLError(TAG, "Program creation");

    // Cache uniform/attribute locations so per-frame draw code avoids string lookups.
    mModelViewUniform = GLES20.GlGetUniformLocation(mProgram, "u_ModelView");
    mModelViewProjectionUniform = GLES20.GlGetUniformLocation(mProgram, "u_ModelViewProjection");
    mPositionAttribute = GLES20.GlGetAttribLocation(mProgram, "a_Position");
    mNormalAttribute = GLES20.GlGetAttribLocation(mProgram, "a_Normal");
    mTexCoordAttribute = GLES20.GlGetAttribLocation(mProgram, "a_TexCoord");
    mTextureUniform = GLES20.GlGetUniformLocation(mProgram, "u_Texture");
    mLightingParametersUniform = GLES20.GlGetUniformLocation(mProgram, "u_LightingParameters");
    mMaterialParametersUniform = GLES20.GlGetUniformLocation(mProgram, "u_MaterialParameters");
    ShaderUtil.CheckGLError(TAG, "Program parameters");

    Android.Opengl.Matrix.SetIdentityM(mModelMatrix, 0);
}
private int[] VBOBuffers = new int[2]; // 2 buffers for vertices and colors

/// <summary>
/// Called once when the GL surface is created: uploads the triangle's vertex and
/// color data into VBOs, sets up the view matrix, and compiles/links the shader
/// program, caching its uniform/attribute handles for later draw calls.
/// </summary>
/// <param name="gl">GL10 interface (unused — GLES20 static entry points are used instead).</param>
/// <param name="config">EGL configuration of the created surface (unused).</param>
/// <exception cref="Exception">Thrown when a shader fails to compile or the program fails to link.</exception>
public void OnSurfaceCreated(IGL10 gl, Javax.Microedition.Khronos.Egl.EGLConfig config)
{
    // FIX: removed unused local `const float edge = 1.0f;`.

    // X, Y, Z,
    float[] triangleVerticesData = {
        -1.5f, -0.25f, 0.0f,
         0.5f, -0.25f, 0.0f,
         0.0f,  0.559016994f, 0.0f
    };
    FloatBuffer mTriangleVertices = ByteBuffer.AllocateDirect(triangleVerticesData.Length * mBytesPerFloat)
        .Order(ByteOrder.NativeOrder()).AsFloatBuffer();
    // Flip() rewinds the buffer so GL reads from element 0.
    mTriangleVertices.Put(triangleVerticesData).Flip();

    // R, G, B, A
    float[] triangleColorsData = {
        1.0f, 0.0f, 0.0f, 0.5f,
        0.0f, 0.5f, 1.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f
    };
    FloatBuffer mTriangleColors = ByteBuffer.AllocateDirect(triangleColorsData.Length * mBytesPerFloat)
        .Order(ByteOrder.NativeOrder()).AsFloatBuffer();
    mTriangleColors.Put(triangleColorsData).Flip();

    // Use VBOs: upload both arrays into GPU memory once, then unbind.
    GLES20.GlGenBuffers(2, VBOBuffers, 0); // 2 buffers for vertices and colors
    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[0]);
    GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleVertices.Capacity() * mBytesPerFloat, mTriangleVertices, GLES20.GlStaticDraw);
    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[1]);
    GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleColors.Capacity() * mBytesPerFloat, mTriangleColors, GLES20.GlStaticDraw);
    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, 0);

    GLES20.GlClearColor(1.0f, 1.0f, 1.0f, 1.0f);

    // Position the eye behind the origin.
    float eyeX = 0.0f;
    float eyeY = 0.0f;
    float eyeZ = 4.5f;

    // We are looking toward the distance
    float lookX = 0.0f;
    float lookY = 0.0f;
    float lookZ = -5.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    float upX = 0.0f;
    float upY = 1.0f;
    float upZ = 0.0f;

    // Set the view matrix. This matrix can be said to represent the camera position.
    // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
    // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
    Matrix.SetLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

    string vertexShader =
        "uniform mat4 u_MVPMatrix; \n"      // A constant representing the combined model/view/projection matrix.
        + "attribute vec4 a_Position; \n"   // Per-vertex position information we will pass in.
        + "attribute vec4 a_Color; \n"      // Per-vertex color information we will pass in.
        + "varying vec4 v_Color; \n"        // This will be passed into the fragment shader.
        + "void main() \n"                  // The entry point for our vertex shader.
        + "{ \n"
        + " v_Color = a_Color; \n"          // Pass the color through to the fragment shader. It will be interpolated across the triangle.
        + " gl_Position = u_MVPMatrix \n"   // gl_Position is a special variable used to store the final position.
        + " * a_Position; \n"               // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
        + "} \n";

    string fragmentShader =
        "precision mediump float; \n"       // Set the default precision to medium. We don't need as high of a
                                            // precision in the fragment shader.
        + "varying vec4 v_Color; \n"        // This is the color from the vertex shader interpolated across the triangle per fragment.
        + "void main() \n"                  // The entry point for our fragment shader.
        + "{ \n"
        + " gl_FragColor = v_Color; \n"     // Pass the color directly through the pipeline.
        + "} \n";

    int vertexShaderHandle = GLES20.GlCreateShader(GLES20.GlVertexShader);
    if (vertexShaderHandle != 0)
    {
        // Pass in the shader source.
        GLES20.GlShaderSource(vertexShaderHandle, vertexShader);

        // Compile the shader.
        GLES20.GlCompileShader(vertexShaderHandle);

        // Get the compilation status.
        int[] compileStatus = new int[1];
        GLES20.GlGetShaderiv(vertexShaderHandle, GLES20.GlCompileStatus, compileStatus, 0);

        // If the compilation failed, delete the shader.
        if (compileStatus[0] == 0)
        {
            GLES20.GlDeleteShader(vertexShaderHandle);
            vertexShaderHandle = 0;
        }
    }

    if (vertexShaderHandle == 0)
    {
        throw new Exception("Error creating vertex shader.");
    }

    // Load in the fragment shader shader.
    int fragmentShaderHandle = GLES20.GlCreateShader(GLES20.GlFragmentShader);
    if (fragmentShaderHandle != 0)
    {
        // Pass in the shader source.
        GLES20.GlShaderSource(fragmentShaderHandle, fragmentShader);

        // Compile the shader.
        GLES20.GlCompileShader(fragmentShaderHandle);

        // Get the compilation status.
        int[] compileStatus = new int[1];
        GLES20.GlGetShaderiv(fragmentShaderHandle, GLES20.GlCompileStatus, compileStatus, 0);

        // If the compilation failed, delete the shader.
        if (compileStatus[0] == 0)
        {
            GLES20.GlDeleteShader(fragmentShaderHandle);
            fragmentShaderHandle = 0;
        }
    }

    if (fragmentShaderHandle == 0)
    {
        throw new Exception("Error creating fragment shader.");
    }

    // Create a program object and store the handle to it.
    int programHandle = GLES20.GlCreateProgram();
    if (programHandle != 0)
    {
        // Bind the vertex shader to the program.
        GLES20.GlAttachShader(programHandle, vertexShaderHandle);

        // Bind the fragment shader to the program.
        GLES20.GlAttachShader(programHandle, fragmentShaderHandle);

        // Bind attributes
        GLES20.GlBindAttribLocation(programHandle, 0, "a_Position");
        GLES20.GlBindAttribLocation(programHandle, 1, "a_Color");

        // Link the two shaders together into a program.
        GLES20.GlLinkProgram(programHandle);

        // Get the link status.
        int[] linkStatus = new int[1];
        GLES20.GlGetProgramiv(programHandle, GLES20.GlLinkStatus, linkStatus, 0);

        // If the link failed, delete the program.
        if (linkStatus[0] == 0)
        {
            GLES20.GlDeleteProgram(programHandle);
            programHandle = 0;
        }
    }

    // FIX: the compiled shader objects were previously leaked. Flag them for deletion
    // here — a successfully linked program keeps its own reference, and on the failure
    // path the program has already been deleted, so the shaders are freed either way.
    GLES20.GlDeleteShader(vertexShaderHandle);
    GLES20.GlDeleteShader(fragmentShaderHandle);

    if (programHandle == 0)
    {
        throw new Exception("Error creating program.");
    }

    // Set program handles. These will later be used to pass in values to the program.
    mMVPMatrixHandle = GLES20.GlGetUniformLocation(programHandle, "u_MVPMatrix");
    mPositionHandle = GLES20.GlGetAttribLocation(programHandle, "a_Position");
    mColorHandle = GLES20.GlGetAttribLocation(programHandle, "a_Color");

    // Tell OpenGL to use this program when rendering.
    GLES20.GlUseProgram(programHandle);
}
/**
 * Creates and initializes OpenGL resources needed for rendering the model.
 *
 * Must be called on the GL thread: it issues GLES20 calls and compiles/links the
 * shader program used for drawing.
 *
 * @param context Context for loading the shader and below-named model and texture assets.
 * @param objAssetName Name of the OBJ file containing the model geometry.
 * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
 */
public void CreateOnGlThread(Context context, string objAssetName, string diffuseTextureAssetName)
{
    // Read the texture. FIX: the asset stream was previously opened inline and never
    // disposed; wrap it in a using block so it is closed deterministically.
    using (var textureStream = context.Assets.Open(diffuseTextureAssetName))
    {
        var textureBitmap = BitmapFactory.DecodeStream(textureStream);

        GLES20.GlActiveTexture(GLES20.GlTexture0);
        GLES20.GlGenTextures(mTextures.Length, mTextures, 0);
        GLES20.GlBindTexture(GLES20.GlTexture2d, mTextures[0]);

        // Trilinear minification requires the mipmap chain generated below.
        GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlLinearMipmapLinear);
        GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlLinear);
        GLUtils.TexImage2D(GLES20.GlTexture2d, 0, textureBitmap, 0);
        GLES20.GlGenerateMipmap(GLES20.GlTexture2d);
        GLES20.GlBindTexture(GLES20.GlTexture2d, 0);

        // Pixel data now lives in GL; release the managed bitmap immediately.
        textureBitmap.Recycle();
    }
    ShaderUtil.CheckGLError(TAG, "Texture loading");

    // Read the obj file. FIX: dispose the asset stream once parsing is done
    // (it was previously leaked).
    IntBuffer wideIndices;
    FloatBuffer vertices;
    FloatBuffer texCoords;
    FloatBuffer normals;
    using (var objInputStream = context.Assets.Open(objAssetName))
    {
        var obj = JavaGl.Obj.ObjReader.Read(objInputStream);

        // Triangulate, disambiguate texture coordinates/normals, and convert
        // to single-indexed data so the geometry is directly renderable.
        obj = JavaGl.Obj.ObjUtils.ConvertToRenderable(obj);

        // Obtain the data from the OBJ, as direct buffers:
        wideIndices = JavaGl.Obj.ObjData.GetFaceVertexIndices(obj, 3);
        vertices = JavaGl.Obj.ObjData.GetVertices(obj);
        texCoords = JavaGl.Obj.ObjData.GetTexCoords(obj, 2);
        normals = JavaGl.Obj.ObjData.GetNormals(obj);
    }

    // Convert int indices to shorts for GL ES 2.0 compatibility (2 bytes per index).
    ShortBuffer indices = ByteBuffer.AllocateDirect(2 * wideIndices.Limit())
        .Order(ByteOrder.NativeOrder()).AsShortBuffer();
    while (wideIndices.HasRemaining)
    {
        indices.Put((short)wideIndices.Get());
    }
    indices.Rewind();

    var buffers = new int[2];
    GLES20.GlGenBuffers(2, buffers, 0);
    mVertexBufferId = buffers[0];
    mIndexBufferId = buffers[1];

    // Positions, texture coordinates and normals are packed back-to-back in one
    // VBO; the base addresses below are byte offsets into it.
    mVerticesBaseAddress = 0;
    mTexCoordsBaseAddress = mVerticesBaseAddress + 4 * vertices.Limit();
    mNormalsBaseAddress = mTexCoordsBaseAddress + 4 * texCoords.Limit();
    int totalBytes = mNormalsBaseAddress + 4 * normals.Limit();

    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, mVertexBufferId);
    GLES20.GlBufferData(GLES20.GlArrayBuffer, totalBytes, null, GLES20.GlStaticDraw);
    GLES20.GlBufferSubData(GLES20.GlArrayBuffer, mVerticesBaseAddress, 4 * vertices.Limit(), vertices);
    GLES20.GlBufferSubData(GLES20.GlArrayBuffer, mTexCoordsBaseAddress, 4 * texCoords.Limit(), texCoords);
    GLES20.GlBufferSubData(GLES20.GlArrayBuffer, mNormalsBaseAddress, 4 * normals.Limit(), normals);
    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, 0);

    // Load index buffer.
    GLES20.GlBindBuffer(GLES20.GlElementArrayBuffer, mIndexBufferId);
    mIndexCount = indices.Limit();
    GLES20.GlBufferData(GLES20.GlElementArrayBuffer, 2 * mIndexCount, indices, GLES20.GlStaticDraw);
    GLES20.GlBindBuffer(GLES20.GlElementArrayBuffer, 0);
    ShaderUtil.CheckGLError(TAG, "OBJ buffer load");

    int vertexShader = ShaderUtil.LoadGLShader(TAG, context, GLES20.GlVertexShader, Resource.Raw.object_vertex);
    int fragmentShader = ShaderUtil.LoadGLShader(TAG, context, GLES20.GlFragmentShader, Resource.Raw.object_fragment);

    mProgram = GLES20.GlCreateProgram();
    GLES20.GlAttachShader(mProgram, vertexShader);
    GLES20.GlAttachShader(mProgram, fragmentShader);
    GLES20.GlLinkProgram(mProgram);
    GLES20.GlUseProgram(mProgram);
    ShaderUtil.CheckGLError(TAG, "Program creation");

    // Cache uniform/attribute locations so per-frame draw code avoids string lookups.
    mModelViewUniform = GLES20.GlGetUniformLocation(mProgram, "u_ModelView");
    mModelViewProjectionUniform = GLES20.GlGetUniformLocation(mProgram, "u_ModelViewProjection");
    mPositionAttribute = GLES20.GlGetAttribLocation(mProgram, "a_Position");
    mNormalAttribute = GLES20.GlGetAttribLocation(mProgram, "a_Normal");
    mTexCoordAttribute = GLES20.GlGetAttribLocation(mProgram, "a_TexCoord");
    mTextureUniform = GLES20.GlGetUniformLocation(mProgram, "u_Texture");
    mLightingParametersUniform = GLES20.GlGetUniformLocation(mProgram, "u_LightingParameters");
    mMaterialParametersUniform = GLES20.GlGetUniformLocation(mProgram, "u_MaterialParameters");
    ShaderUtil.CheckGLError(TAG, "Program parameters");

    Android.Opengl.Matrix.SetIdentityM(mModelMatrix, 0);
}