static WebGLTexture InternalConstructor(WebGLRenderingContext gl)
{
    // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeWebGLFrameBuffer\ChromeWebGLFrameBuffer\Application.cs
    // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyColumns\ChromeShaderToyColumns\Library\ShaderToy.cs

    var p = gl.createTexture();

    return p;
}
public void onSurfaceCreated(GL10 glUnused, javax.microedition.khronos.egl.EGLConfig config)
{
    // Set the background clear color to black.
    gl.clearColor(0.0f, 0.0f, 0.0f, 0.0f);

    // Use culling to remove back faces.
    gl.enable(gl.CULL_FACE);

    // Enable depth testing.
    gl.enable(gl.DEPTH_TEST);

    // Enable texture mapping.
    gl.enable(gl.TEXTURE_2D);

    // Position the eye in front of the origin.
    float eyeX = 0.0f;
    float eyeY = 0.0f;
    float eyeZ = -0.5f;

    // We are looking toward the distance.
    float lookX = 0.0f;
    float lookY = 0.0f;
    float lookZ = -5.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    float upX = 0.0f;
    float upY = 1.0f;
    float upZ = 0.0f;

    // Set the view matrix. This matrix can be said to represent the camera position.
    // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
    // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

    mProgramHandle = gl.createProgram(
        new Shaders.per_pixelVertexShader(),
        new Shaders.per_pixelFragmentShader()
    );

    gl.bindAttribLocation(mProgramHandle, 0, "a_Position");
    gl.bindAttribLocation(mProgramHandle, 1, "a_Color");
    gl.bindAttribLocation(mProgramHandle, 2, "a_Normal");
    gl.bindAttribLocation(mProgramHandle, 3, "a_TexCoordinate");

    gl.linkProgram(mProgramHandle);

    // Define a simple shader program for our point.
    mPointProgramHandle = gl.createProgram(
        new Shaders.pointVertexShader(),
        new Shaders.pointFragmentShader()
    );

    gl.bindAttribLocation(mPointProgramHandle, 0, "a_Position");
    gl.linkProgram(mPointProgramHandle);

    #region loadTexture
    Func<android.graphics.Bitmap, WebGLTexture> loadTexture = (bitmap) =>
    {
        var textureHandle = gl.createTexture();

        // Bind to the texture in OpenGL.
        gl.bindTexture(gl.TEXTURE_2D, textureHandle);

        // Set filtering.
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, (int)gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, (int)gl.NEAREST);

        // Load the bitmap into the bound texture.
        //gl.texImage2D(
        GLUtils.texImage2D((int)gl.TEXTURE_2D, 0, bitmap, 0);

        // Recycle the bitmap, since its data has been loaded into OpenGL.
        bitmap.recycle();

        return textureHandle;
    };
    #endregion

    #region openFileFromAssets
    Func<string, InputStream> openFileFromAssets = (string spath) =>
    {
        InputStream value = null;

        try
        {
            value = this.mActivityContext.getResources().getAssets().open(spath);
        }
        catch
        {
        }

        return value;
    };
    #endregion

    // Read in the resource.
    var bumpy_bricks_public_domain = android.graphics.BitmapFactory.decodeStream(
        openFileFromAssets("bumpy_bricks_public_domain.jpg")
    );

    // Load the texture.
    mTextureDataHandle = loadTexture(bumpy_bricks_public_domain);

    gl.generateMipmap(gl.TEXTURE_2D);
}
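// A minimal sketch of how the view matrix prepared in onSurfaceCreated is typically
// combined with a per-object model matrix and a projection matrix in the per-frame
// draw step, as the NOTE above describes (model and view kept separate until they
// are multiplied together). The names sketchModelMatrix, sketchProjectionMatrix,
// sketchModelViewMatrix and sketchMVPMatrix are illustrative assumptions, not fields
// of this class; a real renderer would build the projection matrix in
// onSurfaceChanged from the surface aspect ratio.
void buildMVPMatrixSketch()
{
    float[] sketchModelMatrix = new float[16];
    float[] sketchProjectionMatrix = new float[16];
    float[] sketchModelViewMatrix = new float[16];
    float[] sketchMVPMatrix = new float[16];

    // Place the object a few units in front of the camera.
    Matrix.setIdentityM(sketchModelMatrix, 0);
    Matrix.translateM(sketchModelMatrix, 0, 0.0f, 0.0f, -3.5f);

    // A simple frustum; the symmetric left/right/bottom/top assume a square surface.
    Matrix.frustumM(sketchProjectionMatrix, 0, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, 10.0f);

    // view * model, then projection * (view * model).
    Matrix.multiplyMM(sketchModelViewMatrix, 0, mViewMatrix, 0, sketchModelMatrix, 0);
    Matrix.multiplyMM(sketchMVPMatrix, 0, sketchProjectionMatrix, 0, sketchModelViewMatrix, 0);

    // sketchMVPMatrix would then be uploaded as the shader's MVP uniform before drawing.
}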