public void onSurfaceCreated(GL10 glUnused, javax.microedition.khronos.egl.EGLConfig config)
            {
                // Set the background clear color to black.
                gl.clearColor(0.0f, 0.0f, 0.0f, 0.0f);

                // Use culling to remove back faces.
                gl.enable(gl.CULL_FACE);

                // Enable depth testing
                gl.enable(gl.DEPTH_TEST);

                // Position the eye in front of the origin.
                float eyeX = 0.0f;
                float eyeY = 0.0f;
                float eyeZ = -0.5f;

                // We are looking toward the distance
                float lookX = 0.0f;
                float lookY = 0.0f;
                float lookZ = -5.0f;

                // Set our up vector. This is where our head would be pointing were we holding the camera.
                float upX = 0.0f;
                float upY = 1.0f;
                float upZ = 0.0f;

                // Set the view matrix. This matrix can be said to represent the camera position.
                // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
                // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
                Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);


                mPerVertexProgramHandle = gl.createProgram(
                    new Shaders.TriangleVertexShader(),
                    new Shaders.TriangleFragmentShader()
                    );

                gl.bindAttribLocation(mPerVertexProgramHandle, 0, "a_Position");
                gl.bindAttribLocation(mPerVertexProgramHandle, 1, "a_Color");
                gl.bindAttribLocation(mPerVertexProgramHandle, 2, "a_Normal");

                gl.linkProgram(mPerVertexProgramHandle);

                // Define a simple shader program for our point.

                mPointProgramHandle = gl.createProgram(
                    new Shaders.pointVertexShader(),
                    new Shaders.pointFragmentShader()
                    );

                gl.bindAttribLocation(mPointProgramHandle, 0, "a_Position");

                gl.linkProgram(mPointProgramHandle);
            }
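Note that both programs in the example above are linked without checking the link result. Below is a minimal sketch of a link-status check; the helper name AssertLinked and the direct use of android.opengl.GLES20 are illustrative assumptions (the gl wrapper used above may expose its own equivalent, and its program handle type may not be a raw int).

    static void AssertLinked(int program)
    {
        // Query the link result; 0 (GL_FALSE) means linking failed.
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);

        if (linkStatus[0] == 0)
        {
            Console.WriteLine("Error linking program: " + GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
        }
    }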
Example #2
        public void onSurfaceCreated(GL10 unused, EGLConfig config)
        {
            // if onSurfaceCreated is called while resuming from pause,
            // it means the GL context was lost
            paused.compareAndSet(1, -1);

            // assign high priority to the rendering thread
            java.lang.Thread.currentThread()
            .setPriority(java.lang.Thread.MAX_PRIORITY);

            // set swap interval
            if (swapInterval != 1)
            {
                var eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
                EGL14.eglSwapInterval(eglDisplay, swapInterval);
            }
        }
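eglSwapInterval in the example above applies to the surface current on the calling thread and can fail silently. A minimal sketch of verifying the call under that assumption, reusing the same EGL14 bindings (the logging line is illustrative):

    var eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);

    if (!EGL14.eglSwapInterval(eglDisplay, swapInterval))
    {
        // eglGetError explains why the interval was rejected, e.g. no current context on this thread.
        Console.WriteLine("eglSwapInterval failed: 0x" + EGL14.eglGetError().ToString("X"));
    }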
        public void onSurfaceCreated(javax.microedition.khronos.egl.EGLConfig value)
        {
            Console.WriteLine("enter AndroidCardboardExperiment onSurfaceCreated");

            GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.

            ByteBuffer bbVertices = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COORDS.Length * 4);

            bbVertices.order(ByteOrder.nativeOrder());
            cubeVertices = bbVertices.asFloatBuffer();
            cubeVertices.put(WorldLayoutData.CUBE_COORDS);
            cubeVertices.position(0);

            ByteBuffer bbColors = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COLORS.Length * 4);

            bbColors.order(ByteOrder.nativeOrder());
            cubeColors = bbColors.asFloatBuffer();
            cubeColors.put(WorldLayoutData.CUBE_COLORS);
            cubeColors.position(0);

            ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(
                WorldLayoutData.CUBE_FOUND_COLORS.Length * 4);

            bbFoundColors.order(ByteOrder.nativeOrder());
            cubeFoundColors = bbFoundColors.asFloatBuffer();
            cubeFoundColors.put(WorldLayoutData.CUBE_FOUND_COLORS);
            cubeFoundColors.position(0);

            ByteBuffer bbNormals = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_NORMALS.Length * 4);

            bbNormals.order(ByteOrder.nativeOrder());
            cubeNormals = bbNormals.asFloatBuffer();
            cubeNormals.put(WorldLayoutData.CUBE_NORMALS);
            cubeNormals.position(0);

            // make a floor
            ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COORDS.Length * 4);

            bbFloorVertices.order(ByteOrder.nativeOrder());
            floorVertices = bbFloorVertices.asFloatBuffer();
            floorVertices.put(WorldLayoutData.FLOOR_COORDS);
            floorVertices.position(0);

            ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_NORMALS.Length * 4);

            bbFloorNormals.order(ByteOrder.nativeOrder());
            floorNormals = bbFloorNormals.asFloatBuffer();
            floorNormals.put(WorldLayoutData.FLOOR_NORMALS);
            floorNormals.position(0);

            // Floor color as packed RGB 0xA26D41, expanded to per-vertex floats below.
            var fcolors = 0xA26D41;

            // rgb to float

            //[javac]         return  __Enumerable.<Float>AsEnumerable(__SZArrayEnumerator_1.<Float>Of(x));
            //[javac]                                                                       ^
            //[javac]   required: T#1[]
            //[javac]   found: float[]
            //[javac]   reason: actual argument float[] cannot be converted to Float[] by method invocation conversion

            //          var FLOOR_COLORS = (
            //              from i in Enumerable.Range(0, 6)
            //              select new float[] { 0xA2 / 1.0f, 0x6D / 1.0f, 0x41 / 1.0f, 1.0f }
            //).SelectMany(x => x).ToArray();

            #region floorColors
            // One RGBA color per floor vertex (6 vertices), each byte normalized to [0, 1].
            var FLOOR_COLORS = new float[4 * 6];

            for (int i = 0; i < FLOOR_COLORS.Length; i += 4)
            {
                FLOOR_COLORS[i + 0] = 0xA2 / 255.0f;
                FLOOR_COLORS[i + 1] = 0x6D / 255.0f;
                FLOOR_COLORS[i + 2] = 0x41 / 255.0f;
                FLOOR_COLORS[i + 3] = 1.0f;
            }



            FloatBuffer floorColors;

            ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COLORS.Length * 4);
            bbFloorColors.order(ByteOrder.nativeOrder());
            floorColors = bbFloorColors.asFloatBuffer();
            //floorColors.put(WorldLayoutData.FLOOR_COLORS);
            floorColors.put(FLOOR_COLORS);
            floorColors.position(0);
            #endregion


            #region loadGLShader
            Func <int, ScriptCoreLib.GLSL.Shader, int> loadGLShader = (type, xshader) =>
            {
                var code = xshader.ToString();

                int shader = GLES20.glCreateShader(type);
                GLES20.glShaderSource(shader, code);
                GLES20.glCompileShader(shader);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    Console.WriteLine("Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
                    GLES20.glDeleteShader(shader);
                    shader = 0;
                }

                if (shader == 0)
                {
                    throw new Exception("Error creating shader.");
                }

                return(shader);
            };
            #endregion


            int vertexShader      = loadGLShader(GLES20.GL_VERTEX_SHADER, new AndroidCardboardExperiment.Shaders.light_vertexVertexShader());
            int gridShader        = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new Shaders.xgrid_fragmentFragmentShader());
            int passthroughShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new AndroidCardboardExperiment.Shaders.passthrough_fragmentFragmentShader());

            cubeProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(cubeProgram, vertexShader);
            GLES20.glAttachShader(cubeProgram, passthroughShader);
            GLES20.glLinkProgram(cubeProgram);
            GLES20.glUseProgram(cubeProgram);

            checkGLError("Cube program");

            cubePositionParam = GLES20.glGetAttribLocation(cubeProgram, "a_Position");
            cubeNormalParam   = GLES20.glGetAttribLocation(cubeProgram, "a_Normal");
            cubeColorParam    = GLES20.glGetAttribLocation(cubeProgram, "a_Color");

            cubeModelParam               = GLES20.glGetUniformLocation(cubeProgram, "u_Model");
            cubeModelViewParam           = GLES20.glGetUniformLocation(cubeProgram, "u_MVMatrix");
            cubeModelViewProjectionParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVP");
            cubeLightPosParam            = GLES20.glGetUniformLocation(cubeProgram, "u_LightPos");

            GLES20.glEnableVertexAttribArray(cubePositionParam);
            GLES20.glEnableVertexAttribArray(cubeNormalParam);
            GLES20.glEnableVertexAttribArray(cubeColorParam);

            checkGLError("Cube program params");

            floorProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(floorProgram, vertexShader);
            GLES20.glAttachShader(floorProgram, gridShader);
            GLES20.glLinkProgram(floorProgram);
            GLES20.glUseProgram(floorProgram);

            checkGLError("Floor program");

            floorModelParam               = GLES20.glGetUniformLocation(floorProgram, "u_Model");
            floorModelViewParam           = GLES20.glGetUniformLocation(floorProgram, "u_MVMatrix");
            floorModelViewProjectionParam = GLES20.glGetUniformLocation(floorProgram, "u_MVP");
            floorLightPosParam            = GLES20.glGetUniformLocation(floorProgram, "u_LightPos");

            floorPositionParam = GLES20.glGetAttribLocation(floorProgram, "a_Position");
            floorNormalParam   = GLES20.glGetAttribLocation(floorProgram, "a_Normal");
            floorColorParam    = GLES20.glGetAttribLocation(floorProgram, "a_Color");

            GLES20.glEnableVertexAttribArray(floorPositionParam);
            GLES20.glEnableVertexAttribArray(floorNormalParam);
            GLES20.glEnableVertexAttribArray(floorColorParam);

            checkGLError("Floor program params");

            GLES20.glEnable(GLES20.GL_DEPTH_TEST);
            //GLES20.glEnable(GLES20.GL_FOG);



            checkGLError("onSurfaceCreated");

            Console.WriteLine("exit AndroidCardboardExperiment onSurfaceCreated");


            vFinishFrame = (com.google.vrtoolkit.cardboard.Viewport v) =>
            {
                // GPU thread stops now..
                FrameOne.Stop();
            };

            // I/System.Console(28103): CardboardForEdgeExperiment { ProcessorCount = 8, MODEL = SM-G925F, CurrentManagedThreadId = 11305, FrameCounter = 28, LastFrameMilliseconds = 40, codeFPS = 25.0, pitch = 1.579644, yaw = 1.6225219 }

            #region vNewFrame
            vNewFrame = (com.google.vrtoolkit.cardboard.HeadTransform headTransform) =>
            {
                // http://stackoverflow.com/questions/11851343/raise-fps-on-android-tablet-above-60-for-opengl-game
                // http://gafferongames.com/game-physics/fix-your-timestep/

                #region FrameWatch
                if (FrameWatch.ElapsedMilliseconds >= 1000)
                {
                    var codeFPS = 1000.0 / FrameOne.ElapsedMilliseconds;

                    // we now know how many frames did fit into it
                    // need 60 or more!
                    Console.WriteLine("CardboardForEdgeExperiment " + new
                    {
                        // static
                        System.Environment.ProcessorCount,

                        android.os.Build.MODEL,

                        System.Environment.CurrentManagedThreadId,

                        FrameCounter,

                        // dynamic
                        LastFrameMilliseconds = FrameOne.ElapsedMilliseconds,
                        codeFPS,

                        // very dynamic
                        pitch,
                        yaw
                    });

                    // I/System.Console(28117): CardboardForEdgeExperiment { ProcessorCount = 2, MODEL = Nexus 9, CurrentManagedThreadId = 1647, FrameCounter = 60, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, pitch = 1.5978987, yaw = -2.0770574 }

                    FrameWatch.Restart();
                    FrameCounter = 0;
                }

                #endregion
                // GPU thread starts now..
                FrameOne.Restart();
                FrameCounter++;


                //Console.WriteLine("AndroidCardboardExperiment onNewFrame");



                headTransform.getHeadView(headView, 0);

                checkGLError("onReadyToDraw");

                // I/System.Console(27769): CardboardForEdgeExperiment { FrameCounter = 61, LastFrameMilliseconds = 0, codeFPS = Infinity, CurrentManagedThreadId = 1637, ProcessorCount = 2, MODEL = Nexus 9 }

                // add placeholder slowdown
                //System.Threading.Thread.Sleep(5);
                // I/System.Console(27840): CardboardForEdgeExperiment { FrameCounter = 60, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, CurrentManagedThreadId = 1642, ProcessorCount = 2, MODEL = Nexus 9 }
            };
            #endregion

            // if we define it here, we get to see it in vr...
            var modelCube = new float[16];

            // I/System.Console(19917): CardboardForEdgeExperiment { ProcessorCount = 8, MODEL = SM-G925F, CurrentManagedThreadId = 9959, FrameCounter = 46, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, pitch = 0.9070491, yaw = -0.3660261 }

            #region vDrawEye
            vDrawEye = (com.google.vrtoolkit.cardboard.Eye eye) =>
            {
                // VIDEO via "X:\util\android-sdk-windows\tools\ddms.bat"

                var camera = new float[16];


                // static void	setLookAtM(float[] rm, int rmOffset, float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ)
                // Build the camera matrix and apply it to the ModelView.
                Matrix.setLookAtM(camera, 0,

                                  0.0f, 0.0f, CAMERA_Z,

                                  0f, 0.0f, 0.0f,

                                  0.0f, 1.0f, 0.0f);


                #region glClearColor
                // skybox/video instead?
                GLES20.glClearColor(
                    0x87 / 255f,
                    0xCE / 255f,
                    0xEB / 255f, 1.0f
                    );

                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
                #endregion



                var view = new float[16];

                // can we strafe?



                // Apply the eye transformation to the camera.
                Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0);


                // we tapped into it. this strafes us!
                Matrix.translateM(view, 0,

                                  (float)Math.Sin(TotalTime.ElapsedMilliseconds * 0.0001f) * objectDistance * 2.5f,


                                  // up down
                                  //(float)Math.Sin(TotalTime.ElapsedMilliseconds * 0.001f) * floorDepth * 0.5f,
                                  (float)Math.Cos(TotalTime.ElapsedMilliseconds * 0.001f) * floorDepth * 0.1f,

                                  0
                                  );


                // Set the position of the light
                Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0);

                // Build the ModelView and ModelViewProjection matrices
                // for calculating cube position and light.
                float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);


                // just a buffer?
                var modelView = new float[16];


                #region drawCube()
                Action <float, float, float> drawCube = (tx, ty, tz) =>
                {
                    #region isLookingAtObject
                    Func <bool> isLookingAtObject = () =>
                    {
                        float[] initVec = { 0, 0, 0, 1.0f };



                        float[] objPositionVec = new float[4];

                        // Convert object space to camera space. Use the headView from onNewFrame.
                        Matrix.multiplyMM(modelView, 0, headView, 0, modelCube, 0);
                        Matrix.multiplyMV(objPositionVec, 0, modelView, 0, initVec, 0);



                        pitch = (float)Math.Atan2(objPositionVec[1], -objPositionVec[2]);
                        yaw   = (float)Math.Atan2(objPositionVec[0], -objPositionVec[2]);

                        if (Math.Abs(pitch) < PITCH_LIMIT)
                        {
                            if (Math.Abs(yaw) < YAW_LIMIT)
                            {
                                return(true);
                            }
                        }
                        return(false);
                    };
                    #endregion



                    // Object first appears directly in front of user.
                    Matrix.setIdentityM(modelCube, 0);
                    // can't see it?
                    var scale = 5.0f;
                    //Matrix.scaleM(modelCube, 0, scale, scale, scale);

                    Matrix.translateM(modelCube, 0, tx, ty, tz);


                    Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0);
                    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);


                    // public static void scaleM (float[] m, int mOffset, float x, float y, float z)

                    // Build the Model part of the ModelView matrix.
                    //Matrix.rotateM(modelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

                    // can't see rotation?
                    Matrix.rotateM(modelCube, 0, TotalTime.ElapsedMilliseconds * 0.01f,
                                   // upwards rot.
                                   //0.5f,

                                   0f,

                                   // sideways, left to right
                                   0.5f
                                   , 0.0f);


                    // http://developer.android.com/reference/android/opengl/Matrix.html#translateM(float[], int, float, float, float)


                    // the cube rotates in front of us.
                    // do we need to use a special program to draw a cube?
                    // how can we make it bigger?

                    GLES20.glUseProgram(cubeProgram);

                    GLES20.glUniform3fv(cubeLightPosParam, 1, lightPosInEyeSpace, 0);

                    // Set the Model in the shader, used to calculate lighting
                    GLES20.glUniformMatrix4fv(cubeModelParam, 1, false, modelCube, 0);

                    // Set the ModelView in the shader, used to calculate lighting
                    GLES20.glUniformMatrix4fv(cubeModelViewParam, 1, false, modelView, 0);

                    // Set the position of the cube
                    GLES20.glVertexAttribPointer(cubePositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, cubeVertices);

                    // Set the ModelViewProjection matrix in the shader.
                    GLES20.glUniformMatrix4fv(cubeModelViewProjectionParam, 1, false, modelViewProjection, 0);

                    // Set the normal positions of the cube, again for shading
                    GLES20.glVertexAttribPointer(cubeNormalParam, 3, GLES20.GL_FLOAT, false, 0, cubeNormals);


                    #region cubeColors
                    // Swap to the alternate color set while the user is not looking at the cube.
                    var cc = cubeColors;
                    if (!isLookingAtObject())
                    {
                        cc = cubeFoundColors;
                    }

                    GLES20.glVertexAttribPointer(cubeColorParam, 4, GLES20.GL_FLOAT, false, 0, cc);
                    #endregion

                    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
                    checkGLError("Drawing cube");
                };


                #endregion

                #region drawCube
                drawCube(0, objectDistance, objectDistance * -1.0f);


                drawCube(0, 0, objectDistance * -2.0f);

                // looks like an airstrip

                // low fps?
                //var endOfMatrix = 64;
                var endOfMatrix = 20;
                for (int i = -endOfMatrix; i < endOfMatrix; i++)
                {
                    drawCube(objectDistance, -floorDepth, objectDistance * -2.0f * i);
                    drawCube(-objectDistance, -floorDepth, objectDistance * -2.0f * i);


                    drawCube(objectDistance * 0.5f, 0, objectDistance * -2.0f * i);
                    drawCube(objectDistance * -0.5f, 0, objectDistance * -2.0f * i);
                }
                #endregion



                var modelFloor = new float[16];

                Matrix.setIdentityM(modelFloor, 0);
                Matrix.translateM(modelFloor, 0,

                                                      // the floor escapes!
                                                      //TotalTime.ElapsedMilliseconds * 0.01f,
                                  0, -floorDepth, 0); // Floor appears below user.

                // Set modelView for the floor, so we draw floor in the correct location
                Matrix.multiplyMM(modelView, 0, view, 0, modelFloor, 0);
                Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);

                #region drawFloor
                // called by onDrawEye
                Action drawFloor = delegate
                {
                    GLES20.glUseProgram(floorProgram);

                    // Set ModelView, MVP, position, normals, and color.
                    GLES20.glUniform3fv(floorLightPosParam, 1, lightPosInEyeSpace, 0);
                    GLES20.glUniformMatrix4fv(floorModelParam, 1, false, modelFloor, 0);
                    GLES20.glUniformMatrix4fv(floorModelViewParam, 1, false, modelView, 0);
                    GLES20.glUniformMatrix4fv(floorModelViewProjectionParam, 1, false,
                                              modelViewProjection, 0);
                    GLES20.glVertexAttribPointer(floorPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
                                                 false, 0, floorVertices);
                    GLES20.glVertexAttribPointer(floorNormalParam, 3, GLES20.GL_FLOAT, false, 0,
                                                 floorNormals);
                    GLES20.glVertexAttribPointer(floorColorParam, 4, GLES20.GL_FLOAT, false, 0, floorColors);

                    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6);

                    checkGLError("drawing floor");
                };

                drawFloor();
                #endregion
            };
            #endregion
        }
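checkGLError is called throughout the renderer above but is not defined in this snippet. A minimal sketch of such a helper, assuming the same android.opengl.GLES20 bindings; the exception type and message format are assumptions:

    static void checkGLError(string label)
    {
        int error = GLES20.glGetError();

        // glGetError returns GL_NO_ERROR when no error flag is set.
        if (error != GLES20.GL_NO_ERROR)
        {
            Console.WriteLine(label + ": glError 0x" + error.ToString("X"));
            throw new Exception(label + ": glError " + error);
        }
    }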
Example #4
 global::javax.microedition.khronos.egl.EGLContext android.opengl.GLSurfaceView.EGLContextFactory.createContext(javax.microedition.khronos.egl.EGL10 arg0, javax.microedition.khronos.egl.EGLDisplay arg1, javax.microedition.khronos.egl.EGLConfig arg2)
 {
     global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
     if (!IsClrObject)
     {
         return(global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::android.opengl.GLSurfaceView.EGLContextFactory_._createContext6059, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as javax.microedition.khronos.egl.EGLContext);
     }
     else
     {
         return(global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::android.opengl.GLSurfaceView.EGLContextFactory_.staticClass, global::android.opengl.GLSurfaceView.EGLContextFactory_._createContext6059, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as javax.microedition.khronos.egl.EGLContext);
     }
 }
Example #5
 void android.opengl.GLSurfaceView.Renderer.onSurfaceCreated(javax.microedition.khronos.opengles.GL10 arg0, javax.microedition.khronos.egl.EGLConfig arg1)
 {
     global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
     if (!IsClrObject)
     {
         @__env.CallVoidMethod(this.JvmHandle, global::android.opengl.GLSurfaceView.Renderer_._onSurfaceCreated6064, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
     }
     else
     {
         @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.opengl.GLSurfaceView.Renderer_.staticClass, global::android.opengl.GLSurfaceView.Renderer_._onSurfaceCreated6064, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
     }
 }
Example #6
 global::javax.microedition.khronos.egl.EGLSurface javax.microedition.khronos.egl.EGL10.eglCreatePbufferSurface(javax.microedition.khronos.egl.EGLDisplay arg0, javax.microedition.khronos.egl.EGLConfig arg1, int[] arg2)
 {
     global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
     if (!IsClrObject)
     {
         return(global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::javax.microedition.khronos.egl.EGL10_._eglCreatePbufferSurface15799, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as javax.microedition.khronos.egl.EGLSurface);
     }
     else
     {
         return(global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::javax.microedition.khronos.egl.EGL10_.staticClass, global::javax.microedition.khronos.egl.EGL10_._eglCreatePbufferSurface15799, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as javax.microedition.khronos.egl.EGLSurface);
     }
 }
Example #7
 bool javax.microedition.khronos.egl.EGL10.eglGetConfigAttrib(javax.microedition.khronos.egl.EGLDisplay arg0, javax.microedition.khronos.egl.EGLConfig arg1, int arg2, int[] arg3)
 {
     global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
     if (!IsClrObject)
     {
         return(@__env.CallBooleanMethod(this.JvmHandle, global::javax.microedition.khronos.egl.EGL10_._eglGetConfigAttrib15804, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3)));
     }
     else
     {
         return(@__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::javax.microedition.khronos.egl.EGL10_.staticClass, global::javax.microedition.khronos.egl.EGL10_._eglGetConfigAttrib15804, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3)));
     }
 }
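For context, the eglGetConfigAttrib wrapper above is typically used to inspect a candidate EGLConfig before choosing it. A minimal usage sketch, assuming an EGL10 instance (egl), an EGLDisplay (display) and an EGLConfig (config) are already in scope; the attribute queried is just an example:

     int[] depthSize = new int[1];

     // Ask how many depth-buffer bits this config provides.
     if (egl.eglGetConfigAttrib(display, config, EGL10.EGL_DEPTH_SIZE, depthSize))
     {
         Console.WriteLine("EGL_DEPTH_SIZE = " + depthSize[0]);
     }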
            public void onSurfaceCreated(GL10 glUnused, javax.microedition.khronos.egl.EGLConfig config)
            {
                // Set the background clear color to black.
                gl.clearColor(0.0f, 0.0f, 0.0f, 0.0f);

                // Use culling to remove back faces.
                gl.enable(gl.CULL_FACE);

                // Enable depth testing
                gl.enable(gl.DEPTH_TEST);

                // Enable texture mapping
                gl.enable(gl.TEXTURE_2D);

                // Position the eye in front of the origin.
                float eyeX = 0.0f;
                float eyeY = 0.0f;
                float eyeZ = -0.5f;

                // We are looking toward the distance
                float lookX = 0.0f;
                float lookY = 0.0f;
                float lookZ = -5.0f;

                // Set our up vector. This is where our head would be pointing were we holding the camera.
                float upX = 0.0f;
                float upY = 1.0f;
                float upZ = 0.0f;

                // Set the view matrix. This matrix can be said to represent the camera position.
                // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
                // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
                Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);



                mProgramHandle = gl.createProgram(
                    new Shaders.per_pixelVertexShader(),
                    new Shaders.per_pixelFragmentShader()
                    );

                gl.bindAttribLocation(mProgramHandle, 0, "a_Position");
                gl.bindAttribLocation(mProgramHandle, 1, "a_Color");
                gl.bindAttribLocation(mProgramHandle, 2, "a_Normal");
                gl.bindAttribLocation(mProgramHandle, 3, "a_TexCoordinate");

                gl.linkProgram(mProgramHandle);

                // Define a simple shader program for our point.

                mPointProgramHandle = gl.createProgram(
                    new Shaders.pointVertexShader(),
                    new Shaders.pointFragmentShader()
                    );

                gl.bindAttribLocation(mPointProgramHandle, 0, "a_Position");

                gl.linkProgram(mPointProgramHandle);


                #region loadTexture
                Func <android.graphics.Bitmap, WebGLTexture> loadTexture = (bitmap) =>
                {
                    var textureHandle = gl.createTexture();

                    // Bind to the texture in OpenGL
                    gl.bindTexture(gl.TEXTURE_2D, textureHandle);

                    // Set filtering
                    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, (int)gl.NEAREST);
                    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, (int)gl.NEAREST);

                    // Load the bitmap into the bound texture.
                    //gl.texImage2D(
                    GLUtils.texImage2D((int)gl.TEXTURE_2D, 0, bitmap, 0);

                    // Recycle the bitmap, since its data has been loaded into OpenGL.
                    bitmap.recycle();


                    return(textureHandle);
                };
                #endregion

                #region openFileFromAssets
                Func <string, InputStream> openFileFromAssets = (string spath) =>
                {
                    InputStream value = null;
                    try
                    {
                        value = this.mActivityContext.getResources().getAssets().open(spath);
                    }
                    catch
                    {
                        // asset missing or unreadable; fall through and return null
                    }
                    return(value);
                };
                #endregion


                // Read in the resource
                var bumpy_bricks_public_domain = android.graphics.BitmapFactory.decodeStream(
                    openFileFromAssets("bumpy_bricks_public_domain.jpg")
                    );

                // Load the texture
                mTextureDataHandle = loadTexture(
                    bumpy_bricks_public_domain
                    );

                gl.generateMipmap(gl.TEXTURE_2D);
            }
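One caveat in the example above: mipmaps are generated after TEXTURE_MIN_FILTER was set to NEAREST, a filter that never samples the mipmap levels. A minimal sketch of a mipmap-aware setup while the texture is still bound, assuming android.opengl.GLES20 is reachable from this class (the gl wrapper's own constants could be used instead if it exposes them):

                // Choose a minification filter that actually reads the generated mipmap chain.
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);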
Example #9
 internal virtual javax.microedition.khronos.egl.EGLContext createContext(javax.microedition.khronos.egl.EGL10
                                                                          egl, javax.microedition.khronos.egl.EGLDisplay eglDisplay, javax.microedition.khronos.egl.EGLConfig
                                                                          eglConfig)
 {
     throw new System.NotImplementedException();
 }
Example #10
 private void printConfig(javax.microedition.khronos.egl.EGLConfig config)
 {
     throw new System.NotImplementedException();
 }