Inheritance: global::java.nio.Buffer, global::java.lang.Comparable
            private void initTriangle()
            {
                // float has 4 bytes
                ByteBuffer vbb = ByteBuffer.allocateDirect(_nrOfVertices * 3 * 4);
                vbb.order(ByteOrder.nativeOrder());
                _vertexBuffer = vbb.asFloatBuffer();

                // short has 2 bytes
                ByteBuffer ibb = ByteBuffer.allocateDirect(_nrOfVertices * 2);
                ibb.order(ByteOrder.nativeOrder());
                _indexBuffer = ibb.asShortBuffer();

                float[] coords = {
                    -0.5f, -0.5f, 0f, // (x1, y1, z1)
                    0.5f, -0.5f, 0f, // (x2, y2, z2)
                    0f, 0.5f, 0f // (x3, y3, z3)
                };

                _vertexBuffer.put(coords);
                _indexBuffer.put(_indicesArray);

                _vertexBuffer.position(0);
                _indexBuffer.position(0);
            }
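For context, a minimal GL10-style draw sketch (not part of the original snippet) that consumes the buffers prepared above; drawTriangle is a hypothetical helper called from onDrawFrame, and _nrOfVertices is assumed to equal the index count.

            // sketch: drawing the triangle built by initTriangle(), assuming a GL10 'gl' from onDrawFrame
            private void drawTriangle(GL10 gl)
            {
                gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
                // 3 floats per vertex, tightly packed
                gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer);
                // draw via the short indices; assumes _nrOfVertices equals the number of indices
                gl.glDrawElements(GL10.GL_TRIANGLES, _nrOfVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
                gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
            }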
Example No. 2
 public static void cvSnakeImage(object ii, object cp, int i1, FloatBuffer fb1, FloatBuffer fb2, FloatBuffer fb3, int i2, object cs, object ctc, int i3)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (opencv_legacy.__\u003Cjniptr\u003EcvSnakeImage\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint\u003BILjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BI\u0029V == IntPtr.Zero)
     opencv_legacy.__\u003Cjniptr\u003EcvSnakeImage\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint\u003BILjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BI\u0029V = JNI.Frame.GetFuncPtr(opencv_legacy.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/opencv_legacy", "cvSnakeImage", "(Lcom/googlecode/javacv/cpp/opencv_core$IplImage;Lcom/googlecode/javacv/cpp/opencv_core$CvPoint;ILjava/nio/FloatBuffer;Ljava/nio/FloatBuffer;Ljava/nio/FloatBuffer;ILcom/googlecode/javacv/cpp/opencv_core$CvSize;Lcom/googlecode/javacv/cpp/opencv_core$CvTermCriteria;I)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(opencv_legacy.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<opencv_legacy>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(ii);
     // ISSUE: explicit reference operation
     IntPtr num5 = ((JNI.Frame) @frame).MakeLocalRef(cp);
     int num6 = i1;
     // ISSUE: explicit reference operation
     IntPtr num7 = ((JNI.Frame) @frame).MakeLocalRef((object) fb1);
     // ISSUE: explicit reference operation
     IntPtr num8 = ((JNI.Frame) @frame).MakeLocalRef((object) fb2);
     // ISSUE: explicit reference operation
     IntPtr num9 = ((JNI.Frame) @frame).MakeLocalRef((object) fb3);
     int num10 = i2;
     // ISSUE: explicit reference operation
     IntPtr num11 = ((JNI.Frame) @frame).MakeLocalRef(cs);
     // ISSUE: explicit reference operation
     IntPtr num12 = ((JNI.Frame) @frame).MakeLocalRef(ctc);
     int num13 = i3;
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, IntPtr, int, IntPtr, IntPtr, IntPtr, int, IntPtr, IntPtr, int)>) opencv_legacy.__\u003Cjniptr\u003EcvSnakeImage\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint\u003BILjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BI\u0029V)((int) num2, num3, num4, (int) num5, (IntPtr) num6, num7, num8, (int) num9, (IntPtr) num10, num11, num12, (IntPtr) num13);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
Example No. 3
 public static void cvFindHandRegionA(object cpd1, int i1, object cs1, FloatBuffer fb, object csd, int i2, object cpd2, object cms, object cs2)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (opencv_legacy.__\u003Cjniptr\u003EcvFindHandRegionA\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint3D32f\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSeq\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize2D32f\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint3D32f\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvMemStorage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSeq\u003B\u0029V == IntPtr.Zero)
     opencv_legacy.__\u003Cjniptr\u003EcvFindHandRegionA\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint3D32f\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSeq\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize2D32f\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint3D32f\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvMemStorage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSeq\u003B\u0029V = JNI.Frame.GetFuncPtr(opencv_legacy.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/opencv_legacy", "cvFindHandRegionA", "(Lcom/googlecode/javacv/cpp/opencv_core$CvPoint3D32f;ILcom/googlecode/javacv/cpp/opencv_core$CvSeq;Ljava/nio/FloatBuffer;Lcom/googlecode/javacv/cpp/opencv_core$CvSize2D32f;ILcom/googlecode/javacv/cpp/opencv_core$CvPoint3D32f;Lcom/googlecode/javacv/cpp/opencv_core$CvMemStorage;Lcom/googlecode/javacv/cpp/opencv_core$CvSeq;)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(opencv_legacy.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<opencv_legacy>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(cpd1);
     int num5 = i1;
     // ISSUE: explicit reference operation
     IntPtr num6 = ((JNI.Frame) @frame).MakeLocalRef(cs1);
     // ISSUE: explicit reference operation
     IntPtr num7 = ((JNI.Frame) @frame).MakeLocalRef((object) fb);
     // ISSUE: explicit reference operation
     IntPtr num8 = ((JNI.Frame) @frame).MakeLocalRef(csd);
     int num9 = i2;
     // ISSUE: explicit reference operation
     IntPtr num10 = ((JNI.Frame) @frame).MakeLocalRef(cpd2);
     // ISSUE: explicit reference operation
     IntPtr num11 = ((JNI.Frame) @frame).MakeLocalRef(cms);
     // ISSUE: explicit reference operation
     IntPtr num12 = ((JNI.Frame) @frame).MakeLocalRef(cs2);
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, int, IntPtr, IntPtr, IntPtr, int, IntPtr, IntPtr, IntPtr)>) opencv_legacy.__\u003Cjniptr\u003EcvFindHandRegionA\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint3D32f\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSeq\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize2D32f\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint3D32f\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvMemStorage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSeq\u003B\u0029V)(num2, num3, num4, num5, num6, num7, num8, num9, num10, num11, num12);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
Example No. 4
 public static void glGetFloatv(int arg0, FloatBuffer arg1) { }
        // all setup and data loading goes here
        public void onSurfaceCreated(GL10 arg0, EGLConfig arg1)
        {
            var shaderProgram = gl.createProgram();


            var vs = gl.createShader( new Shaders.GeometryVertexShader() );
            // note: the original constructs GeometryVertexShader here as well; a fragment shader class is presumably intended for 'fs'
            var fs = gl.createShader( new Shaders.GeometryVertexShader() );


            gl.attachShader(shaderProgram, vs);
            gl.attachShader(shaderProgram, fs);


            gl.linkProgram(shaderProgram);

            
            gl.useProgram(shaderProgram);
            positionAttribLocation = gl.getAttribLocation(shaderProgram, "position");

            // setup geometry
            float[] verticesData = 
            { 
                0.0f, 0.5f, 0.0f, 
                -0.5f, -0.5f, 0.0f, 
                0.5f,  -0.5f, 0.0f 
            };

            vertices = ByteBuffer
                    .allocateDirect(verticesData.Length * 4)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            vertices.put(verticesData).position(0);
        }
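A minimal sketch of how the `vertices` buffer and `positionAttribLocation` set up above could feed a draw call. It is written against the plain GLES20 binding used elsewhere on this page rather than the WebGL-flavored wrapper in this snippet, and the onDrawFrame signature is assumed.

        // sketch: drawing the triangle from the 'vertices' FloatBuffer prepared in onSurfaceCreated
        public void onDrawFrame(GL10 glUnused)
        {
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            GLES20.glEnableVertexAttribArray(positionAttribLocation);
            // 3 floats per vertex, tightly packed, read directly from the direct FloatBuffer
            GLES20.glVertexAttribPointer(positionAttribLocation, 3, GLES20.GL_FLOAT, false, 0, vertices);
            GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
        }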
            public LessonTwoRenderer()
            {
                this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object)__gl;

                #region  Define points for a cube.

                // X, Y, Z
                float[] cubePositionData =
		        {
				        // In OpenGL counter-clockwise winding is default. This means that when we look at a triangle, 
				        // if the points are counter-clockwise we are looking at the "front". If not we are looking at
				        // the back. OpenGL has an optimization where all back-facing triangles are culled, since they
				        // usually represent the backside of an object and aren't visible anyways.
				
				        // Front face
				        -1.0f, 1.0f, 1.0f,				
				        -1.0f, -1.0f, 1.0f,
				        1.0f, 1.0f, 1.0f, 
				        -1.0f, -1.0f, 1.0f, 				
				        1.0f, -1.0f, 1.0f,
				        1.0f, 1.0f, 1.0f,
				
				        // Right face
				        1.0f, 1.0f, 1.0f,				
				        1.0f, -1.0f, 1.0f,
				        1.0f, 1.0f, -1.0f,
				        1.0f, -1.0f, 1.0f,				
				        1.0f, -1.0f, -1.0f,
				        1.0f, 1.0f, -1.0f,
				
				        // Back face
				        1.0f, 1.0f, -1.0f,				
				        1.0f, -1.0f, -1.0f,
				        -1.0f, 1.0f, -1.0f,
				        1.0f, -1.0f, -1.0f,				
				        -1.0f, -1.0f, -1.0f,
				        -1.0f, 1.0f, -1.0f,
				
				        // Left face
				        -1.0f, 1.0f, -1.0f,				
				        -1.0f, -1.0f, -1.0f,
				        -1.0f, 1.0f, 1.0f, 
				        -1.0f, -1.0f, -1.0f,				
				        -1.0f, -1.0f, 1.0f, 
				        -1.0f, 1.0f, 1.0f, 
				
				        // Top face
				        -1.0f, 1.0f, -1.0f,				
				        -1.0f, 1.0f, 1.0f, 
				        1.0f, 1.0f, -1.0f, 
				        -1.0f, 1.0f, 1.0f, 				
				        1.0f, 1.0f, 1.0f, 
				        1.0f, 1.0f, -1.0f,
				
				        // Bottom face
				        1.0f, -1.0f, -1.0f,				
				        1.0f, -1.0f, 1.0f, 
				        -1.0f, -1.0f, -1.0f,
				        1.0f, -1.0f, 1.0f, 				
				        -1.0f, -1.0f, 1.0f,
				        -1.0f, -1.0f, -1.0f,
		        };

                // R, G, B, A
                float[] cubeColorData =
		        {				
				        // Front face (red)
				        1.0f, 0.0f, 0.0f, 1.0f,				
				        1.0f, 0.0f, 0.0f, 1.0f,
				        1.0f, 0.0f, 0.0f, 1.0f,
				        1.0f, 0.0f, 0.0f, 1.0f,				
				        1.0f, 0.0f, 0.0f, 1.0f,
				        1.0f, 0.0f, 0.0f, 1.0f,
				
				        // Right face (green)
				        0.0f, 1.0f, 0.0f, 1.0f,				
				        0.0f, 1.0f, 0.0f, 1.0f,
				        0.0f, 1.0f, 0.0f, 1.0f,
				        0.0f, 1.0f, 0.0f, 1.0f,				
				        0.0f, 1.0f, 0.0f, 1.0f,
				        0.0f, 1.0f, 0.0f, 1.0f,
				
				        // Back face (blue)
				        0.0f, 0.0f, 1.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f, 1.0f,
				        0.0f, 0.0f, 1.0f, 1.0f,
				        0.0f, 0.0f, 1.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f, 1.0f,
				        0.0f, 0.0f, 1.0f, 1.0f,
				
				        // Left face (yellow)
				        1.0f, 1.0f, 0.0f, 1.0f,				
				        1.0f, 1.0f, 0.0f, 1.0f,
				        1.0f, 1.0f, 0.0f, 1.0f,
				        1.0f, 1.0f, 0.0f, 1.0f,				
				        1.0f, 1.0f, 0.0f, 1.0f,
				        1.0f, 1.0f, 0.0f, 1.0f,
				
				        // Top face (cyan)
				        0.0f, 1.0f, 1.0f, 1.0f,				
				        0.0f, 1.0f, 1.0f, 1.0f,
				        0.0f, 1.0f, 1.0f, 1.0f,
				        0.0f, 1.0f, 1.0f, 1.0f,				
				        0.0f, 1.0f, 1.0f, 1.0f,
				        0.0f, 1.0f, 1.0f, 1.0f,
				
				        // Bottom face (magenta)
				        1.0f, 0.0f, 1.0f, 1.0f,				
				        1.0f, 0.0f, 1.0f, 1.0f,
				        1.0f, 0.0f, 1.0f, 1.0f,
				        1.0f, 0.0f, 1.0f, 1.0f,				
				        1.0f, 0.0f, 1.0f, 1.0f,
				        1.0f, 0.0f, 1.0f, 1.0f
		        };

                // X, Y, Z
                // The normal is used in light calculations and is a vector which points
                // orthogonal to the plane of the surface. For a cube model, the normals
                // should be orthogonal to the points of each face.
                float[] cubeNormalData =
		        {												
				        // Front face
				        0.0f, 0.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f,
				        0.0f, 0.0f, 1.0f,
				        0.0f, 0.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f,
				        0.0f, 0.0f, 1.0f,
				
				        // Right face 
				        1.0f, 0.0f, 0.0f,				
				        1.0f, 0.0f, 0.0f,
				        1.0f, 0.0f, 0.0f,
				        1.0f, 0.0f, 0.0f,				
				        1.0f, 0.0f, 0.0f,
				        1.0f, 0.0f, 0.0f,
				
				        // Back face 
				        0.0f, 0.0f, -1.0f,				
				        0.0f, 0.0f, -1.0f,
				        0.0f, 0.0f, -1.0f,
				        0.0f, 0.0f, -1.0f,				
				        0.0f, 0.0f, -1.0f,
				        0.0f, 0.0f, -1.0f,
				
				        // Left face 
				        -1.0f, 0.0f, 0.0f,				
				        -1.0f, 0.0f, 0.0f,
				        -1.0f, 0.0f, 0.0f,
				        -1.0f, 0.0f, 0.0f,				
				        -1.0f, 0.0f, 0.0f,
				        -1.0f, 0.0f, 0.0f,
				
				        // Top face 
				        0.0f, 1.0f, 0.0f,			
				        0.0f, 1.0f, 0.0f,
				        0.0f, 1.0f, 0.0f,
				        0.0f, 1.0f, 0.0f,				
				        0.0f, 1.0f, 0.0f,
				        0.0f, 1.0f, 0.0f,
				
				        // Bottom face 
				        0.0f, -1.0f, 0.0f,			
				        0.0f, -1.0f, 0.0f,
				        0.0f, -1.0f, 0.0f,
				        0.0f, -1.0f, 0.0f,				
				        0.0f, -1.0f, 0.0f,
				        0.0f, -1.0f, 0.0f
		        };
                #endregion

                // Initialize the buffers.
                mCubePositions = ByteBuffer.allocateDirect(cubePositionData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubePositions.put(cubePositionData).position(0);

                mCubeColors = ByteBuffer.allocateDirect(cubeColorData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubeColors.put(cubeColorData).position(0);

                mCubeNormals = ByteBuffer.allocateDirect(cubeNormalData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubeNormals.put(cubeNormalData).position(0);
            }
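A condensed sketch of how these cube buffers are typically bound for drawing in the Lesson Two style; mPositionHandle, mColorHandle and mNormalHandle are hypothetical attribute locations assumed to be obtained elsewhere with glGetAttribLocation.

                // sketch: binding the cube buffers for a draw call (36 vertices = 6 faces * 2 triangles * 3 vertices)
                mCubePositions.position(0);
                GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, mCubePositions);
                GLES20.glEnableVertexAttribArray(mPositionHandle);

                mCubeColors.position(0);
                GLES20.glVertexAttribPointer(mColorHandle, 4, GLES20.GL_FLOAT, false, 0, mCubeColors);
                GLES20.glEnableVertexAttribArray(mColorHandle);

                mCubeNormals.position(0);
                GLES20.glVertexAttribPointer(mNormalHandle, 3, GLES20.GL_FLOAT, false, 0, mCubeNormals);
                GLES20.glEnableVertexAttribArray(mNormalHandle);

                GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);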
Example No. 7
 public static void av_dct_calc(object dctc, FloatBuffer fb)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (avcodec.__\u003Cjniptr\u003Eav_dct_calc\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Favcodec\u0024DCTContext\u003BLjava\u002Fnio\u002FFloatBuffer\u003B\u0029V == IntPtr.Zero)
     avcodec.__\u003Cjniptr\u003Eav_dct_calc\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Favcodec\u0024DCTContext\u003BLjava\u002Fnio\u002FFloatBuffer\u003B\u0029V = JNI.Frame.GetFuncPtr(avcodec.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/avcodec", "av_dct_calc", "(Lcom/googlecode/javacv/cpp/avcodec$DCTContext;Ljava/nio/FloatBuffer;)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(avcodec.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<avcodec>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(dctc);
     // ISSUE: explicit reference operation
     IntPtr num5 = ((JNI.Frame) @frame).MakeLocalRef((object) fb);
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, IntPtr)>) avcodec.__\u003Cjniptr\u003Eav_dct_calc\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Favcodec\u0024DCTContext\u003BLjava\u002Fnio\u002FFloatBuffer\u003B\u0029V)(num2, num3, num4, num5);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
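A hypothetical usage sketch for av_dct_calc. It assumes the avcodec wrapper also exposes av_dct_init, av_dct_end and the DCT_II constant, as FFmpeg's avfft.h does, and that the buffer holds 1 << nbits samples.

   int nbits = 5;                                            // 32-sample transform
   FloatBuffer samples = ByteBuffer.allocateDirect((1 << nbits) * 4)
       .order(ByteOrder.nativeOrder()).asFloatBuffer();
   // ... fill 'samples' with input data ...
   object dct = avcodec.av_dct_init(nbits, avcodec.DCT_II);  // assumed to mirror FFmpeg's avfft.h
   avcodec.av_dct_calc(dct, samples);                        // in-place transform over the buffer
   avcodec.av_dct_end(dct);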
Example No. 8
 public static void cvCalcAffineFlowPyrLK(object ca1, object ca2, object ca3, object ca4, object cpd1, object cpd2, FloatBuffer fb1, int i1, object cs, int i2, byte[] barr, FloatBuffer fb2, object ctc, int i3)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (opencv_video.__\u003Cjniptr\u003EcvCalcAffineFlowPyrLK\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BLjava\u002Fnio\u002FFloatBuffer\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BI\u005BBLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BI\u0029V == IntPtr.Zero)
     opencv_video.__\u003Cjniptr\u003EcvCalcAffineFlowPyrLK\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BLjava\u002Fnio\u002FFloatBuffer\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BI\u005BBLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BI\u0029V = JNI.Frame.GetFuncPtr(opencv_video.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/opencv_video", "cvCalcAffineFlowPyrLK", "(Lcom/googlecode/javacv/cpp/opencv_core$CvArr;Lcom/googlecode/javacv/cpp/opencv_core$CvArr;Lcom/googlecode/javacv/cpp/opencv_core$CvArr;Lcom/googlecode/javacv/cpp/opencv_core$CvArr;Lcom/googlecode/javacv/cpp/opencv_core$CvPoint2D32f;Lcom/googlecode/javacv/cpp/opencv_core$CvPoint2D32f;Ljava/nio/FloatBuffer;ILcom/googlecode/javacv/cpp/opencv_core$CvSize;I[BLjava/nio/FloatBuffer;Lcom/googlecode/javacv/cpp/opencv_core$CvTermCriteria;I)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(opencv_video.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<opencv_video>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(ca1);
     // ISSUE: explicit reference operation
     IntPtr num5 = ((JNI.Frame) @frame).MakeLocalRef(ca2);
     // ISSUE: explicit reference operation
     IntPtr num6 = ((JNI.Frame) @frame).MakeLocalRef(ca3);
     // ISSUE: explicit reference operation
     IntPtr num7 = ((JNI.Frame) @frame).MakeLocalRef(ca4);
     // ISSUE: explicit reference operation
     IntPtr num8 = ((JNI.Frame) @frame).MakeLocalRef(cpd1);
     // ISSUE: explicit reference operation
     IntPtr num9 = ((JNI.Frame) @frame).MakeLocalRef(cpd2);
     // ISSUE: explicit reference operation
     IntPtr num10 = ((JNI.Frame) @frame).MakeLocalRef((object) fb1);
     int num11 = i1;
     // ISSUE: explicit reference operation
     IntPtr num12 = ((JNI.Frame) @frame).MakeLocalRef(cs);
     int num13 = i2;
     // ISSUE: explicit reference operation
     IntPtr num14 = ((JNI.Frame) @frame).MakeLocalRef((object) barr);
     // ISSUE: explicit reference operation
     IntPtr num15 = ((JNI.Frame) @frame).MakeLocalRef((object) fb2);
     // ISSUE: explicit reference operation
     IntPtr num16 = ((JNI.Frame) @frame).MakeLocalRef(ctc);
     int num17 = i3;
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, int, IntPtr, int, IntPtr, IntPtr, IntPtr, int)>) opencv_video.__\u003Cjniptr\u003EcvCalcAffineFlowPyrLK\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BLjava\u002Fnio\u002FFloatBuffer\u003BILcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BI\u005BBLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BI\u0029V)((int) num2, num3, num4, num5, (int) num6, num7, (int) num8, num9, num10, (IntPtr) num11, num12, (IntPtr) num13, num14, num15, num16, (IntPtr) num17);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
Example No. 9
 public static void glUniformMatrix4fv(int arg0, int arg1, bool arg2, FloatBuffer arg3) { }
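A small, hypothetical usage sketch for this overload, shown against android.opengl.GLES20 (which exposes the same FloatBuffer signature); uMvpLocation is an assumed uniform location.

  float[] mvp = new float[16];                                  // column-major 4x4 matrix
  Matrix.setIdentityM(mvp, 0);
  FloatBuffer mvpBuffer = FloatBuffer.wrap(mvp);
  GLES20.glUniformMatrix4fv(uMvpLocation, 1, false, mvpBuffer); // one matrix, no transpose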
Example No. 10
 public static void glVertexAttrib4fv(int arg0, FloatBuffer arg1) { }
Example No. 11
 public static void glUniform4fv(int arg0, int arg1, FloatBuffer arg2) { }
Example No. 12
 public static void glTexParameterfv(int arg0, int arg1, FloatBuffer arg2) { }
Example No. 13
 public static void glGetVertexAttribfv(int arg0, int arg1, FloatBuffer arg2) { }
Example No. 14
 public static void cvEigenProjection(object p1, int i1, int i2, object p2, FloatBuffer fb, object ii1, object ii2)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (opencv_legacy.__\u003Cjniptr\u003EcvEigenProjection\u0028Lcom\u002Fgooglecode\u002Fjavacpp\u002FPointer\u003BIILcom\u002Fgooglecode\u002Fjavacpp\u002FPointer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003B\u0029V == IntPtr.Zero)
     opencv_legacy.__\u003Cjniptr\u003EcvEigenProjection\u0028Lcom\u002Fgooglecode\u002Fjavacpp\u002FPointer\u003BIILcom\u002Fgooglecode\u002Fjavacpp\u002FPointer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003B\u0029V = JNI.Frame.GetFuncPtr(opencv_legacy.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/opencv_legacy", "cvEigenProjection", "(Lcom/googlecode/javacpp/Pointer;IILcom/googlecode/javacpp/Pointer;Ljava/nio/FloatBuffer;Lcom/googlecode/javacv/cpp/opencv_core$IplImage;Lcom/googlecode/javacv/cpp/opencv_core$IplImage;)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(opencv_legacy.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<opencv_legacy>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(p1);
     int num5 = i1;
     int num6 = i2;
     // ISSUE: explicit reference operation
     IntPtr num7 = ((JNI.Frame) @frame).MakeLocalRef(p2);
     // ISSUE: explicit reference operation
     IntPtr num8 = ((JNI.Frame) @frame).MakeLocalRef((object) fb);
     // ISSUE: explicit reference operation
     IntPtr num9 = ((JNI.Frame) @frame).MakeLocalRef(ii1);
     // ISSUE: explicit reference operation
     IntPtr num10 = ((JNI.Frame) @frame).MakeLocalRef(ii2);
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, int, int, IntPtr, IntPtr, IntPtr, IntPtr)>) opencv_legacy.__\u003Cjniptr\u003EcvEigenProjection\u0028Lcom\u002Fgooglecode\u002Fjavacpp\u002FPointer\u003BIILcom\u002Fgooglecode\u002Fjavacpp\u002FPointer\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024IplImage\u003B\u0029V)(num2, num3, num4, (IntPtr) num5, num6, (int) num7, num8, num9, num10);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
			//script: error JSC1000: Java : Opcode not implemented: stelem.r4 at AndroidOpenGLESLesson5Activity.Activities.AndroidOpenGLESLesson5Activity+LessonFiveRenderer+<>c.<.ctor>b__17_0

			public LessonFiveRenderer(Context activityContext)
			{
				this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object)__gl;

				mActivityContext = activityContext;

				#region generateCubeData
				Func<f[], f[], f[], f[], f[], f[], f[], f[], int, f[]> generateCubeData =
					(f[] point1,
					 f[] point2,
					 f[] point3,
					 f[] point4,
					 f[] point5,
					 f[] point6,
					 f[] point7,
					 f[] point8,
					 int elementsPerPoint) =>
					{
						// Given a cube with the points defined as follows:
						// front left top, front right top, front left bottom, front right bottom,
						// back left top, back right top, back left bottom, back right bottom,		
						// return an array of 6 sides, 2 triangles per side, 3 vertices per triangle, and 4 floats per vertex.
						int FRONT = 0;
						int RIGHT = 1;
						int BACK = 2;
						int LEFT = 3;
						int TOP = 4;
						int BOTTOM = 5;

						int size = elementsPerPoint * 6 * 6;
						float[] cubeData = new float[size];

						for (int face = 0; face < 6; face++)
						{
							// Relative to the side, p1 = top left, p2 = top right, p3 = bottom left, p4 = bottom right
							float[] p1, p2, p3, p4;

							// Select the points for this face
							if (face == FRONT)
							{
								p1 = point1; p2 = point2; p3 = point3; p4 = point4;
							}
							else if (face == RIGHT)
							{
								p1 = point2; p2 = point6; p3 = point4; p4 = point8;
							}
							else if (face == BACK)
							{
								p1 = point6; p2 = point5; p3 = point8; p4 = point7;
							}
							else if (face == LEFT)
							{
								p1 = point5; p2 = point1; p3 = point7; p4 = point3;
							}
							else if (face == TOP)
							{
								p1 = point5; p2 = point6; p3 = point1; p4 = point2;
							}
							else // if (side == BOTTOM)
							{
								p1 = point8; p2 = point7; p3 = point4; p4 = point3;
							}

							// In OpenGL counter-clockwise winding is default. This means that when we look at a triangle, 
							// if the points are counter-clockwise we are looking at the "front". If not we are looking at
							// the back. OpenGL has an optimization where all back-facing triangles are culled, since they
							// usually represent the backside of an object and aren't visible anyways.

							// Build the triangles
							//  1---3,6
							//  | / |
							// 2,4--5
							int offset = face * elementsPerPoint * 6;

							for (int i = 0; i < elementsPerPoint; i++) { cubeData[offset++] = p1[i]; }
							for (int i = 0; i < elementsPerPoint; i++) { cubeData[offset++] = p3[i]; }
							for (int i = 0; i < elementsPerPoint; i++) { cubeData[offset++] = p2[i]; }
							for (int i = 0; i < elementsPerPoint; i++) { cubeData[offset++] = p3[i]; }
							for (int i = 0; i < elementsPerPoint; i++) { cubeData[offset++] = p4[i]; }
							for (int i = 0; i < elementsPerPoint; i++) { cubeData[offset++] = p2[i]; }
						}

						return cubeData;
					};
				#endregion


				// Define points for a cube.
				// X, Y, Z
				float[] p1p = { -1.0f, 1.0f, 1.0f };
				float[] p2p = { 1.0f, 1.0f, 1.0f };
				float[] p3p = { -1.0f, -1.0f, 1.0f };
				float[] p4p = { 1.0f, -1.0f, 1.0f };
				float[] p5p = { -1.0f, 1.0f, -1.0f };
				float[] p6p = { 1.0f, 1.0f, -1.0f };
				float[] p7p = { -1.0f, -1.0f, -1.0f };
				float[] p8p = { 1.0f, -1.0f, -1.0f };

				float[] cubePositionData = generateCubeData(p1p, p2p, p3p, p4p, p5p, p6p, p7p, p8p, p1p.Length);

				// Points of the cube: color information
				// R, G, B, A
				float[] p1c = { 1.0f, 0.0f, 0.0f, 1.0f };		// red			
				float[] p2c = { 1.0f, 0.0f, 1.0f, 1.0f };		// magenta
				float[] p3c = { 0.0f, 0.0f, 0.0f, 1.0f };		// black
				float[] p4c = { 0.0f, 0.0f, 1.0f, 1.0f };		// blue
				float[] p5c = { 1.0f, 1.0f, 0.0f, 1.0f };		// yellow
				float[] p6c = { 1.0f, 1.0f, 1.0f, 1.0f };		// white
				float[] p7c = { 0.0f, 1.0f, 0.0f, 1.0f };		// green
				float[] p8c = { 0.0f, 1.0f, 1.0f, 1.0f };		// cyan

				float[] cubeColorData = generateCubeData(p1c, p2c, p3c, p4c, p5c, p6c, p7c, p8c, p1c.Length);

				// Initialize the buffers.
				mCubePositions = ByteBuffer.allocateDirect(cubePositionData.Length * mBytesPerFloat)
				.order(ByteOrder.nativeOrder()).asFloatBuffer();
				mCubePositions.put(cubePositionData).position(0);

				mCubeColors = ByteBuffer.allocateDirect(cubeColorData.Length * mBytesPerFloat)
				.order(ByteOrder.nativeOrder()).asFloatBuffer();
				mCubeColors.put(cubeColorData).position(0);
			}
Example No. 16
 public static void cvImgToObs_DCT(object ca, FloatBuffer fb, object cs1, object cs2, object cs3)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (opencv_legacy.__\u003Cjniptr\u003EcvImgToObs_DCT\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003B\u0029V == IntPtr.Zero)
     opencv_legacy.__\u003Cjniptr\u003EcvImgToObs_DCT\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003B\u0029V = JNI.Frame.GetFuncPtr(opencv_legacy.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/opencv_legacy", "cvImgToObs_DCT", "(Lcom/googlecode/javacv/cpp/opencv_core$CvArr;Ljava/nio/FloatBuffer;Lcom/googlecode/javacv/cpp/opencv_core$CvSize;Lcom/googlecode/javacv/cpp/opencv_core$CvSize;Lcom/googlecode/javacv/cpp/opencv_core$CvSize;)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(opencv_legacy.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<opencv_legacy>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(ca);
     // ISSUE: explicit reference operation
     IntPtr num5 = ((JNI.Frame) @frame).MakeLocalRef((object) fb);
     // ISSUE: explicit reference operation
     IntPtr num6 = ((JNI.Frame) @frame).MakeLocalRef(cs1);
     // ISSUE: explicit reference operation
     IntPtr num7 = ((JNI.Frame) @frame).MakeLocalRef(cs2);
     // ISSUE: explicit reference operation
     IntPtr num8 = ((JNI.Frame) @frame).MakeLocalRef(cs3);
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, IntPtr, IntPtr)>) opencv_legacy.__\u003Cjniptr\u003EcvImgToObs_DCT\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvArr\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvSize\u003B\u0029V)(num2, num3, num4, num5, num6, num7, num8);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
Example No. 17
 public static void cvPOSIT(object cposito, object cpd, double d, object ctc, FloatBuffer fb1, FloatBuffer fb2)
 {
   JNI.Frame frame = (JNI.Frame) null;
   if (opencv_calib3d.__\u003Cjniptr\u003EcvPOSIT\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_calib3d\u0024CvPOSITObject\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BDLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003B\u0029V == IntPtr.Zero)
     opencv_calib3d.__\u003Cjniptr\u003EcvPOSIT\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_calib3d\u0024CvPOSITObject\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BDLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003B\u0029V = JNI.Frame.GetFuncPtr(opencv_calib3d.__\u003CGetCallerID\u003E(), "com/googlecode/javacv/cpp/opencv_calib3d", "cvPOSIT", "(Lcom/googlecode/javacv/cpp/opencv_calib3d$CvPOSITObject;Lcom/googlecode/javacv/cpp/opencv_core$CvPoint2D32f;DLcom/googlecode/javacv/cpp/opencv_core$CvTermCriteria;Ljava/nio/FloatBuffer;Ljava/nio/FloatBuffer;)V");
   // ISSUE: explicit reference operation
   IntPtr num1 = ((JNI.Frame) @frame).Enter(opencv_calib3d.__\u003CGetCallerID\u003E());
   try
   {
     IntPtr num2 = num1;
     // ISSUE: explicit reference operation
     IntPtr num3 = ((JNI.Frame) @frame).MakeLocalRef((object) ClassLiteral<opencv_calib3d>.Value);
     // ISSUE: explicit reference operation
     IntPtr num4 = ((JNI.Frame) @frame).MakeLocalRef(cposito);
     // ISSUE: explicit reference operation
     IntPtr num5 = ((JNI.Frame) @frame).MakeLocalRef(cpd);
     double num6 = d;
     // ISSUE: explicit reference operation
     IntPtr num7 = ((JNI.Frame) @frame).MakeLocalRef(ctc);
     // ISSUE: explicit reference operation
     IntPtr num8 = ((JNI.Frame) @frame).MakeLocalRef((object) fb1);
     // ISSUE: explicit reference operation
     IntPtr num9 = ((JNI.Frame) @frame).MakeLocalRef((object) fb2);
     // ISSUE: cast to a function pointer type
     // ISSUE: function pointer call
     __calli((__FnPtr<void (IntPtr, IntPtr, IntPtr, IntPtr, double, IntPtr, IntPtr, IntPtr)>) opencv_calib3d.__\u003Cjniptr\u003EcvPOSIT\u0028Lcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_calib3d\u0024CvPOSITObject\u003BLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvPoint2D32f\u003BDLcom\u002Fgooglecode\u002Fjavacv\u002Fcpp\u002Fopencv_core\u0024CvTermCriteria\u003BLjava\u002Fnio\u002FFloatBuffer\u003BLjava\u002Fnio\u002FFloatBuffer\u003B\u0029V)(num2, num3, num4, (double) num5, (IntPtr) num6, num7, num8, num9);
   }
   catch (object ex)
   {
     Console.WriteLine((object) "*** exception in native code ***");
     Console.WriteLine(ex);
     throw;
   }
   finally
   {
     // ISSUE: explicit reference operation
     ((JNI.Frame) @frame).Leave();
   }
 }
            private void initTriangle()
            {
                // float has 4 bytes
                ByteBuffer vbb = ByteBuffer.allocateDirect(_nrOfVertices * 3 * 4);
                vbb.order(ByteOrder.nativeOrder());
                _vertexBuffer = vbb.asFloatBuffer();

                // short has 2 bytes
                ByteBuffer ibb = ByteBuffer.allocateDirect(_nrOfVertices * 2);
                ibb.order(ByteOrder.nativeOrder());
                _indexBuffer = ibb.asShortBuffer();

                // 4 color components (RGBA) per vertex, 4 bytes per float
                ByteBuffer cbb = ByteBuffer.allocateDirect(4 * _nrOfVertices * 4);
                cbb.order(ByteOrder.nativeOrder());
                _colorBuffer = cbb.asFloatBuffer();

                float[] coords = {
                    -0.5f, -0.5f, 0f, // (x1, y1, z1)
                    0.5f, -0.5f, 0f, // (x2, y2, z2)
                    0.5f, 0.5f, 0f // (x3, y3, z3)
                };

                float[] colors = {
                    1f, 0f, 0f, 1f, // point 1
                    0f, 1f, 0f, 1f, // point 2
                    0f, 0f, 1f, 1f, // point 3
                };

                _vertexBuffer.put(coords);
                _indexBuffer.put(_indicesArray);
                _colorBuffer.put(colors);

                _vertexBuffer.position(0);
                _indexBuffer.position(0);
                _colorBuffer.position(0);
            }
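As with the first triangle example, a hypothetical GL10-style draw sketch, this time also feeding the per-vertex colors prepared above.

                // sketch: drawing with per-vertex colors, assuming a GL10 'gl' from onDrawFrame
                gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
                gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
                gl.glVertexPointer(3, GL10.GL_FLOAT, 0, _vertexBuffer);
                gl.glColorPointer(4, GL10.GL_FLOAT, 0, _colorBuffer);   // 4 components (RGBA) per vertex
                gl.glDrawElements(GL10.GL_TRIANGLES, _nrOfVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
                gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
                gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);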
Example No. 19
 public override FloatBuffer Put(FloatBuffer src)
 {
     throw new ReadOnlyBufferException();
 }
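For context, the usual way such a read-only view arises in java.nio; calling put on it is what raises ReadOnlyBufferException, matching the override above.

 // sketch: obtaining a read-only FloatBuffer view
 FloatBuffer writable = FloatBuffer.allocate(16);
 FloatBuffer readOnly = writable.asReadOnlyBuffer();
 // readOnly.put(writable);   // would throw ReadOnlyBufferException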
        public void onSurfaceCreated(javax.microedition.khronos.egl.EGLConfig value)
        {
            Console.WriteLine("enter AndroidCardboardExperiment onSurfaceCreated");

            GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.

            ByteBuffer bbVertices = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COORDS.Length * 4);
            bbVertices.order(ByteOrder.nativeOrder());
            cubeVertices = bbVertices.asFloatBuffer();
            cubeVertices.put(WorldLayoutData.CUBE_COORDS);
            cubeVertices.position(0);

            ByteBuffer bbColors = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COLORS.Length * 4);
            bbColors.order(ByteOrder.nativeOrder());
            cubeColors = bbColors.asFloatBuffer();
            cubeColors.put(WorldLayoutData.CUBE_COLORS);
            cubeColors.position(0);

            ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(
                WorldLayoutData.CUBE_FOUND_COLORS.Length * 4);
            bbFoundColors.order(ByteOrder.nativeOrder());
            cubeFoundColors = bbFoundColors.asFloatBuffer();
            cubeFoundColors.put(WorldLayoutData.CUBE_FOUND_COLORS);
            cubeFoundColors.position(0);

            ByteBuffer bbNormals = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_NORMALS.Length * 4);
            bbNormals.order(ByteOrder.nativeOrder());
            cubeNormals = bbNormals.asFloatBuffer();
            cubeNormals.put(WorldLayoutData.CUBE_NORMALS);
            cubeNormals.position(0);

            // make a floor
            ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COORDS.Length * 4);
            bbFloorVertices.order(ByteOrder.nativeOrder());
            floorVertices = bbFloorVertices.asFloatBuffer();
            floorVertices.put(WorldLayoutData.FLOOR_COORDS);
            floorVertices.position(0);

            ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_NORMALS.Length * 4);
            bbFloorNormals.order(ByteOrder.nativeOrder());
            floorNormals = bbFloorNormals.asFloatBuffer();
            floorNormals.put(WorldLayoutData.FLOOR_NORMALS);
            floorNormals.position(0);

            ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COLORS.Length * 4);
            bbFloorColors.order(ByteOrder.nativeOrder());
            floorColors = bbFloorColors.asFloatBuffer();
            floorColors.put(WorldLayoutData.FLOOR_COLORS);
            floorColors.position(0);


            #region loadGLShader
            Func<int, string, int> loadGLShader = (int type, string code) =>
            {
                int shader = GLES20.glCreateShader(type);
                GLES20.glShaderSource(shader, code);
                GLES20.glCompileShader(shader);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    Console.WriteLine("Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
                    GLES20.glDeleteShader(shader);
                    shader = 0;
                }

                if (shader == 0)
                {
                    throw new Exception("Error creating shader.");
                }

                return shader;
            };
            #endregion


            int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, new AndroidCardboardExperiment.Shaders.light_vertexVertexShader().ToString());
            int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new AndroidCardboardExperiment.Shaders.grid_fragmentFragmentShader().ToString());
            int passthroughShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new AndroidCardboardExperiment.Shaders.passthrough_fragmentFragmentShader().ToString());

            cubeProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(cubeProgram, vertexShader);
            GLES20.glAttachShader(cubeProgram, passthroughShader);
            GLES20.glLinkProgram(cubeProgram);
            GLES20.glUseProgram(cubeProgram);

            checkGLError("Cube program");

            cubePositionParam = GLES20.glGetAttribLocation(cubeProgram, "a_Position");
            cubeNormalParam = GLES20.glGetAttribLocation(cubeProgram, "a_Normal");
            cubeColorParam = GLES20.glGetAttribLocation(cubeProgram, "a_Color");

            cubeModelParam = GLES20.glGetUniformLocation(cubeProgram, "u_Model");
            cubeModelViewParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVMatrix");
            cubeModelViewProjectionParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVP");
            cubeLightPosParam = GLES20.glGetUniformLocation(cubeProgram, "u_LightPos");

            GLES20.glEnableVertexAttribArray(cubePositionParam);
            GLES20.glEnableVertexAttribArray(cubeNormalParam);
            GLES20.glEnableVertexAttribArray(cubeColorParam);

            checkGLError("Cube program params");

            floorProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(floorProgram, vertexShader);
            GLES20.glAttachShader(floorProgram, gridShader);
            GLES20.glLinkProgram(floorProgram);
            GLES20.glUseProgram(floorProgram);

            checkGLError("Floor program");

            floorModelParam = GLES20.glGetUniformLocation(floorProgram, "u_Model");
            floorModelViewParam = GLES20.glGetUniformLocation(floorProgram, "u_MVMatrix");
            floorModelViewProjectionParam = GLES20.glGetUniformLocation(floorProgram, "u_MVP");
            floorLightPosParam = GLES20.glGetUniformLocation(floorProgram, "u_LightPos");

            floorPositionParam = GLES20.glGetAttribLocation(floorProgram, "a_Position");
            floorNormalParam = GLES20.glGetAttribLocation(floorProgram, "a_Normal");
            floorColorParam = GLES20.glGetAttribLocation(floorProgram, "a_Color");

            GLES20.glEnableVertexAttribArray(floorPositionParam);
            GLES20.glEnableVertexAttribArray(floorNormalParam);
            GLES20.glEnableVertexAttribArray(floorColorParam);

            checkGLError("Floor program params");

            GLES20.glEnable(GLES20.GL_DEPTH_TEST);

            // Object first appears directly in front of user.
            Matrix.setIdentityM(modelCube, 0);
            Matrix.translateM(modelCube, 0, 0, 0, -objectDistance);

            Matrix.setIdentityM(modelFloor, 0);
            Matrix.translateM(modelFloor, 0, 0, -floorDepth, 0); // Floor appears below user.

            checkGLError("onSurfaceCreated");

            Console.WriteLine("exit AndroidCardboardExperiment onSurfaceCreated");
        }
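A trimmed sketch of how the buffers and attribute/uniform locations set up above feed the per-eye draw; drawCube is a hypothetical helper, and modelView and modelViewProjection are assumed to be computed from the eye matrices as in the Cardboard samples.

        // sketch: drawing the cube with the state prepared in onSurfaceCreated
        public void drawCube(float[] modelView, float[] modelViewProjection)
        {
            GLES20.glUseProgram(cubeProgram);

            GLES20.glUniformMatrix4fv(cubeModelParam, 1, false, modelCube, 0);
            GLES20.glUniformMatrix4fv(cubeModelViewParam, 1, false, modelView, 0);
            GLES20.glUniformMatrix4fv(cubeModelViewProjectionParam, 1, false, modelViewProjection, 0);
            GLES20.glUniform3fv(cubeLightPosParam, 1, lightPosInEyeSpace, 0);

            GLES20.glVertexAttribPointer(cubePositionParam, 3, GLES20.GL_FLOAT, false, 0, cubeVertices);
            GLES20.glVertexAttribPointer(cubeNormalParam, 3, GLES20.GL_FLOAT, false, 0, cubeNormals);
            GLES20.glVertexAttribPointer(cubeColorParam, 4, GLES20.GL_FLOAT, false, 0, cubeColors);

            GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
        }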
        public void onSurfaceCreated(javax.microedition.khronos.egl.EGLConfig value)
        {
            Console.WriteLine("enter AndroidCardboardExperiment onSurfaceCreated");

            GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.

            ByteBuffer bbVertices = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COORDS.Length * 4);
            bbVertices.order(ByteOrder.nativeOrder());
            cubeVertices = bbVertices.asFloatBuffer();
            cubeVertices.put(WorldLayoutData.CUBE_COORDS);
            cubeVertices.position(0);

            ByteBuffer bbColors = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_COLORS.Length * 4);
            bbColors.order(ByteOrder.nativeOrder());
            cubeColors = bbColors.asFloatBuffer();
            cubeColors.put(WorldLayoutData.CUBE_COLORS);
            cubeColors.position(0);

            ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(
                WorldLayoutData.CUBE_FOUND_COLORS.Length * 4);
            bbFoundColors.order(ByteOrder.nativeOrder());
            cubeFoundColors = bbFoundColors.asFloatBuffer();
            cubeFoundColors.put(WorldLayoutData.CUBE_FOUND_COLORS);
            cubeFoundColors.position(0);

            ByteBuffer bbNormals = ByteBuffer.allocateDirect(WorldLayoutData.CUBE_NORMALS.Length * 4);
            bbNormals.order(ByteOrder.nativeOrder());
            cubeNormals = bbNormals.asFloatBuffer();
            cubeNormals.put(WorldLayoutData.CUBE_NORMALS);
            cubeNormals.position(0);

            // make a floor
            ByteBuffer bbFloorVertices = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COORDS.Length * 4);
            bbFloorVertices.order(ByteOrder.nativeOrder());
            floorVertices = bbFloorVertices.asFloatBuffer();
            floorVertices.put(WorldLayoutData.FLOOR_COORDS);
            floorVertices.position(0);

            ByteBuffer bbFloorNormals = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_NORMALS.Length * 4);
            bbFloorNormals.order(ByteOrder.nativeOrder());
            floorNormals = bbFloorNormals.asFloatBuffer();
            floorNormals.put(WorldLayoutData.FLOOR_NORMALS);
            floorNormals.position(0);

            var fcolors = 0xA26D41;
            // rgb to float

            //[javac]         return  __Enumerable.<Float>AsEnumerable(__SZArrayEnumerator_1.<Float>Of(x));
            //[javac]                                                                       ^
            //[javac]   required: T#1[]
            //[javac]   found: float[]
            //[javac]   reason: actual argument float[] cannot be converted to Float[] by method invocation conversion

            //          var FLOOR_COLORS = (
            //              from i in Enumerable.Range(0, 6)
            //              select new float[] { 0xA2 / 1.0f, 0x6D / 1.0f, 0x41 / 1.0f, 1.0f }
            //).SelectMany(x => x).ToArray();

            #region floorColors
            var FLOOR_COLORS = new float[4 * 6];

            for (int i = 0; i < FLOOR_COLORS.Length; i += 4)
            {
                FLOOR_COLORS[i + 0] = 0xA2 / 255.0f;   // normalize a 0..255 channel to 0..1
                FLOOR_COLORS[i + 1] = 0x6D / 255.0f;
                FLOOR_COLORS[i + 2] = 0x41 / 255.0f;
                FLOOR_COLORS[i + 3] = 1.0f;
            }



            FloatBuffer floorColors;

            ByteBuffer bbFloorColors = ByteBuffer.allocateDirect(WorldLayoutData.FLOOR_COLORS.Length * 4);
            bbFloorColors.order(ByteOrder.nativeOrder());
            floorColors = bbFloorColors.asFloatBuffer();
            //floorColors.put(WorldLayoutData.FLOOR_COLORS);
            floorColors.put(FLOOR_COLORS);
            floorColors.position(0);
            #endregion


            #region loadGLShader
            Func<int, ScriptCoreLib.GLSL.Shader, int> loadGLShader = (type, xshader) =>
            {
                var code = xshader.ToString();

                int shader = GLES20.glCreateShader(type);
                GLES20.glShaderSource(shader, code);
                GLES20.glCompileShader(shader);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    Console.WriteLine("Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
                    GLES20.glDeleteShader(shader);
                    shader = 0;
                }

                if (shader == 0)
                {
                    throw new Exception("Error creating shader.");
                }

                return shader;
            };
            #endregion


            int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, new AndroidCardboardExperiment.Shaders.light_vertexVertexShader());
            int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new Shaders.xgrid_fragmentFragmentShader());
            int passthroughShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, new AndroidCardboardExperiment.Shaders.passthrough_fragmentFragmentShader());

            cubeProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(cubeProgram, vertexShader);
            GLES20.glAttachShader(cubeProgram, passthroughShader);
            GLES20.glLinkProgram(cubeProgram);
            GLES20.glUseProgram(cubeProgram);

            checkGLError("Cube program");

            cubePositionParam = GLES20.glGetAttribLocation(cubeProgram, "a_Position");
            cubeNormalParam = GLES20.glGetAttribLocation(cubeProgram, "a_Normal");
            cubeColorParam = GLES20.glGetAttribLocation(cubeProgram, "a_Color");

            cubeModelParam = GLES20.glGetUniformLocation(cubeProgram, "u_Model");
            cubeModelViewParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVMatrix");
            cubeModelViewProjectionParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVP");
            cubeLightPosParam = GLES20.glGetUniformLocation(cubeProgram, "u_LightPos");

            GLES20.glEnableVertexAttribArray(cubePositionParam);
            GLES20.glEnableVertexAttribArray(cubeNormalParam);
            GLES20.glEnableVertexAttribArray(cubeColorParam);

            checkGLError("Cube program params");

            floorProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(floorProgram, vertexShader);
            GLES20.glAttachShader(floorProgram, gridShader);
            GLES20.glLinkProgram(floorProgram);
            GLES20.glUseProgram(floorProgram);

            checkGLError("Floor program");

            floorModelParam = GLES20.glGetUniformLocation(floorProgram, "u_Model");
            floorModelViewParam = GLES20.glGetUniformLocation(floorProgram, "u_MVMatrix");
            floorModelViewProjectionParam = GLES20.glGetUniformLocation(floorProgram, "u_MVP");
            floorLightPosParam = GLES20.glGetUniformLocation(floorProgram, "u_LightPos");

            floorPositionParam = GLES20.glGetAttribLocation(floorProgram, "a_Position");
            floorNormalParam = GLES20.glGetAttribLocation(floorProgram, "a_Normal");
            floorColorParam = GLES20.glGetAttribLocation(floorProgram, "a_Color");

            GLES20.glEnableVertexAttribArray(floorPositionParam);
            GLES20.glEnableVertexAttribArray(floorNormalParam);
            GLES20.glEnableVertexAttribArray(floorColorParam);

            checkGLError("Floor program params");

            GLES20.glEnable(GLES20.GL_DEPTH_TEST);
            //GLES20.glEnable(GLES20.GL_FOG);




            checkGLError("onSurfaceCreated");

            Console.WriteLine("exit AndroidCardboardExperiment onSurfaceCreated");


            vFinishFrame = (com.google.vrtoolkit.cardboard.Viewport v) =>
            {

                // GPU thread stops now..
                FrameOne.Stop();
            };

            // I/System.Console(28103): CardboardForEdgeExperiment { ProcessorCount = 8, MODEL = SM-G925F, CurrentManagedThreadId = 11305, FrameCounter = 28, LastFrameMilliseconds = 40, codeFPS = 25.0, pitch = 1.579644, yaw = 1.6225219 }

            #region vNewFrame
            vNewFrame = (com.google.vrtoolkit.cardboard.HeadTransform headTransform) =>
            {
                // http://stackoverflow.com/questions/11851343/raise-fps-on-android-tablet-above-60-for-opengl-game
                // http://gafferongames.com/game-physics/fix-your-timestep/

                #region FrameWatch
                if (FrameWatch.ElapsedMilliseconds >= 1000)
                {
                    var codeFPS = 1000.0 / FrameOne.ElapsedMilliseconds;

                    // we now know how many frames did fit into it
                    // need 60 or more!
                    Console.WriteLine("CardboardForEdgeExperiment " + new
                    {
                        // static
                        System.Environment.ProcessorCount,

                        android.os.Build.MODEL,

                        System.Environment.CurrentManagedThreadId,

                        FrameCounter,

                        // dynamic
                        LastFrameMilliseconds = FrameOne.ElapsedMilliseconds,
                        codeFPS,

                        // very dynamic
                        pitch,
                        yaw
                    });

                    // I/System.Console(28117): CardboardForEdgeExperiment { ProcessorCount = 2, MODEL = Nexus 9, CurrentManagedThreadId = 1647, FrameCounter = 60, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, pitch = 1.5978987, yaw = -2.0770574 }

                    FrameWatch.Restart();
                    FrameCounter = 0;
                }

                #endregion
                // GPU thread starts now..
                FrameOne.Restart();
                FrameCounter++;


                //Console.WriteLine("AndroidCardboardExperiment onNewFrame");





                headTransform.getHeadView(headView, 0);

                checkGLError("onReadyToDraw");

                // I/System.Console(27769): CardboardForEdgeExperiment { FrameCounter = 61, LastFrameMilliseconds = 0, codeFPS = Infinity, CurrentManagedThreadId = 1637, ProcessorCount = 2, MODEL = Nexus 9 }

                // add placeholder slowdown
                //System.Threading.Thread.Sleep(5);
                // I/System.Console(27840): CardboardForEdgeExperiment { FrameCounter = 60, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, CurrentManagedThreadId = 1642, ProcessorCount = 2, MODEL = Nexus 9 }

            };
            #endregion

            // if we define it here, we get to see it in vr...
            var modelCube = new float[16];

            // I/System.Console(19917): CardboardForEdgeExperiment { ProcessorCount = 8, MODEL = SM-G925F, CurrentManagedThreadId = 9959, FrameCounter = 46, LastFrameMilliseconds = 6, codeFPS = 166.66666666666666, pitch = 0.9070491, yaw = -0.3660261 }

            #region vDrawEye
            vDrawEye = (com.google.vrtoolkit.cardboard.Eye eye) =>
            {
                // VIDEO via "X:\util\android-sdk-windows\tools\ddms.bat"

                var camera = new float[16];


                // static void	setLookAtM(float[] rm, int rmOffset, float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ)
                // Build the camera matrix and apply it to the ModelView.
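                // eye = (0, 0, CAMERA_Z), center = the origin, up = +Y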
                Matrix.setLookAtM(camera, 0,

                    0.0f, 0.0f, CAMERA_Z,

                    0.0f, 0.0f, 0.0f,

                    0.0f, 1.0f, 0.0f);


                #region glClearColor
                // skybox/video instead?
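                // 0x87CEEB is the CSS/X11 "skyblue" color, divided by 255 into the 0..1 range glClearColor expects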
                GLES20.glClearColor(
                    0x87 / 255f,
                    0xCE / 255f,
                    0xEB / 255f, 1.0f
                );

                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
                #endregion




                var view = new float[16];

                // can we strafe?



                // Apply the eye transformation to the camera.
                Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0);


                // we tapped into it. this strafes us!
                Matrix.translateM(view, 0,

                    (float)Math.Sin(TotalTime.ElapsedMilliseconds * 0.0001f) * objectDistance * 2.5f,


                    // up down
                    //(float)Math.Sin(TotalTime.ElapsedMilliseconds * 0.001f) * floorDepth * 0.5f,
                    (float)Math.Cos(TotalTime.ElapsedMilliseconds * 0.001f) * floorDepth * 0.1f,

                    0
                    );
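                // the x term sweeps +/- (objectDistance * 2.5) on a ~63 s period (2*pi / 0.0001 ms),
                // while the y term bobs +/- (floorDepth * 0.1) on a ~6.3 s period - a slow strafe plus a gentle float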


                // Transform the light position from world space into eye space for the lighting calculation
                Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0);

                // Build the ModelView and ModelViewProjection matrices
                // for calculating cube position and light.
                float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);


                // just a buffer?
                var modelView = new float[16];


                #region drawCube()
                Action<float, float, float> drawCube = (tx, ty, tz) =>
                {

                    #region isLookingAtObject
                    Func<bool> isLookingAtObject = () =>
                    {
                        float[] initVec = { 0, 0, 0, 1.0f };



                        float[] objPositionVec = new float[4];

                        // Convert object space to camera space. Use the headView from onNewFrame.
                        Matrix.multiplyMM(modelView, 0, headView, 0, modelCube, 0);
                        Matrix.multiplyMV(objPositionVec, 0, modelView, 0, initVec, 0);


        
                        pitch = (float)Math.Atan2(objPositionVec[1], -objPositionVec[2]);
                        yaw = (float)Math.Atan2(objPositionVec[0], -objPositionVec[2]);

                        return Math.Abs(pitch) < PITCH_LIMIT && Math.Abs(yaw) < YAW_LIMIT;
                    };
                    #endregion
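                    // The gaze test above maps the cube's origin (0,0,0,1) into eye space via
                    // headView * modelCube, reads pitch/yaw off that vector with atan2, and
                    // reports "looking at it" only when both angles fall inside PITCH_LIMIT / YAW_LIMIT.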




                    // Object first appears directly in front of user.
                    Matrix.setIdentityM(modelCube, 0);
                    // can't see it? note: scale is currently unused because the scaleM call stays commented out
                    var scale = 5.0f;
                    //Matrix.scaleM(modelCube, 0, scale, scale, scale);

                    Matrix.translateM(modelCube, 0, tx, ty, tz);


                    Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0);
                    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);


                    // public static void scaleM (float[] m, int mOffset, float x, float y, float z)

                    // Build the Model part of the ModelView matrix.
                    //Matrix.rotateM(modelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

                    // can't see the rotation? it is applied to modelCube *after* modelView and
                    // modelViewProjection were computed above, so it only reaches the Model
                    // uniform (used for lighting), not the projected vertex positions.
                    Matrix.rotateM(modelCube, 0, TotalTime.ElapsedMilliseconds * 0.01f,
                        // upwards rot.
                        //0.5f, 

                        0f,

                        // sideways, left to right
                        0.5f
                        , 0.0f);


                    // http://developer.android.com/reference/android/opengl/Matrix.html#translateM(float[], int, float, float, float)


                    // the cube rotates in front of us.
                    // do we need to use a special program to draw a cube?
                    // how can we make it bigger?

                    GLES20.glUseProgram(cubeProgram);

                    GLES20.glUniform3fv(cubeLightPosParam, 1, lightPosInEyeSpace, 0);

                    // Set the Model in the shader, used to calculate lighting
                    GLES20.glUniformMatrix4fv(cubeModelParam, 1, false, modelCube, 0);

                    // Set the ModelView in the shader, used to calculate lighting
                    GLES20.glUniformMatrix4fv(cubeModelViewParam, 1, false, modelView, 0);

                    // Set the position of the cube
                    GLES20.glVertexAttribPointer(cubePositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, cubeVertices);

                    // Set the ModelViewProjection matrix in the shader.
                    GLES20.glUniformMatrix4fv(cubeModelViewProjectionParam, 1, false, modelViewProjection, 0);

                    // Set the normal positions of the cube, again for shading
                    GLES20.glVertexAttribPointer(cubeNormalParam, 3, GLES20.GL_FLOAT, false, 0, cubeNormals);


                    #region cubeColors
                    var cc = cubeColors;
                    if (!isLookingAtObject()) cc = cubeFoundColors;

                    GLES20.glVertexAttribPointer(cubeColorParam, 4, GLES20.GL_FLOAT, false, 0, cc);
                    #endregion

                    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
                    checkGLError("Drawing cube");
                };


                #endregion

                #region drawCube
                drawCube(0, objectDistance, objectDistance * -1.0f);


                drawCube(0, 0, objectDistance * -2.0f);

                // looks like an airstrip

                // low fps?
                //var endOfMatrix = 64;
                var endOfMatrix = 20;
                for (int i = -endOfMatrix; i < endOfMatrix; i++)
                {
                    drawCube(objectDistance, -floorDepth, objectDistance * -2.0f * i);
                    drawCube(-objectDistance, -floorDepth, objectDistance * -2.0f * i);


                    drawCube(objectDistance * 0.5f, 0, objectDistance * -2.0f * i);
                    drawCube(objectDistance * -0.5f, 0, objectDistance * -2.0f * i);
                }
                #endregion
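                // With endOfMatrix = 20 the loop issues 40 * 4 = 160 drawCube calls per eye
                // (plus the two cubes above), i.e. over 320 per stereo frame; at 64 that is
                // more than three times as many - hence the "low fps?" note.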





                var modelFloor = new float[16];

                Matrix.setIdentityM(modelFloor, 0);
                Matrix.translateM(modelFloor, 0,

                    // the floor escapes!
                    //TotalTime.ElapsedMilliseconds * 0.01f,
                    0, -floorDepth, 0); // Floor appears below user.

                // Set modelView for the floor, so we draw floor in the correct location
                Matrix.multiplyMM(modelView, 0, view, 0, modelFloor, 0);
                Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);

                #region drawFloor
                // called by onDrawEye
                Action drawFloor = delegate
                {
                    GLES20.glUseProgram(floorProgram);

                    // Set ModelView, MVP, position, normals, and color.
                    GLES20.glUniform3fv(floorLightPosParam, 1, lightPosInEyeSpace, 0);
                    GLES20.glUniformMatrix4fv(floorModelParam, 1, false, modelFloor, 0);
                    GLES20.glUniformMatrix4fv(floorModelViewParam, 1, false, modelView, 0);
                    GLES20.glUniformMatrix4fv(floorModelViewProjectionParam, 1, false,
                        modelViewProjection, 0);
                    GLES20.glVertexAttribPointer(floorPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
                        false, 0, floorVertices);
                    GLES20.glVertexAttribPointer(floorNormalParam, 3, GLES20.GL_FLOAT, false, 0,
                        floorNormals);
                    GLES20.glVertexAttribPointer(floorColorParam, 4, GLES20.GL_FLOAT, false, 0, floorColors);

                    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6);

                    checkGLError("drawing floor");
                };

                drawFloor();
                #endregion


            };
            #endregion

        }
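        // checkGLError(...) is invoked throughout the code above but its body is not part of
        // this excerpt. A minimal sketch of such a helper, assuming the usual GLES20
        // error-polling pattern (hypothetical body, not the original implementation):
        static void checkGLError(string label)
        {
            int error;

            // glGetError hands back queued errors one at a time until GL_NO_ERROR
            while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR)
                Console.WriteLine(label + ": glError " + error);
        }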
 public __Float32Array(params float[] array)
 {
     InternalFloatArray = array;
     InternalFloatBuffer = FloatBuffer.wrap(
         array
     );
 }
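 // FloatBuffer.wrap(array) produces a non-direct buffer that shares the backing array,
 // so writes through InternalFloatArray are visible via InternalFloatBuffer and vice versa.
 // Usage sketch (hypothetical):
 //
 //   var a = new __Float32Array(1f, 2f, 3f);
 //   a.InternalFloatArray[0] = 42f;
 //   // a.InternalFloatBuffer.get(0) now returns 42f
 //
 // Note: GL entry points that require a *direct* buffer still need the
 // ByteBuffer.allocateDirect(...).order(...).asFloatBuffer() pattern used elsewhere in this file.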
            public LessonFourRenderer(Context activityContext)
            {
                this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object)__gl;


                mActivityContext = activityContext;

                #region Define points for a cube.

                // X, Y, Z
                float[] cubePositionData =
		        {
				        // In OpenGL, counter-clockwise winding is the default. This means that when we look at a triangle,
				        // if its points are counter-clockwise we are looking at the "front"; otherwise we are looking at
				        // the back. OpenGL can cull all back-facing triangles as an optimization, since they
				        // usually represent the backside of an object and aren't visible anyway.
				
				        // Front face
				        -1.0f, 1.0f, 1.0f,				
				        -1.0f, -1.0f, 1.0f,
				        1.0f, 1.0f, 1.0f, 
				        -1.0f, -1.0f, 1.0f, 				
				        1.0f, -1.0f, 1.0f,
				        1.0f, 1.0f, 1.0f,
				
				        // Right face
				        1.0f, 1.0f, 1.0f,				
				        1.0f, -1.0f, 1.0f,
				        1.0f, 1.0f, -1.0f,
				        1.0f, -1.0f, 1.0f,				
				        1.0f, -1.0f, -1.0f,
				        1.0f, 1.0f, -1.0f,
				
				        // Back face
				        1.0f, 1.0f, -1.0f,				
				        1.0f, -1.0f, -1.0f,
				        -1.0f, 1.0f, -1.0f,
				        1.0f, -1.0f, -1.0f,				
				        -1.0f, -1.0f, -1.0f,
				        -1.0f, 1.0f, -1.0f,
				
				        // Left face
				        -1.0f, 1.0f, -1.0f,				
				        -1.0f, -1.0f, -1.0f,
				        -1.0f, 1.0f, 1.0f, 
				        -1.0f, -1.0f, -1.0f,				
				        -1.0f, -1.0f, 1.0f, 
				        -1.0f, 1.0f, 1.0f, 
				
				        // Top face
				        -1.0f, 1.0f, -1.0f,				
				        -1.0f, 1.0f, 1.0f, 
				        1.0f, 1.0f, -1.0f, 
				        -1.0f, 1.0f, 1.0f, 				
				        1.0f, 1.0f, 1.0f, 
				        1.0f, 1.0f, -1.0f,
				
				        // Bottom face
				        1.0f, -1.0f, -1.0f,				
				        1.0f, -1.0f, 1.0f, 
				        -1.0f, -1.0f, -1.0f,
				        1.0f, -1.0f, 1.0f, 				
				        -1.0f, -1.0f, 1.0f,
				        -1.0f, -1.0f, -1.0f,
		        };

                // R, G, B, A
                float[] cubeColorData =
		        {				
				        // Front face (red)
				        1.0f, 0.0f, 0.0f, 1.0f,				
				        1.0f, 0.0f, 0.0f, 1.0f,
				        1.0f, 0.0f, 0.0f, 1.0f,
				        1.0f, 0.0f, 0.0f, 1.0f,				
				        1.0f, 0.0f, 0.0f, 1.0f,
				        1.0f, 0.0f, 0.0f, 1.0f,
				
				        // Right face (green)
				        0.0f, 1.0f, 0.0f, 1.0f,				
				        0.0f, 1.0f, 0.0f, 1.0f,
				        0.0f, 1.0f, 0.0f, 1.0f,
				        0.0f, 1.0f, 0.0f, 1.0f,				
				        0.0f, 1.0f, 0.0f, 1.0f,
				        0.0f, 1.0f, 0.0f, 1.0f,
				
				        // Back face (blue)
				        0.0f, 0.0f, 1.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f, 1.0f,
				        0.0f, 0.0f, 1.0f, 1.0f,
				        0.0f, 0.0f, 1.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f, 1.0f,
				        0.0f, 0.0f, 1.0f, 1.0f,
				
				        // Left face (yellow)
				        1.0f, 1.0f, 0.0f, 1.0f,				
				        1.0f, 1.0f, 0.0f, 1.0f,
				        1.0f, 1.0f, 0.0f, 1.0f,
				        1.0f, 1.0f, 0.0f, 1.0f,				
				        1.0f, 1.0f, 0.0f, 1.0f,
				        1.0f, 1.0f, 0.0f, 1.0f,
				
				        // Top face (cyan)
				        0.0f, 1.0f, 1.0f, 1.0f,				
				        0.0f, 1.0f, 1.0f, 1.0f,
				        0.0f, 1.0f, 1.0f, 1.0f,
				        0.0f, 1.0f, 1.0f, 1.0f,				
				        0.0f, 1.0f, 1.0f, 1.0f,
				        0.0f, 1.0f, 1.0f, 1.0f,
				
				        // Bottom face (magenta)
				        1.0f, 0.0f, 1.0f, 1.0f,				
				        1.0f, 0.0f, 1.0f, 1.0f,
				        1.0f, 0.0f, 1.0f, 1.0f,
				        1.0f, 0.0f, 1.0f, 1.0f,				
				        1.0f, 0.0f, 1.0f, 1.0f,
				        1.0f, 0.0f, 1.0f, 1.0f
		        };

                // X, Y, Z
                // The normal is used in light calculations and is a vector which points
                // orthogonal to the plane of the surface. For a cube model, the normals
                // should be orthogonal to the points of each face.
                float[] cubeNormalData =
		        {												
				        // Front face
				        0.0f, 0.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f,
				        0.0f, 0.0f, 1.0f,
				        0.0f, 0.0f, 1.0f,				
				        0.0f, 0.0f, 1.0f,
				        0.0f, 0.0f, 1.0f,
				
				        // Right face 
				        1.0f, 0.0f, 0.0f,				
				        1.0f, 0.0f, 0.0f,
				        1.0f, 0.0f, 0.0f,
				        1.0f, 0.0f, 0.0f,				
				        1.0f, 0.0f, 0.0f,
				        1.0f, 0.0f, 0.0f,
				
				        // Back face 
				        0.0f, 0.0f, -1.0f,				
				        0.0f, 0.0f, -1.0f,
				        0.0f, 0.0f, -1.0f,
				        0.0f, 0.0f, -1.0f,				
				        0.0f, 0.0f, -1.0f,
				        0.0f, 0.0f, -1.0f,
				
				        // Left face 
				        -1.0f, 0.0f, 0.0f,				
				        -1.0f, 0.0f, 0.0f,
				        -1.0f, 0.0f, 0.0f,
				        -1.0f, 0.0f, 0.0f,				
				        -1.0f, 0.0f, 0.0f,
				        -1.0f, 0.0f, 0.0f,
				
				        // Top face 
				        0.0f, 1.0f, 0.0f,			
				        0.0f, 1.0f, 0.0f,
				        0.0f, 1.0f, 0.0f,
				        0.0f, 1.0f, 0.0f,				
				        0.0f, 1.0f, 0.0f,
				        0.0f, 1.0f, 0.0f,
				
				        // Bottom face 
				        0.0f, -1.0f, 0.0f,			
				        0.0f, -1.0f, 0.0f,
				        0.0f, -1.0f, 0.0f,
				        0.0f, -1.0f, 0.0f,				
				        0.0f, -1.0f, 0.0f,
				        0.0f, -1.0f, 0.0f
		        };

                // S, T (or X, Y)
                // Texture coordinate data.
                // Because images have a Y axis pointing downward (values increase as you move down the image) while
                // OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
                // What's more, the texture coordinates are the same for every face.
                float[] cubeTextureCoordinateData =
		        {												
				        // Front face
				        0.0f, 0.0f, 				
				        0.0f, 1.0f,
				        1.0f, 0.0f,
				        0.0f, 1.0f,
				        1.0f, 1.0f,
				        1.0f, 0.0f,				
				
				        // Right face 
				        0.0f, 0.0f, 				
				        0.0f, 1.0f,
				        1.0f, 0.0f,
				        0.0f, 1.0f,
				        1.0f, 1.0f,
				        1.0f, 0.0f,	
				
				        // Back face 
				        0.0f, 0.0f, 				
				        0.0f, 1.0f,
				        1.0f, 0.0f,
				        0.0f, 1.0f,
				        1.0f, 1.0f,
				        1.0f, 0.0f,	
				
				        // Left face 
				        0.0f, 0.0f, 				
				        0.0f, 1.0f,
				        1.0f, 0.0f,
				        0.0f, 1.0f,
				        1.0f, 1.0f,
				        1.0f, 0.0f,	
				
				        // Top face 
				        0.0f, 0.0f, 				
				        0.0f, 1.0f,
				        1.0f, 0.0f,
				        0.0f, 1.0f,
				        1.0f, 1.0f,
				        1.0f, 0.0f,	
				
				        // Bottom face 
				        0.0f, 0.0f, 				
				        0.0f, 1.0f,
				        1.0f, 0.0f,
				        0.0f, 1.0f,
				        1.0f, 1.0f,
				        1.0f, 0.0f
		        };
                #endregion

                // Initialize the buffers.
                mCubePositions = ByteBuffer.allocateDirect(cubePositionData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubePositions.put(cubePositionData).position(0);

                mCubeColors = ByteBuffer.allocateDirect(cubeColorData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubeColors.put(cubeColorData).position(0);

                mCubeNormals = ByteBuffer.allocateDirect(cubeNormalData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubeNormals.put(cubeNormalData).position(0);

                mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
            }
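            // The four buffer initializations above repeat the same
            // allocateDirect / order / asFloatBuffer / put / position(0) chain.
            // A minimal consolidation sketch (hypothetical helper, not used by the original code):
            private FloatBuffer ToDirectFloatBuffer(float[] data)
            {
                FloatBuffer fb = ByteBuffer.allocateDirect(data.Length * mBytesPerFloat)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer();

                // fill and rewind so GL reads from the start
                fb.put(data);
                fb.position(0);
                return fb;
            }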
            private void initTriangle()
            {
                float[] coords = {
                        -0.5f, -0.5f, 0.5f, // 0
                        0.5f, -0.5f, 0.5f, // 1
                        0f, -0.5f, -0.5f, // 2
                        0f, 0.5f, 0f, // 3
                };
                _nrOfVertices = coords.Length; // note: this is the number of floats (4 vertices * 3 components = 12), not the vertex count

                float[] colors = {
                        1f, 0f, 0f, 1f, // point 0 red
                        0f, 1f, 0f, 1f, // point 1 green
                        0f, 0f, 1f, 1f, // point 2 blue
                        1f, 1f, 1f, 1f, // point 3 white
                };

                short[] indices = new short[] {
                        0, 1, 3, // red, green, white
                        0, 2, 1, // red, blue, green
                        0, 3, 2, // red, white, blue
                        1, 2, 3, // green, blue, white
                };

                // float has 4 bytes, coordinate * 4 bytes
                ByteBuffer vbb = ByteBuffer.allocateDirect(coords.Length * 4);
                vbb.order(ByteOrder.nativeOrder());
                _vertexBuffer = vbb.asFloatBuffer();

                // short has 2 bytes, indices * 2 bytes
                ByteBuffer ibb = ByteBuffer.allocateDirect(indices.Length * 2);
                ibb.order(ByteOrder.nativeOrder());
                _indexBuffer = ibb.asShortBuffer();

                // float has 4 bytes, colors (RGBA) * 4 bytes
                ByteBuffer cbb = ByteBuffer.allocateDirect(colors.Length * 4);
                cbb.order(ByteOrder.nativeOrder());
                _colorBuffer = cbb.asFloatBuffer();

                _vertexBuffer.put(coords);
                _indexBuffer.put(indices);
                _colorBuffer.put(colors);

                _vertexBuffer.position(0);
                _indexBuffer.position(0);
                _colorBuffer.position(0);
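                // A draw-time sketch for the buffers prepared above (hypothetical attribute
                // handles; assumes a GLES20 program with position and color attributes):
                //
                //   GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 0, _vertexBuffer);
                //   GLES20.glVertexAttribPointer(colorHandle, 4, GLES20.GL_FLOAT, false, 0, _colorBuffer);
                //   // 4 triangles * 3 indices = 12 elements, read as unsigned shorts
                //   GLES20.glDrawElements(GLES20.GL_TRIANGLES, 12, GLES20.GL_UNSIGNED_SHORT, _indexBuffer);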
            }
            public LessonOneRenderer()
            {
                this.gl = (ScriptCoreLib.JavaScript.WebGL.WebGLRenderingContext)(object)__gl;


                #region Define points for equilateral triangles.

                // This triangle is red, green, and blue.
                float[] triangle1VerticesData = {
				// X, Y, Z, 
				// R, G, B, A
	            -0.5f, -0.25f, 0.0f, 
	            1.0f, 0.0f, 0.0f, 1.0f,
	            
	            0.5f, -0.25f, 0.0f,
	            0.0f, 0.0f, 1.0f, 1.0f,
	            
	            0.0f, 0.559016994f, 0.0f, 
	            0.0f, 1.0f, 0.0f, 1.0f};

                // This triangle is yellow, cyan, and magenta.
                float[] triangle2VerticesData = {
				// X, Y, Z, 
				// R, G, B, A
	            -0.5f, -0.25f, 0.0f, 
	            1.0f, 1.0f, 0.0f, 1.0f,
	            
	            0.5f, -0.25f, 0.0f, 
	            0.0f, 1.0f, 1.0f, 1.0f,
	            
	            0.0f, 0.559016994f, 0.0f, 
	            1.0f, 0.0f, 1.0f, 1.0f};

                // This triangle is white, gray, and black.
                float[] triangle3VerticesData = {
				// X, Y, Z, 
				// R, G, B, A
	            -0.5f, -0.25f, 0.0f, 
	            1.0f, 1.0f, 1.0f, 1.0f,
	            
	            0.5f, -0.25f, 0.0f, 
	            0.5f, 0.5f, 0.5f, 1.0f,
	            
	            0.0f, 0.559016994f, 0.0f, 
	            0.0f, 0.0f, 0.0f, 1.0f};
                #endregion

                // Initialize the buffers.
                mTriangle1Vertices = ByteBuffer.allocateDirect(triangle1VerticesData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mTriangle2Vertices = ByteBuffer.allocateDirect(triangle2VerticesData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mTriangle3Vertices = ByteBuffer.allocateDirect(triangle3VerticesData.Length * mBytesPerFloat)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();

                mTriangle1Vertices.put(triangle1VerticesData).position(0);
                mTriangle2Vertices.put(triangle2VerticesData).position(0);
                mTriangle3Vertices.put(triangle3VerticesData).position(0);
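                // Each vertex above packs 3 position floats followed by 4 color floats,
                // so the interleaved stride is 7 * mBytesPerFloat = 28 bytes.
                // Draw-time sketch (hypothetical handle names, following the usual Lesson One setup):
                //
                //   mTriangle1Vertices.position(0);   // positions start at float 0
                //   GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 7 * mBytesPerFloat, mTriangle1Vertices);
                //   mTriangle1Vertices.position(3);   // colors start at float 3
                //   GLES20.glVertexAttribPointer(mColorHandle, 4, GLES20.GL_FLOAT, false, 7 * mBytesPerFloat, mTriangle1Vertices);
                //   GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);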
            }