		bool CreateFrameBuffer ()
		{
			bool success = true;

			GL.Disable (EnableCap.DepthTest);

			// Create the framebuffer and bind it so the attachments below apply to it.
			GL.GenFramebuffers (1, out FrameBuffer);
			GL.BindFramebuffer (FramebufferTarget.Framebuffer, FrameBuffer);

			// Create a color renderbuffer and let the CAEAGLLayer allocate its storage,
			// which ties the renderbuffer to the on-screen layer.
			GL.GenRenderbuffers (1, out ColorBuffer);
			GL.BindRenderbuffer (RenderbufferTarget.Renderbuffer, ColorBuffer);
			Context.RenderBufferStorage ((uint)All.Renderbuffer, (CAEAGLLayer)Layer);

			// Query the dimensions the layer assigned to the renderbuffer.
			GL.GetRenderbufferParameter (RenderbufferTarget.Renderbuffer, RenderbufferParameterName.RenderbufferWidth, out renderBufferWidth);
			GL.GetRenderbufferParameter (RenderbufferTarget.Renderbuffer, RenderbufferParameterName.RenderbufferHeight, out renderBufferHeight);

			// Attach the renderbuffer as the framebuffer's color attachment and verify completeness.
			GL.FramebufferRenderbuffer (FramebufferTarget.Framebuffer, FramebufferSlot.ColorAttachment0, RenderbufferTarget.Renderbuffer, ColorBuffer);

			var status = GL.CheckFramebufferStatus (FramebufferTarget.Framebuffer);
			if (status != FramebufferErrorCode.FramebufferComplete) {
				Console.WriteLine ("Framebuffer generation failed: {0}", status);
				success = false;
			}

			// Create a new CVOpenGLESTextureCache for fast CVPixelBuffer-to-texture conversion.
			videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (Context);

			glProgram = CreateProgram ();
			if (glProgram == 0)
				success = false;

			return success;
		}
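		//
		// CreateProgram is referenced above but not shown in this excerpt. The helper
		// below is a minimal sketch of what such a method might look like with the
		// OpenTK ES 2.0 bindings, not the sample's actual implementation; the two GLSL
		// source strings are hypothetical placeholders.
		//

		// Hypothetical shader sources; a real program would use the sample's own shaders.
		const string vertexShaderSource = "attribute vec4 position; void main () { gl_Position = position; }";
		const string fragmentShaderSource = "precision mediump float; void main () { gl_FragColor = vec4 (1.0); }";

		int CreateProgram ()
		{
			int vertexShader = CompileShader (ShaderType.VertexShader, vertexShaderSource);
			int fragmentShader = CompileShader (ShaderType.FragmentShader, fragmentShaderSource);
			if (vertexShader == 0 || fragmentShader == 0)
				return 0;

			int program = GL.CreateProgram ();
			GL.AttachShader (program, vertexShader);
			GL.AttachShader (program, fragmentShader);
			GL.LinkProgram (program);

			int linked;
			GL.GetProgram (program, ProgramParameter.LinkStatus, out linked);
			if (linked == 0) {
				Console.WriteLine ("Program link failed: {0}", GL.GetProgramInfoLog (program));
				GL.DeleteProgram (program);
				return 0;
			}
			return program;
		}

		static int CompileShader (ShaderType type, string source)
		{
			int shader = GL.CreateShader (type);
			GL.ShaderSource (shader, 1, new [] { source }, (int[]) null);
			GL.CompileShader (shader);

			int compiled;
			GL.GetShader (shader, ShaderParameter.CompileStatus, out compiled);
			if (compiled == 0) {
				Console.WriteLine ("Shader compile failed: {0}", GL.GetShaderInfoLog (shader));
				GL.DeleteShader (shader);
				return 0;
			}
			return shader;
		}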
		public void SetupOffScreenRenderContext ()
		{
			//-- Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
			if (VideoTextureCache != null) {
				VideoTextureCache.Dispose ();
				VideoTextureCache = null;
			}

			VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (CurrentContext);
			GL.Disable (EnableCap.DepthTest);
			GL.GenFramebuffers (1, out OffscreenBufferHandle);
			GL.BindFramebuffer (FramebufferTarget.Framebuffer, OffscreenBufferHandle);
		}
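		//
		// A sketch (not code from the sample) of how the offscreen framebuffer and the
		// texture cache can work together: TextureFromImage maps a CVPixelBuffer into a
		// GLES texture without copying, and attaching that texture as the framebuffer's
		// color attachment makes GL render straight into the pixel buffer. The method
		// name is hypothetical and the buffer is assumed to be 32BGRA.
		//
		void RenderIntoPixelBuffer (CVPixelBuffer pixelBuffer)
		{
			CVReturn status;
			using (var texture = VideoTextureCache.TextureFromImage (pixelBuffer, true, All.Rgba,
				pixelBuffer.Width, pixelBuffer.Height, All.Bgra, DataType.UnsignedByte, 0, out status)) {
				if (status != CVReturn.Success || texture == null)
					return;

				GL.BindTexture (TextureTarget.Texture2D, texture.Name);
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int) All.Linear);
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int) All.ClampToEdge);
				GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int) All.ClampToEdge);

				// Rendering into the framebuffer now writes directly into pixelBuffer.
				GL.FramebufferTexture2D (FramebufferTarget.Framebuffer, FramebufferSlot.ColorAttachment0,
					TextureTarget.Texture2D, texture.Name, 0);

				// ... issue the draw calls here ...
			}
			// Flush the cache so the mapping does not accumulate stale textures.
			VideoTextureCache.Flush (CVOptionFlags.None);
		}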
		void SetupAVCapture (NSString sessionPreset)
		{
			if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (context)) == null) {
				Console.WriteLine ("Could not create the CoreVideo TextureCache");
				return;
			}
			session = new AVCaptureSession ();
			session.BeginConfiguration ();
			
			// Preset size
			session.SessionPreset = sessionPreset;
			
			// Input device
			var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (videoDevice == null) {
				Console.WriteLine ("No video device");
				return;
			}
			NSError err;
			var input = new AVCaptureDeviceInput (videoDevice, out err);
			if (err != null) {
				Console.WriteLine ("Error creating video capture device input: {0}", err.LocalizedDescription);
				return;
			}
			session.AddInput (input);
			
			// Create the output device
			var dataOutput = new AVCaptureVideoDataOutput () {
				AlwaysDiscardsLateVideoFrames = true,
				
				// YUV 420 "BiPlanar" stores the Y and CbCr planes in two separate blocks of
				// memory, so during frame decoding plane index 0 yields the Y data and index 1 the UV data.
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
			};
					
			dataOutputDelegate = new DataOutputDelegate (this);

			//
			// Deliver the video frames on the main queue, because the OpenGL
			// code accesses the frame data synchronously on that thread.
			//
			dataOutput.SetSampleBufferDelegateAndQueue (dataOutputDelegate, DispatchQueue.MainQueue);
			session.AddOutput (dataOutput);
			session.CommitConfiguration ();
			session.StartRunning ();
		}
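		//
		// A sketch of the DataOutputDelegate created above; the sample's actual
		// implementation is not shown in this excerpt. "CaptureView" is a hypothetical
		// name for the class that owns videoTextureCache. With the BiPlanar pixel
		// format, plane 0 of the CVPixelBuffer maps to a luma texture and plane 1 to a
		// half-resolution chroma texture, following the pattern used by the camera
		// texture-cache samples.
		//
		class DataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate {
			CaptureView container;

			public DataOutputDelegate (CaptureView container)
			{
				this.container = container;
			}

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
						CVReturn status;
						int width = pixelBuffer.Width;
						int height = pixelBuffer.Height;

						// Plane 0: full-resolution Y (luma) samples.
						var lumaTexture = container.videoTextureCache.TextureFromImage (
							pixelBuffer, true, All.Luminance, width, height,
							All.Luminance, DataType.UnsignedByte, 0, out status);

						// Plane 1: interleaved CbCr (chroma) samples at half resolution.
						var chromaTexture = container.videoTextureCache.TextureFromImage (
							pixelBuffer, true, All.LuminanceAlpha, width / 2, height / 2,
							All.LuminanceAlpha, DataType.UnsignedByte, 1, out status);

						// Bind both textures, draw the frame, then flush the cache
						// before the next frame arrives.
						container.videoTextureCache.Flush (CVOptionFlags.None);
					}
				} finally {
					// Sample buffers must be released promptly or capture will stall.
					sampleBuffer.Dispose ();
				}
			}
		}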