Example #1
        public void SetupGL()
        {
            EAGLContext.SetCurrentContext(context);
            SetupBuffers();
            LoadShaders();

            GL.UseProgram(Program);

            // 0 and 1 are the texture units to which lumaTexture and chromaTexture are bound, respectively.
            GL.Uniform1(uniforms [(int)UniformIndex.Y], 0);
            GL.Uniform1(uniforms [(int)UniformIndex.UV], 1);
            GL.Uniform1(uniforms [(int)UniformIndex.RotationAngle], 0);
            GL.UniformMatrix3(uniforms [(int)UniformIndex.ColorConversionMatrix], 1, false, preferredConversion);

            if (videoTextureCache != null)
            {
                return;
            }

            videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context);
            if (videoTextureCache == null)
            {
                Console.WriteLine("Error at CVOpenGLESTextureCache.FromEAGLContext");
            }
        }
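
For context, here is a minimal sketch of how a cache created this way is typically consumed per frame. The UpdateTextures method, its pixelBuffer argument, and the lumaTexture/chromaTexture fields are assumptions for illustration, not part of the example above:

        // Hypothetical per-frame handler; assumes a bi-planar YUV CVPixelBuffer
        // and CVOpenGLESTexture fields named lumaTexture and chromaTexture.
        private void UpdateTextures(CVPixelBuffer pixelBuffer)
        {
            // Release the previous frame's textures, then flush the cache.
            lumaTexture?.Dispose();
            chromaTexture?.Dispose();
            videoTextureCache.Flush(CVOptionFlags.None);

            // Plane 0: luma (Y). Newer samples use All.RedExt instead of All.Luminance.
            lumaTexture = videoTextureCache.TextureFromImage(pixelBuffer, true, All.Luminance,
                (int)pixelBuffer.GetWidthOfPlane(0), (int)pixelBuffer.GetHeightOfPlane(0),
                All.Luminance, DataType.UnsignedByte, 0, out CVReturn lumaStatus);
            GL.ActiveTexture(TextureUnit.Texture0);
            GL.BindTexture(lumaTexture.Target, lumaTexture.Name);

            // Plane 1: chroma (UV), two components per texel.
            chromaTexture = videoTextureCache.TextureFromImage(pixelBuffer, true, All.LuminanceAlpha,
                (int)pixelBuffer.GetWidthOfPlane(1), (int)pixelBuffer.GetHeightOfPlane(1),
                All.LuminanceAlpha, DataType.UnsignedByte, 1, out CVReturn chromaStatus);
            GL.ActiveTexture(TextureUnit.Texture1);
            GL.BindTexture(chromaTexture.Target, chromaTexture.Name);
        }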
Example #2
        private bool CreateFrameBuffer()
        {
            var success = true;

            GL.Disable(EnableCap.DepthTest);

            GL.GenFramebuffers(1, out frameBuffer);
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, frameBuffer);

            GL.GenRenderbuffers(1, out colorBuffer);
            GL.BindRenderbuffer(RenderbufferTarget.Renderbuffer, colorBuffer);

            context.RenderBufferStorage((uint)All.Renderbuffer, (CAEAGLLayer)Layer);

            GL.GetRenderbufferParameter(RenderbufferTarget.Renderbuffer, RenderbufferParameterName.RenderbufferWidth, out renderBufferWidth);
            GL.GetRenderbufferParameter(RenderbufferTarget.Renderbuffer, RenderbufferParameterName.RenderbufferHeight, out renderBufferHeight);

            GL.FramebufferRenderbuffer(FramebufferTarget.Framebuffer, FramebufferSlot.ColorAttachment0, RenderbufferTarget.Renderbuffer, colorBuffer);

            if (GL.CheckFramebufferStatus(FramebufferTarget.Framebuffer) != FramebufferErrorCode.FramebufferComplete)
            {
                Console.WriteLine("Failure with framebuffer generation");
                success = false;
            }

            // Create a new CVOpenGLESTextureCache
            videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context);

            glProgram = CreateProgram();

            return success && (glProgram != 0);
        }
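
Once CreateFrameBuffer succeeds, a typical per-frame render pass binds the framebuffer, draws, and presents the color renderbuffer through the EAGL context. A minimal sketch, assuming the same context, frameBuffer, colorBuffer, and size fields as above (the Render method itself is hypothetical):

        // Hypothetical per-frame render pass for the framebuffer created above.
        private void Render()
        {
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, frameBuffer);
            GL.Viewport(0, 0, renderBufferWidth, renderBufferHeight);

            GL.ClearColor(0f, 0f, 0f, 1f);
            GL.Clear(ClearBufferMask.ColorBufferBit);

            // ... issue draw calls here ...

            // Present the color renderbuffer that backs the CAEAGLLayer.
            GL.BindRenderbuffer(RenderbufferTarget.Renderbuffer, colorBuffer);
            context.PresentRenderBuffer((uint)All.Renderbuffer);
        }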
Example #3
        private void SetupAVCapture(NSString sessionPreset)
        {
            if ((this.VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(this.context)) == null)
            {
                Console.WriteLine("Could not create the CoreVideo TextureCache");
                return;
            }

            this.session = new AVCaptureSession();
            this.session.BeginConfiguration();

            // Preset size
            this.session.SessionPreset = sessionPreset;

            // Input device
            var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            if (videoDevice == null)
            {
                Console.WriteLine("No video device");
                return;
            }

            var input = new AVCaptureDeviceInput(videoDevice, out NSError error);

            if (error != null)
            {
                Console.WriteLine("Error creating video capture device");
                return;
            }

            this.session.AddInput(input);

            // Create the output device
            using (var dataOutput = new AVCaptureVideoDataOutput())
            {
                dataOutput.AlwaysDiscardsLateVideoFrames = true;

                // YUV 420; "BiPlanar" stores the Y and UV planes in two separate blocks of
                // memory, so plane 0 yields Y and plane 1 yields UV when decoding frames.
                //VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)

                this.dataOutputDelegate = new DataOutputDelegate(this);

                //
                // Dispatch the video frames onto the main thread, because the OpenGL
                // code accesses the data synchronously.
                //
                dataOutput.SetSampleBufferDelegateQueue(this.dataOutputDelegate, DispatchQueue.MainQueue);
                this.session.AddOutput(dataOutput);
            }

            this.session.CommitConfiguration();
            this.session.StartRunning();
        }
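
The DataOutputDelegate used above is not shown in this example. A minimal sketch of what such a sample-buffer delegate usually looks like (the class body and the commented-out hand-off call are assumptions):

        // Hypothetical implementation of the delegate wired up above.
        private class DataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            private readonly object controller;

            public DataOutputDelegate(object controller)
            {
                this.controller = controller;
            }

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        // Hand the frame to the GL code, e.g. to create luma/chroma
                        // textures through the CVOpenGLESTextureCache:
                        //controller.OnFrameCaptured(pixelBuffer);
                    }
                }
                finally
                {
                    // Sample buffers must be released promptly, or capture will stall.
                    sampleBuffer.Dispose();
                }
            }
        }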
Example #4
        public void SetupOffScreenRenderContext()
        {
            //-- Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
            if (VideoTextureCache != null)
            {
                VideoTextureCache.Dispose();
                VideoTextureCache = null;
            }

            VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(CurrentContext);
            GL.Disable(EnableCap.DepthTest);
            GL.GenFramebuffers(1, out OffscreenBufferHandle);
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, OffscreenBufferHandle);
        }
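
To render into a CVPixelBuffer with this offscreen setup, the destination buffer is usually wrapped in a cache texture and attached as the framebuffer's color target. A sketch under that assumption (the AttachRenderTarget method and its destinationBuffer argument are hypothetical):

        // Hypothetical render-target setup for the offscreen framebuffer above.
        private void AttachRenderTarget(CVPixelBuffer destinationBuffer)
        {
            // Wrap the destination pixel buffer in a GLES texture via the cache.
            var renderTexture = VideoTextureCache.TextureFromImage(destinationBuffer, true, All.Rgba,
                (int)destinationBuffer.Width, (int)destinationBuffer.Height,
                All.Bgra, DataType.UnsignedByte, 0, out CVReturn status);

            GL.BindTexture(renderTexture.Target, renderTexture.Name);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);

            // Attach the texture as the color attachment of the bound offscreen framebuffer.
            GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferSlot.ColorAttachment0, TextureTarget.Texture2D, renderTexture.Name, 0);
        }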