Code Example #1
File: EAGLLayer.cs Project: ysmadhav/ios-samples
        public void SetupGL()
        {
            EAGLContext.SetCurrentContext(context);
            SetupBuffers();
            LoadShaders();

            GL.UseProgram(Program);

            // 0 and 1 are the texture units to which lumaTexture and chromaTexture
            // are bound, matching the Y and UV sampler uniforms set below.
            GL.Uniform1(uniforms [(int)UniformIndex.Y], 0);
            GL.Uniform1(uniforms [(int)UniformIndex.UV], 1);
            GL.Uniform1(uniforms [(int)UniformIndex.RotationAngle], 0);
            GL.UniformMatrix3(uniforms [(int)UniformIndex.ColorConversionMatrix], 1, false, preferredConversion);

            if (videoTextureCache != null)
            {
                return;
            }

            videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context);
            if (videoTextureCache == null)
            {
                Console.WriteLine("Error at CVOpenGLESTextureCache.FromEAGLContext");
            }
        }
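The cache created here is then used to map each incoming CVPixelBuffer onto GLES textures, one TextureFromImage call per plane. The sketch below is an assumption about how the rest of this class consumes the cache; the lumaTexture and chromaTexture fields and the UpdateTextures name are illustrative, not taken from the project:

        // A minimal sketch, not from the sample: map a bi-planar YUV pixel buffer
        // into the two textures that the Y/UV uniforms above sample from.
        // lumaTexture and chromaTexture are assumed CVOpenGLESTexture fields.
        void UpdateTextures(CVPixelBuffer pixelBuffer)
        {
            int width = (int)pixelBuffer.Width;
            int height = (int)pixelBuffer.Height;
            CVReturn status;

            // Plane 0: Y (luma), one byte per pixel, bound to texture unit 0.
            lumaTexture = videoTextureCache.TextureFromImage(
                pixelBuffer, true, All.Luminance, width, height,
                All.Luminance, DataType.UnsignedByte, 0, out status);
            GL.ActiveTexture(TextureUnit.Texture0);
            GL.BindTexture(TextureTarget.Texture2D, lumaTexture.Name);

            // Plane 1: interleaved CbCr (chroma), half resolution on each axis,
            // bound to texture unit 1.
            chromaTexture = videoTextureCache.TextureFromImage(
                pixelBuffer, true, All.LuminanceAlpha, width / 2, height / 2,
                All.LuminanceAlpha, DataType.UnsignedByte, 1, out status);
            GL.ActiveTexture(TextureUnit.Texture1);
            GL.BindTexture(TextureTarget.Texture2D, chromaTexture.Name);
        }

Texture units 0 and 1 here line up with the GL.Uniform1 calls in SetupGL.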
Code Example #2
        private bool CreateFrameBuffer()
        {
            var success = true;

            GL.Disable(EnableCap.DepthTest);

            GL.GenFramebuffers(1, out frameBuffer);
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, frameBuffer);

            GL.GenRenderbuffers(1, out colorBuffer);
            GL.BindRenderbuffer(RenderbufferTarget.Renderbuffer, colorBuffer);

            context.RenderBufferStorage((uint)All.Renderbuffer, (CAEAGLLayer)Layer);

            GL.GetRenderbufferParameter(RenderbufferTarget.Renderbuffer, RenderbufferParameterName.RenderbufferWidth, out renderBufferWidth);
            GL.GetRenderbufferParameter(RenderbufferTarget.Renderbuffer, RenderbufferParameterName.RenderbufferHeight, out renderBufferHeight);

            GL.FramebufferRenderbuffer(FramebufferTarget.Framebuffer, FramebufferSlot.ColorAttachment0, RenderbufferTarget.Renderbuffer, colorBuffer);

            if (GL.CheckFramebufferStatus(FramebufferTarget.Framebuffer) != FramebufferErrorCode.FramebufferComplete)
            {
                Console.WriteLine("Failure with framebuffer generation");
                success = false;
            }

            // Create a new CVOpenGLESTextureCache
            videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context);

            glProgram = CreateProgram();

            return success && (glProgram != 0);
        }
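CreateFrameBuffer has no visible counterpart in this excerpt. A hypothetical teardown, useful when the layer is resized and the buffers must be recreated, could look like the following; DestroyFrameBuffer is an assumed name, not part of the sample:

        // Hypothetical counterpart to CreateFrameBuffer: delete the GL objects
        // so the buffers can be recreated after a bounds or layout change.
        private void DestroyFrameBuffer()
        {
            if (frameBuffer != 0)
            {
                GL.DeleteFramebuffers(1, ref frameBuffer);
                frameBuffer = 0;
            }

            if (colorBuffer != 0)
            {
                GL.DeleteRenderbuffers(1, ref colorBuffer);
                colorBuffer = 0;
            }
        }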
Code Example #3
        private void SetupAVCapture(NSString sessionPreset)
        {
            if ((this.VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(this.context)) == null)
            {
                Console.WriteLine("Could not create the CoreVideo TextureCache");
                return;
            }

            this.session = new AVCaptureSession();
            this.session.BeginConfiguration();

            // Preset size
            this.session.SessionPreset = sessionPreset;

            // Input device
            var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            if (videoDevice == null)
            {
                Console.WriteLine("No video device");
                return;
            }

            var input = new AVCaptureDeviceInput(videoDevice, out NSError error);

            if (error != null)
            {
                Console.WriteLine("Error creating video capture device");
                return;
            }

            this.session.AddInput(input);

            // Create the output device
            using (var dataOutput = new AVCaptureVideoDataOutput())
            {
                dataOutput.AlwaysDiscardsLateVideoFrames = true;

                // YUV 420: "BiPlanar" splits the Y and UV planes into two separate
                // blocks of memory, so plane index 0 yields the Y plane and index 1
                // the UV plane when decoding the frame.
                //VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)

                this.dataOutputDelegate = new DataOutputDelegate(this);

                //
                // Dispatch the video frames onto the main queue, because the
                // OpenGL code accesses the frame data synchronously.
                //
                dataOutput.SetSampleBufferDelegateQueue(this.dataOutputDelegate, DispatchQueue.MainQueue);
                this.session.AddOutput(dataOutput);
            }

            this.session.CommitConfiguration();
            this.session.StartRunning();
        }
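The DataOutputDelegate constructed above lives elsewhere in the project; only its name appears in this excerpt. A plausible minimal shape for it is sketched below, assuming an EAGLView owner type and a ProcessFrame method, both of which are illustrative:

        // A sketch of what the DataOutputDelegate used above might contain; the
        // body here is an assumption, not code from the project.
        private class DataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            private readonly EAGLView container; // assumed owner type

            public DataOutputDelegate(EAGLView container)
            {
                this.container = container;
            }

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    // Runs on the main queue (see SetSampleBufferDelegateQueue above),
                    // so the GL code can read the pixel buffer synchronously.
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        container.ProcessFrame(pixelBuffer); // assumed method
                    }
                }
                finally
                {
                    // Release the sample buffer promptly, or capture will stall.
                    sampleBuffer.Dispose();
                }
            }
        }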
Code Example #4
        public void SetupOffScreenRenderContext()
        {
            //-- Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
            if (VideoTextureCache != null)
            {
                VideoTextureCache.Dispose();
                VideoTextureCache = null;
            }

            VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(CurrentContext);
            GL.Disable(EnableCap.DepthTest);
            GL.GenFramebuffers(1, out OffscreenBufferHandle);
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, OffscreenBufferHandle);
        }
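A cache like the one created above is typically flushed once a frame has been rendered, so CoreVideo can recycle its pixel buffers. A small sketch of that cleanup, with an illustrative method name:

        // Hypothetical per-frame cleanup for the cache created above: unbind the
        // cached textures, then flush so CoreVideo can recycle the pixel buffers.
        public void CleanUpTextures()
        {
            GL.BindTexture(TextureTarget.Texture2D, 0);
            VideoTextureCache.Flush(CVOptionFlags.None);
        }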