Example #1
        private void SetupAVCapture(NSString sessionPreset)
        {
            if ((this.VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(this.context)) == null)
            {
                Console.WriteLine("Could not create the CoreVideo TextureCache");
                return;
            }

            this.session = new AVCaptureSession();
            this.session.BeginConfiguration();

            // Preset size
            this.session.SessionPreset = sessionPreset;

            // Input device
            var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            if (videoDevice == null)
            {
                Console.WriteLine("No video device");
                return;
            }

            var input = new AVCaptureDeviceInput(videoDevice, out NSError error);

            if (error != null)
            {
                Console.WriteLine("Error creating video capture device");
                return;
            }

            this.session.AddInput(input);

            // Create the output device
            using (var dataOutput = new AVCaptureVideoDataOutput())
            {
                dataOutput.AlwaysDiscardsLateVideoFrames = true;

                // YUV 420: "BiPlanar" splits the Y and UV planes into two separate blocks of
                // memory, so plane index 0 yields the Y data and plane index 1 the UV data
                // when decoding the frame. With the current bindings this format can be
                // requested through WeakVideoSettings, for example:
                //
                // dataOutput.WeakVideoSettings = new CVPixelBufferAttributes {
                //     PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
                // }.Dictionary;

                this.dataOutputDelegate = new DataOutputDelegate(this);

                //
                // Dispatch the video frames onto the main queue, because the OpenGL
                // code accesses the frame data synchronously.
                //
                dataOutput.SetSampleBufferDelegateQueue(this.dataOutputDelegate, DispatchQueue.MainQueue);
                this.session.AddOutput(dataOutput);
            }

            this.session.CommitConfiguration();
            this.session.StartRunning();
        }
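
The example assigns a DataOutputDelegate to the data output but does not show that class. Below is a minimal sketch of what such a sample-buffer delegate might look like, assuming a hypothetical GLCameraView host that exposes the VideoTextureCache created above and a LumaTexture property; the TextureFromImage call follows the plane indexing described in the YUV 420 comment (plane 0 = Y).

        using System;
        using AVFoundation;
        using CoreMedia;
        using CoreVideo;
        using OpenTK.Graphics.ES20;

        // Hypothetical delegate: receives each captured frame on the main queue and
        // uploads its luma plane to an OpenGL ES texture via the texture cache.
        public class DataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            private readonly GLCameraView view;

            public DataOutputDelegate(GLCameraView view)
            {
                this.view = view;
            }

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput,
                CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        if (pixelBuffer == null)
                            return;

                        // Plane 0 of a BiPlanar YUV 420 buffer holds the Y (luma) data.
                        this.view.LumaTexture = this.view.VideoTextureCache.TextureFromImage(
                            pixelBuffer, true, All.RedExt,
                            (int)pixelBuffer.Width, (int)pixelBuffer.Height,
                            All.RedExt, DataType.UnsignedByte, 0, out CVReturn status);

                        if (status != CVReturn.Success)
                            Console.WriteLine($"Could not create luma texture: {status}");
                    }
                }
                finally
                {
                    // Release the sample buffer promptly, or capture will stall.
                    sampleBuffer.Dispose();
                }
            }
        }

A real implementation would also create the chroma (UV) texture from plane 1 and flush the texture cache between frames; the call shapes above follow the Xamarin.iOS bindings, but GLCameraView, LumaTexture and the exact texture formats are illustrative assumptions.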