Example #1
        bool SetupCaptureSession()
        {
            Console.WriteLine("SetupCaptureSession");
            // Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
            // is used to deliver both audio and video buffers, and our video processing consistently takes
            // too long, the delivery queue can back up, resulting in audio being dropped.
            //
            // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
            // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously
            // (see the sketch of that queue and the delegate fields after this listing).
            //
            // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

            // Create Capture session
            captureSession = new AVCaptureSession();
            captureSession.BeginConfiguration();
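            // Illustrative only (not in the original sample): the default preset mentioned in the
            // overview comment could also be set explicitly, e.g.
            //     captureSession.SessionPreset = AVCaptureSession.PresetHigh;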

            // Create audio connection
            NSError error;
            var     audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);          //AudioDevice ();

            if (audioDevice == null)
            {
                return false; // e.g. simulator
            }
            AVCaptureDeviceInput audioIn = new AVCaptureDeviceInput(audioDevice, out error);

            if (captureSession.CanAddInput(audioIn))
            {
                captureSession.AddInput(audioIn);
            }

            AVCaptureAudioDataOutput audioOut = new AVCaptureAudioDataOutput();

            // Add the Delegate to capture each sample that comes through
            audioOut.SetSampleBufferDelegatequeue(audioDataOutputDelegate, movieWritingQueue);

            if (captureSession.CanAddOutput(audioOut))
            {
                captureSession.AddOutput(audioOut);
            }

            // Create Video Session
            var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);              //VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
            AVCaptureDeviceInput videoIn = new AVCaptureDeviceInput(videoDevice, out error);

            if (captureSession.CanAddInput(videoIn))
            {
                captureSession.AddInput(videoIn);
            }

            // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
            // processing can take longer than real-time on some platforms (such as iPhone 3GS).
            // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
            // alwaysDiscardsLateVideoFrames property to NO.
            AVCaptureVideoDataOutput videoOut = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true,
                VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA)
            };

            // Create a DispatchQueue for the Video Processing
            DispatchQueue videoCaptureQueue = new DispatchQueue("Video Capture Queue");

            videoOut.SetSampleBufferDelegateAndQueue(videoDataOutputDelegate, videoCaptureQueue);

            if (captureSession.CanAddOutput(videoOut))
            {
                captureSession.AddOutput(videoOut);
            }

            // Set the Video connection from the Video Output object
            videoConnection  = videoOut.ConnectionFromMediaType(AVMediaType.Video);
            videoOrientation = videoConnection.VideoOrientation;

            captureSession.CommitConfiguration();

            return true;
        }
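
The method above relies on several members that are declared elsewhere in the RosyWriter sample: captureSession, movieWritingQueue, videoConnection, videoOrientation, and the two sample-buffer delegates. The sketch below shows one plausible shape for those declarations; the containing class name, the delegate class names, and the per-buffer handling are assumptions made for illustration, not the sample's actual code.

        using AVFoundation;     // AVCapture* types (MonoTouch.AVFoundation in classic MonoTouch projects)
        using CoreFoundation;   // DispatchQueue
        using CoreMedia;        // CMSampleBuffer

        public partial class RosyWriterCapture   // class name assumed for illustration
        {
            AVCaptureSession captureSession;
            AVCaptureConnection videoConnection;
            AVCaptureVideoOrientation videoOrientation;

            // The "third GCD queue" from the overview comment: AVAssetWriter start/finish calls are
            // dispatched onto this serial queue so they never run from two threads at once.
            DispatchQueue movieWritingQueue = new DispatchQueue ("Movie Writing Queue");

            // Delegates handed to SetSampleBufferDelegatequeue / SetSampleBufferDelegateAndQueue above.
            AudioDataOutputDelegate audioDataOutputDelegate = new AudioDataOutputDelegate ();
            VideoDataOutputDelegate videoDataOutputDelegate = new VideoDataOutputDelegate ();
        }

        // Bare-bones delegate skeletons; the real sample processes the pixels and forwards the
        // buffers to an AVAssetWriter on movieWritingQueue.
        class VideoDataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput,
                                                        CMSampleBuffer sampleBuffer,
                                                        AVCaptureConnection connection)
            {
                // Handle the video frame, then dispose the buffer promptly so the capture
                // pipeline does not back up and start dropping frames.
                sampleBuffer.Dispose ();
            }
        }

        class AudioDataOutputDelegate : AVCaptureAudioDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput,
                                                        CMSampleBuffer sampleBuffer,
                                                        AVCaptureConnection connection)
            {
                // Handle the audio buffer (e.g. append it to the asset writer input), then dispose it.
                sampleBuffer.Dispose ();
            }
        }

Setting the audio delegate's queue to movieWritingQueue, as the listing does, means audio buffers already arrive on the same serial queue the writer uses, which is one way to avoid the cross-thread start/finish problem described in the overview comment.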