Example #1
        void SetCaptureType()
        {
            if (shouldCaptureFrames && captureSession.CanAddOutput(videoOutput))
            {
                captureSession.AddOutput(videoOutput);

                // We want the buffers to be in portrait orientation otherwise they are
                // rotated by 90 degrees. Need to set this _after_ addOutput()!
                var captureConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);
                captureConnection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            }
            else if (captureSession.Outputs.Contains(videoOutput))
            {
                captureSession.RemoveOutput(videoOutput);
            }
        }
        void ConfigureAVCaptureSession()
        {
            session = new AVCaptureSession();
            session.SessionPreset = sessionPreset;

            var device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            if (device == null)
            {
                return;                // e.g. simulator, no camera available
            }

            try
            {
                var input = new AVCaptureDeviceInput(device, out var error);

                if (error != null)
                {
                    throw new Exception(error.LocalizedDescription);
                }

                session.AddInput(input);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            var videoDataOutput      = new AVCaptureVideoDataOutput();
            var videoDataOutputQueue = new DispatchQueue("videodataqueue", false);

            videoDataOutput.SetSampleBufferDelegateQueue(this, videoDataOutputQueue);
            var settings = new AVVideoSettingsUncompressed()
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            videoDataOutput.WeakVideoSettings = settings.Dictionary;
            session.AddOutput(videoDataOutput);

            var connection = videoDataOutput.ConnectionFromMediaType(AVMediaType.Video);

            connection.Enabled = true;
        }
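
ConfigureAVCaptureSession registers this as the sample-buffer delegate. As a point of reference, here is a minimal sketch of what such a delegate can look like (the class name and processing body are placeholders, not part of the original example):

        // Hypothetical delegate type; SampleBufferHandler is an assumed name.
        class SampleBufferHandler : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput,
                CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                {
                    // Process the 32BGRA pixel buffer here.
                }

                // The capture pipeline recycles a small pool of buffers; dispose
                // each CMSampleBuffer promptly or frame delivery stalls.
                sampleBuffer.Dispose();
            }
        }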
Example #3
        public void BeginSession()
        {
            try
            {
                captureSession.BeginConfiguration();

                var settings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                };
                videoOutput.WeakVideoSettings             = settings.Dictionary;
                videoOutput.AlwaysDiscardsLateVideoFrames = true;
                videoOutput.SetSampleBufferDelegateQueue(this, queue);

                captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                captureSession.AddOutput(videoOutput);

                NSError err;
                var     input = new AVCaptureDeviceInput(captureDevice, out err);
                if (err != null)
                {
                    // Bail out instead of adding an input that failed to initialize.
                    Console.WriteLine("AVCapture error: " + err);
                    captureSession.CommitConfiguration();
                    return;
                }
                captureSession.AddInput(input);

                videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);

                captureSession.CommitConfiguration();
                captureSession.StartRunning();
                Console.WriteLine("started AV capture session");
            }
            catch
            {
                Console.WriteLine("error connecting to the capture device");
            }
        }
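
The example wires everything up but shows no teardown; a minimal counterpart, assuming the same captureSession and videoOutput fields (a sketch, not the original code):

        public void EndSession()
        {
            if (captureSession.Running)
            {
                captureSession.StopRunning();
            }
            captureSession.RemoveOutput(videoOutput);
        }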
Example #4
        private CameraPreview DispatchOpenWithPreviewAsync(Size previewRequestSize)
        {
            _session.BeginConfiguration();
            var videoOutput = new AVCaptureVideoDataOutput();
            var settings    = new AVVideoSettingsUncompressed {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            // UncompressedVideoSetting and WeakVideoSettings wrap the same
            // underlying dictionary, so setting it once is sufficient.
            videoOutput.UncompressedVideoSetting      = settings;
            videoOutput.AlwaysDiscardsLateVideoFrames = true;

            var preview = new CameraPreview(previewRequestSize, new System.Drawing.Size(720, 1280));

            videoOutput.SetSampleBufferDelegateQueue(preview, new DispatchQueue("sample buffer"));

            _session.AddOutput(videoOutput);

            var videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);

            videoConnection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            videoConnection.VideoMirrored    = true;

            _photoOutput = new AVCapturePhotoOutput
            {
                IsHighResolutionCaptureEnabled = true
            };
            _photoOutput.SetPreparedPhotoSettingsAsync(new[] { CreatePhotoSettings() });

            _session.SessionPreset = AVCaptureSession.Preset1280x720;
            _session.AddOutput(_photoOutput);
            _session.CommitConfiguration();
            _session.StartRunning();
            _isRunning = true;
            return(preview);
        }
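
CreatePhotoSettings() is called above but not shown. A plausible sketch, assuming default-codec capture (a hypothetical body, not the original helper):

        AVCapturePhotoSettings CreatePhotoSettings()
        {
            // Assumed implementation: default codec with high-resolution photos,
            // matching the IsHighResolutionCaptureEnabled flag set on the output.
            var photoSettings = AVCapturePhotoSettings.Create();
            photoSettings.IsHighResolutionPhotoEnabled = true;
            return photoSettings;
        }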
        bool SetupCaptureSession()
        {
            //Console.WriteLine ("SetupCaptureSession");
            // Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
            // is used to deliver both audio and video buffers, and our video processing consistently takes
            // too long, the delivery queue can back up, resulting in audio being dropped.
            //
            // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
            // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
            //
            // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

            // Create Capture session
            captureSession = new AVCaptureSession();
            captureSession.BeginConfiguration();

            // Create audio connection
            NSError error;
            var     audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

            if (audioDevice == null)
            {
                return(false);                // e.g. simulator
            }
            var audioIn = new AVCaptureDeviceInput(audioDevice, out error);

            if (captureSession.CanAddInput(audioIn))
            {
                captureSession.AddInput(audioIn);
            }

            var audioOut          = new AVCaptureAudioDataOutput();
            var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

            // Add the Delegate to capture each sample that comes through
            audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

            if (captureSession.CanAddOutput(audioOut))
            {
                captureSession.AddOutput(audioOut);
            }

            audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

            // Create Video Session
            var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
            var videoIn     = new AVCaptureDeviceInput(videoDevice, out error);

            if (captureSession.CanAddInput(videoIn))
            {
                captureSession.AddInput(videoIn);
            }

            // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
            // processing can take longer than real-time on some platforms (such as iPhone 3GS).
            // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
            // alwaysDiscardsLateVideoFrames property to NO.
            var videoOut = new AVCaptureVideoDataOutput {
                AlwaysDiscardsLateVideoFrames = true,
                // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
                // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
                WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            // Create a DispatchQueue for the Video Processing
            var videoCaptureQueue = new DispatchQueue("Video Capture Queue");

            videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

            if (captureSession.CanAddOutput(videoOut))
            {
                captureSession.AddOutput(videoOut);
            }

            // Set the Video connection from the Video Output object
            videoConnection  = videoOut.ConnectionFromMediaType(AVMediaType.Video);
            videoOrientation = videoConnection.VideoOrientation;

            captureSession.CommitConfiguration();

            return(true);
        }
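
The overview comment mentions a third GCD queue for AVAssetWriter calls. A minimal sketch of that pattern, with assumed field names (not RosyWriter's actual code):

        // Assumed fields: a configured AVAssetWriter plus its serial queue.
        AVAssetWriter assetWriter;
        DispatchQueue movieWritingQueue = new DispatchQueue("Movie Writing Queue");

        void StartRecording()
        {
            // Funnel start/finish calls through one queue so AVAssetWriter is
            // never driven from multiple threads simultaneously.
            movieWritingQueue.DispatchAsync(() =>
            {
                if (assetWriter.Status == AVAssetWriterStatus.Unknown)
                {
                    assetWriter.StartWriting();
                }
            });
        }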
Example #7
        void EnableCameraView()
        {
            if (captureSession != null)
            {
                return;
            }

            captureSession = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.Preset1280x720
            };

            captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            if (captureDevice == null)
            {
                ReleaseCaptureSession();
                return;
            }

            if (!captureDevice.LockForConfiguration(out NSError error))
            {
                Console.WriteLine("Could not lock device for configuration: " + error);
                ReleaseCaptureSession();
                ReleaseCaptureDevice();
                return;
            }

            var    format  = captureDevice.ActiveFormat;
            double epsilon = 0.00000001;

            var desiredFrameRate = 30;

            // Default to 15 fps; overridden below when the active format
            // supports the desired frame rate.
            captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
            foreach (var range in format.VideoSupportedFrameRateRanges)
            {
                if (range.MinFrameRate <= (desiredFrameRate + epsilon) && range.MaxFrameRate >= (desiredFrameRate - epsilon))
                {
                    var duration = new CMTime(1, desiredFrameRate, 0)
                    {
                        TimeFlags = CMTime.Flags.Valid
                    };
                    captureDevice.ActiveVideoMaxFrameDuration = duration;
                    captureDevice.ActiveVideoMinFrameDuration = duration;
                    break;
                }
            }

            captureDevice.UnlockForConfiguration();

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                ReleaseCaptureSession();
                ReleaseCaptureDevice();
                return;
            }

            captureSession.AddInput(input);
            captureVideoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                BackgroundColor = UIColor.Black.CGColor,
                VideoGravity    = AVLayerVideoGravity.ResizeAspectFill,
                Frame           = UIScreen.MainScreen.Bounds
            };
            captureVideoPreviewLayer.Connection.VideoOrientation = CameraHelper.VideoOrientationFromCurrentDeviceOrientation();

            View.Layer.InsertSublayer(captureVideoPreviewLayer, 0);

            var settings = new CVPixelBufferAttributes
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            using (var output = new AVCaptureVideoDataOutput {
                WeakVideoSettings = settings.Dictionary
            })
            {
                queue          = new DispatchQueue("cameraoutputqueue");
                outputRecorder = new CustomOutputRecorder();
                output.AlwaysDiscardsLateVideoFrames = true;
                output.SetSampleBufferDelegateQueue(outputRecorder, queue);
                captureSession.AddOutput(output);
                var connection = output.ConnectionFromMediaType(AVMediaType.Video);
                if (connection != null)
                {
                    connection.VideoOrientation = CameraHelper.VideoOrientationFromCurrentDeviceOrientation();
                }
            }

            captureSession.StartRunning();
        }
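
EnableCameraView assumes the app already holds camera permission. A hedged sketch of gating it on authorization (the wrapper method name is an assumption):

        async Task EnableCameraViewIfAuthorizedAsync()
        {
            var status = AVCaptureDevice.GetAuthorizationStatus(AVAuthorizationMediaType.Video);
            if (status == AVAuthorizationStatus.NotDetermined)
            {
                // Prompts the user on first use.
                if (!await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVAuthorizationMediaType.Video))
                {
                    return;
                }
            }
            else if (status != AVAuthorizationStatus.Authorized)
            {
                return;
            }
            EnableCameraView();
        }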
        public void SetupCamera()
        {
            AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, AVCaptureDevicePosition.Back);

            if (captureDevice == null)
            {
                Console.WriteLine("Could not create capture device");
                return;
            }
            CaptureDevice = captureDevice;
            if (captureDevice.SupportsAVCaptureSessionPreset(AVCaptureSession.Preset3840x2160))
            {
                captureSession.SessionPreset = AVCaptureSession.Preset3840x2160;
                BufferAspectRatio            = 3840.0 / 2160.0;
            }
            else
            {
                captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                BufferAspectRatio            = 1920.0 / 1080.0;
            }

            AVCaptureDeviceInput deviceInput = new AVCaptureDeviceInput(captureDevice, out NSError deviceInputErr);

            if (deviceInputErr != null)
            {
                Console.WriteLine("Could not create device input");
                deviceInputErr.Dispose();
                return;
            }
            if (captureSession.CanAddInput(deviceInput))
            {
                captureSession.AddInput(deviceInput);
            }

            VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            VideoDataOutput.SetSampleBufferDelegateQueue(this, VideoDataOutputQueue);
            //VideoDataOutput.WeakVideoSettings = new NSDictionary<NSString, NSString> ();
            //VideoDataOutput.WeakVideoSettings.TryAdd<NSString, NSString> (CVPixelBuffer.PixelFormatTypeKey, OSType);

            if (captureSession.CanAddOutput(VideoDataOutput))
            {
                captureSession.AddOutput(VideoDataOutput);
                VideoDataOutput.ConnectionFromMediaType(AVMediaType.Video).PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Off;
            }
            else
            {
                Console.WriteLine("Could not add VDO output");
            }

            _ = captureDevice.LockForConfiguration(out NSError lockConf);
            if (lockConf != null)
            {
                Console.WriteLine("Could not set zoom level due to error: " + lockConf);
                lockConf.Dispose();
                return;
            }
            captureDevice.VideoZoomFactor           = 2;
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            captureDevice.UnlockForConfiguration();

            captureSession.StartRunning();
        }
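
StartRunning blocks the calling thread until capture is live, so Apple recommends invoking it off the main thread; a one-line sketch:

        // Keeps the UI responsive while the session spins up.
        new DispatchQueue("session queue").DispatchAsync(() => captureSession.StartRunning());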
Example #9
        protected virtual void SetupAVCapture()
        {
            AVCaptureDeviceInput deviceInput;

            // Select a video device, make an input
            var videoDevice = AVCaptureDeviceDiscoverySession.Create(
                new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera },
                AVMediaType.Video,
                AVCaptureDevicePosition.Back
                ).Devices.FirstOrDefault();

            deviceInput = new AVCaptureDeviceInput(videoDevice, out NSError error);
            if (error != null)
            {
                Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
                return;
            }

            session.BeginConfiguration();
            session.SessionPreset = AVCaptureSession.Preset640x480; // Model image size is smaller

            // Add a video input
            if (!session.CanAddInput(deviceInput))
            {
                Console.WriteLine("Could not add video device input to the session");
                session.CommitConfiguration();
                return;
            }
            session.AddInput(deviceInput);

            // Add a video data output
            if (session.CanAddOutput(videoDataOutput))
            {
                session.AddOutput(videoDataOutput);
                videoDataOutput.AlwaysDiscardsLateVideoFrames = true;
                videoDataOutput.WeakVideoSettings             = new NSDictionary(CVPixelBuffer.PixelFormatTypeKey, CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange);
                videoDataOutput.SetSampleBufferDelegateQueue(this, videoDataOutputQueue);
            }
            else
            {
                Console.WriteLine("Could not add video data output to the session");
                session.CommitConfiguration();
                return;
            }

            var captureConnection = videoDataOutput.ConnectionFromMediaType(AVMediaType.Video);

            // Always process the frames
            captureConnection.Enabled = true;
            videoDevice.LockForConfiguration(out NSError error2);
            if (error2 == null)
            {
                var formatDescription        = videoDevice.ActiveFormat.FormatDescription as CMVideoFormatDescription;
                CMVideoDimensions dimensions = formatDescription.Dimensions;
                bufferSize.Width  = dimensions.Width;
                bufferSize.Height = dimensions.Height;
                videoDevice.UnlockForConfiguration();
            }
            else
            {
                Console.WriteLine($"{error2.LocalizedDescription}");
            }
            session.CommitConfiguration();
            previewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            rootLayer          = previewView.Layer;
            previewLayer.Frame = rootLayer.Bounds;
            rootLayer.AddSublayer(previewLayer);
        }
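
The Preset640x480 comment indicates frames feed a CoreML model. A hedged sketch of forwarding each frame to Vision from the delegate callback (the requests field is an assumed VNRequest[] prepared elsewhere):

        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput,
            CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            try
            {
                using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                {
                    if (pixelBuffer == null)
                    {
                        return;
                    }
                    // requests: assumed VNRequest[], e.g. a VNCoreMLRequest wrapping the model.
                    using (var handler = new VNImageRequestHandler(pixelBuffer, new VNImageOptions()))
                    {
                        handler.Perform(requests, out NSError visionError);
                    }
                }
            }
            finally
            {
                // As in the delegate sketch under Example #1: buffers must be
                // disposed promptly or frame delivery stalls.
                sampleBuffer.Dispose();
            }
        }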