private void InitialiseCaptureSession()
        {
            try
            {
                _captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
                NSError error;
                var input = new AVCaptureDeviceInput(captureDevice, out error);
                if (error != null)
                {
                    Console.WriteLine($"Error creating capture device input: {error}");
                }

                if (_captureSession.CanAddInput(input))
                {
                    _captureSession.AddInput(input);
                }

                var videoOutput = new AVCaptureVideoDataOutput();
                videoOutput.SetSampleBufferDelegateQueue(this, new DispatchQueue("sample buffer delegate"));

                if (_captureSession.CanAddOutput(videoOutput))
                {
                    _captureSession.AddOutput(videoOutput);
                }

                _captureSession.StartRunning();
            }
            catch (Exception ex)
            {
                Console.WriteLine($"InitialiseCaptureSession error: {ex}");
            }
        }
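Note: these examples assume the object passed to SetSampleBufferDelegateQueue (often "this", sometimes a dedicated class) implements the sample buffer delegate. A minimal sketch of such a delegate is shown below; the class name and body are illustrative, not part of the example above, and it requires the AVFoundation and CoreMedia namespaces.

        // Minimal sketch (assumption, not from the example above): a delegate that
        // receives each captured frame on the queue given to SetSampleBufferDelegateQueue.
        class FrameSamplerDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                // Inspect or copy the frame here, then dispose the buffer promptly
                // so the capture pipeline does not run out of buffers.
                sampleBuffer.Dispose();
            }
        }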
Example #2
        private bool addImageSamplerOutput(out string errorMessage, int minimumSampleIntervalInMilliSeconds)
        {
            errorMessage = "";

            // create a VideoDataOutput and add it to the capture session
            frameGrabberOutput = new AVCaptureVideoDataOutput();
            frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes()
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;
            // set up the output queue and delegate
            queue             = new CoreFoundation.DispatchQueue("captureQueue");
            videoFrameSampler = new VideoFrameSamplerDelegate();
            frameGrabberOutput.SetSampleBufferDelegateQueue(videoFrameSampler, queue);

            // subscribe to frame capture events
            videoFrameSampler.CaptureError  += new EventHandler <CaptureErrorEventArgs>(handleImageCaptureError);
            videoFrameSampler.ImageCaptured += new EventHandler <ImageCaptureEventArgs>(handleImageCaptured);

            // add the output to the session
            session.AddOutput(frameGrabberOutput);

            // set minimum time interval between image samples (if possible).
            try
            {
                AVCaptureConnection connection = (AVCaptureConnection)frameGrabberOutput.Connections[0];
                connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
            }
            catch
            {
                // setting a minimum frame duration is not supported on every device/connection; ignore failures
            }

            return(true);
        }
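For reference, the CMTime passed above is a rational value (Value over TimeScale seconds), so new CMTime(minimumSampleIntervalInMilliSeconds, 1000) represents that many milliseconds. A hypothetical call capping sampling at roughly five frames per second would be:

            // 200 ms minimum interval between samples, i.e. 200/1000 s per frame (~5 fps)
            addImageSamplerOutput(out string samplerError, 200);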
Example #3
        private void PrepareCaptureSession()
        {
            try
            {
                session = new AVCaptureSession
                {
                    SessionPreset = config.FrameQualityPreset,
                };

                captureDevice = config.Device;

                deviceInput  = AVCaptureDeviceInput.FromDevice(captureDevice);
                deviceOutput = new AVCaptureVideoDataOutput();

                deviceOutput.WeakVideoSettings = new CVPixelBufferAttributes {
                    PixelFormatType = config.PixelFormat
                }.Dictionary;
                deviceOutput.SetSampleBufferDelegateQueue(this, queue);

                session.AddInput(deviceInput);
                session.AddOutput(deviceOutput);
            }
            catch (Exception ex)
            {
                System.Console.WriteLine(ex.Message);
            }
        }
Example #4
        void setupVideoOutputCapture()
        {
            // create a VideoDataOutput and add it to the session
            var videoOut = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true,
                WeakVideoSettings             = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            if (session.CanAddOutput(videoOut))
            {
                session.AddOutput(videoOut);
            }

            var outputSampleDelegate = new VideoCapture(
                (s) =>
                {
                    Console.WriteLine("greetings Callback");
                    //GreetingsLabel.Text = s;
                    //PopulateList(s);
                },
                new Action <UIImage, CGRect>(DrawFaces));

            videoOut.SetSampleBufferDelegateQueue(outputSampleDelegate, sessionQueue);
        }
Example #5
        private void BeginSession()
        {
            try
            {
                var settings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                };
                videoOutput.WeakVideoSettings             = settings.Dictionary;
                videoOutput.AlwaysDiscardsLateVideoFrames = true;
                videoOutput.SetSampleBufferDelegateQueue(Delegate, queue);

                Session.SessionPreset = AVCaptureSession.Preset1920x1080;
                Session.AddOutput(videoOutput);

                var input = new AVCaptureDeviceInput(captureDevice, out var err);
                if (err != null)
                {
                    Console.Error.WriteLine("AVCapture error: " + err);
                }
                Session.AddInput(input);

                Session.StartRunning();
                Console.WriteLine("started AV capture session");
            }
            catch
            {
                Console.Error.WriteLine("error connecting to the capture device");
            }
        }
        bool AddImageSamplerOutput(out string errorMessage, int minimumSampleIntervalInMilliSeconds)
        {
            errorMessage = string.Empty;

            // create a VideoDataOutput and add it to the capture session
            frameGrabberOutput = new AVCaptureVideoDataOutput();
            frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;
            // set up the output queue and delegate
            queue             = new DispatchQueue("captureQueue");
            videoFrameSampler = new VideoFrameSamplerDelegate();
            frameGrabberOutput.SetSampleBufferDelegateQueue(videoFrameSampler, queue);

            // subscribe to frame capture events
            videoFrameSampler.CaptureError  += HandleImageCaptureError;
            videoFrameSampler.ImageCaptured += HandleImageCaptured;

            // add the output to the session
            session.AddOutput(frameGrabberOutput);

            // set minimum time interval between image samples (if possible).
            try {
                AVCaptureConnection connection = frameGrabberOutput.Connections[0];
                connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
            } catch (Exception ex) {
                Console.WriteLine(ex.Message);
            }

            return(true);
        }
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                Image <Bgr, Byte> img = new Image <Bgr, byte> (512, 512, new Bgr(255, 255, 255));
                CvInvoke.PutText(
                    img,
                    "Capture device not found.",
                    new Point(10, 200),
                    FontFace.HersheyComplex,
                    1,
                    new MCvScalar(),
                    2);
                ImageView.Image = img.ToUIImage();
                return(false);
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input device");
                return(false);
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();

            settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
            var output = new AVCaptureVideoDataOutput()
            {
                UncompressedVideoSetting = settingUncomp,

                // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
                //MinFrameDuration = new CMTime (1, 15)
            };


            // configure the output
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
            return(true);
        }
Example #8
        private void SetupAVCapture(NSString sessionPreset)
        {
            if ((this.VideoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(this.context)) == null)
            {
                Console.WriteLine("Could not create the CoreVideo TextureCache");
                return;
            }

            this.session = new AVCaptureSession();
            this.session.BeginConfiguration();

            // Preset size
            this.session.SessionPreset = sessionPreset;

            // Input device
            var videoDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            if (videoDevice == null)
            {
                Console.WriteLine("No video device");
                return;
            }

            var input = new AVCaptureDeviceInput(videoDevice, out NSError error);

            if (error != null)
            {
                Console.WriteLine("Error creating video capture device");
                return;
            }

            this.session.AddInput(input);

            // Create the output device
            using (var dataOutput = new AVCaptureVideoDataOutput())
            {
                dataOutput.AlwaysDiscardsLateVideoFrames = true;

                // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
                // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
                //VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)

                this.dataOutputDelegate = new DataOutputDelegate(this);

                //
                // This dispatches the video frames into the main thread, because the OpenGL
                // code is accessing the data synchronously.
                //
                dataOutput.SetSampleBufferDelegateQueue(this.dataOutputDelegate, DispatchQueue.MainQueue);
                this.session.AddOutput(dataOutput);
            }

            this.session.CommitConfiguration();
            this.session.StartRunning();
        }
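The commented-out VideoSettings line in the example above comes from an older binding; under the current API the same bi-planar YUV 420 format could presumably be requested through WeakVideoSettings, the same way other examples here request BGRA:

                // Assumed equivalent of the commented-out VideoSettings line above:
                dataOutput.WeakVideoSettings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
                }.Dictionary;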
Example #9
        public void ConfigureDeviceAndStart()
        {
            var device = GetDevice();

            if (device == null)
            {
                return;
            }

            try
            {
                if (device.LockForConfiguration(out var error))
                {
                    if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                    {
                        device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    }

                    device.UnlockForConfiguration();
                }

                // Configure Input
                var input = AVCaptureDeviceInput.FromDevice(device, out var error2);
                _captureSession.AddInput(input);

                // Configure Output
                var settings = new AVVideoSettingsUncompressed()
                {
                    PixelFormatType = CoreVideo.CVPixelFormatType.CV32BGRA
                };

                var videoOutput = new AVCaptureVideoDataOutput
                {
                    WeakVideoSettings             = settings.Dictionary,
                    AlwaysDiscardsLateVideoFrames = true
                };

                var videoCaptureQueue = new DispatchQueue("Video Queue");
                videoOutput.SetSampleBufferDelegateQueue(new OutputRecorder(View, _shapeLayer), videoCaptureQueue);

                if (_captureSession.CanAddOutput(videoOutput))
                {
                    _captureSession.AddOutput(videoOutput);
                }

                // Start session
                _captureSession.StartRunning();
            }
            catch (Exception e)
            {
                Console.Write(e);
            }
        }
Example #10
        private void SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };



            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                RenderImageMessage("Capture device not found.");

                return;
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                RenderImageMessage("No input device");

                return;
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();

            settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
            var output = new AVCaptureVideoDataOutput()
            {
                UncompressedVideoSetting = settingUncomp,

                // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
                //MinFrameDuration = new CMTime (1, 15)
            };


            // configure the output
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
        }
Example #11
        void SetupVideoCapture()
        {
            var settings = new AVVideoSettingsUncompressed();

            settings.PixelFormatType = CVPixelFormatType.CV32BGRA;

            videoCaptureDelegate = new XCameraVideoOutputDelegate(FrameCapturedHandler);
            queue = new DispatchQueue("XCamera.CameraQueue");

            videoOutput = new AVCaptureVideoDataOutput();
            videoOutput.UncompressedVideoSetting      = settings;
            videoOutput.AlwaysDiscardsLateVideoFrames = true;
            videoOutput.SetSampleBufferDelegateQueue(videoCaptureDelegate, queue);
        }
Example #12
        void Initialize()
        {
            Configuration.IsScanning = true;
            SizeChange();
            CaptureSession = new AVCaptureSession();
            CaptureSession.BeginConfiguration();
            this.Frame   = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width);
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width),
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };
            setPreviewOrientation();
            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = AVCaptureDevicePosition.Back;
            //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            CaptureSession.AddInput(input);
            CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset640x480;
            Layer.AddSublayer(previewLayer);

            CaptureSession.CommitConfiguration();
            CaptureSession.StartRunning();

            VideoDataOutput = new AVCaptureVideoDataOutput();
            VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            VideoDataOutput.WeakVideoSettings             = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;


            captureVideoDelegate             = new CaptureVideoDelegate();
            captureVideoDelegate.OnDetected += (list) =>
            {
                this.OnDetected?.Invoke(list);
                CaptureSession.StopRunning();
            };
            VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);

            CaptureSession.AddOutput(VideoDataOutput);
        }
Example #13
        void PrepareSession()
        {
            session = new AVCaptureSession();
            var captureDevice = frontCamera;

            if (session == null || captureDevice == null)
            {
                return;
            }

            try {
                var deviceInput = new AVCaptureDeviceInput(captureDevice, out var deviceInputError);
                if (deviceInputError != null)
                {
                    throw new NSErrorException(deviceInputError);
                }

                session.BeginConfiguration();

                if (session.CanAddInput(deviceInput))
                {
                    session.AddInput(deviceInput);
                }

                var output = new AVCaptureVideoDataOutput {
                    UncompressedVideoSetting = new AVVideoSettingsUncompressed {
                        PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
                    },
                    AlwaysDiscardsLateVideoFrames = true
                };

                if (session.CanAddOutput(output))
                {
                    session.AddOutput(output);
                }

                session.CommitConfiguration();

                var queue = new DispatchQueue("output.queue");
                output.SetSampleBufferDelegateQueue(this, queue);

                Console.WriteLine($"PrepareSession: Done setting up delegate");
            } catch (Exception ex) {
                Console.WriteLine($"PrepareSession Error: {ex.Message}");
            }
        }
Example #14
        public void SetupLiveCameraStream()
        {
            try
            {
                AVCaptureDevice frontCamera = null;

                var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
                frontCamera = devices.FirstOrDefault(device => device.Position == AVCaptureDevicePosition.Front); //Choose the front camera.


                ConfigureCameraForDevice(frontCamera);
                captureSession = new AVCaptureSession();
                var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                captureDeviceInput = AVCaptureDeviceInput.FromDevice(frontCamera);
                captureSession.AddInput(captureDeviceInput);
                var dictionary = new NSMutableDictionary();
                dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);

                //Configuring the output for frames to be captured.
                var settings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                };
                using (var output = new AVCaptureVideoDataOutput {
                    WeakVideoSettings = settings.Dictionary
                })
                {
                    queue          = new DispatchQueue("myQueue");
                    outputRecorder = new OutputRecorder();
                    output.SetSampleBufferDelegateQueue(outputRecorder, queue);
                    captureSession.AddOutput(output);
                }

                captureSession.StartRunning();

                DebugHelper.DisplayAnnouncement("CameraStream activated");
            }
            catch (Exception e)
            {
                DebugHelper.DisplayError(e);
            }
        }
        void ConfigureAVCaptureSession()
        {
            session = new AVCaptureSession();
            session.SessionPreset = sessionPreset;

            var device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            try
            {
                var input = new AVCaptureDeviceInput(device, out var error);

                if (error != null)
                {
                    throw new Exception(error.LocalizedDescription);
                }

                session.AddInput(input);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            var videoDataOutput      = new AVCaptureVideoDataOutput();
            var videoDataOutputQueue = new DispatchQueue("videodataqueue", false);

            videoDataOutput.SetSampleBufferDelegateQueue(this, videoDataOutputQueue);


            var settings = new AVVideoSettingsUncompressed()
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            videoDataOutput.WeakVideoSettings = settings.Dictionary;
            session.AddOutput(videoDataOutput);

            var connection = videoDataOutput.ConnectionFromMediaType(AVMediaType.Video);

            connection.Enabled = true;
        }
Example #16
        private void _setupOutputs()
        {
            captureSession.BeginConfiguration();

            if (this.tensorflowAnalysis)
            {
                //colors = new int[TensorflowLiteService.ModelInputSize * TensorflowLiteService.ModelInputSize];

                this.captureDelegate = new VideoCaptureDelegate(OnFrameCaptured);

                this.videoOutput = new AVCaptureVideoDataOutput();

                var settings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA,
                };
                videoOutput.WeakVideoSettings             = settings.Dictionary;
                videoOutput.AlwaysDiscardsLateVideoFrames = true;
                videoOutput.SetSampleBufferDelegateQueue(captureDelegate, queue);

                captureSession.AddOutput(videoOutput);
            }
            else
            {
                stillImageOutput = new AVCaptureStillImageOutput();
                captureSession.AddOutput(stillImageOutput);
            }

            captureSession.CommitConfiguration();

            if (this.tensorflowAnalysis)
            {
                _updateCameraQualityMode(AVCaptureSession.Preset352x288);
            }
            else
            {
                _updateCameraQualityMode(AVCaptureSession.PresetHigh);
            }
        }
        /// <summary>
        /// Starts a session with the camera, and creates the classes
        /// needed to view a video preview, and capture a still image.
        /// </summary>
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession()
            {
                SessionPreset = new NSString(AVCaptureSession.PresetHigh)
            };
            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame       = View.Frame,
                Orientation = GetCaptureOrientation(UIApplication.SharedApplication.StatusBarOrientation)
            };
            View.Layer.AddSublayer(videoPreviewLayer);

            AVCaptureDevice captureDevice =
                GetCameraForOrientation(AVCaptureDevicePosition.Back) ??
                GetCameraForOrientation(AVCaptureDevicePosition.Front) ??
                GetCameraForOrientation(AVCaptureDevicePosition.Unspecified);

            if (captureDevice == null)
            {
                (Element as LabelReader).CameraError(LabelReaderConstants.NoCameraMessage);
                return;
            }

            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            captureSession.AddInput(captureDeviceInput);

            videoDataOutput = new AVCaptureVideoDataOutput();

            videoDataOutput.SetSampleBufferDelegateQueue(this, new CoreFoundation.DispatchQueue("frameQueue"));

            captureSession.AddOutput(videoDataOutput);
            captureSession.StartRunning();

            // set last processed time to now so the handler for video frames will wait an appropriate length of time
            // before processing images.
            lastImageProcessedTime = DateTime.Now;
        }
Example #18
        public void BeginSession()
        {
            try
            {
                captureSession.BeginConfiguration();

                var settings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                };
                videoOutput.WeakVideoSettings             = settings.Dictionary;
                videoOutput.AlwaysDiscardsLateVideoFrames = true;
                videoOutput.SetSampleBufferDelegateQueue(this, queue);

                captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                captureSession.AddOutput(videoOutput);

                NSError err;
                var     input = new AVCaptureDeviceInput(captureDevice, out err);
                if (err != null)
                {
                    Console.WriteLine("AVCapture error: " + err);
                }
                captureSession.AddInput(input);

                videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);

                captureSession.CommitConfiguration();
                captureSession.StartRunning();
                Console.WriteLine("started AV capture session");
            }
            catch
            {
                Console.WriteLine("error connecting to the capture device");
            }
        }
Example #19
        private CameraPreview DispatchOpenWithPreviewAsync(Size previewRequestSize)
        {
            _session.BeginConfiguration();
            var videoOutput = new AVCaptureVideoDataOutput();
            var settings    = new AVVideoSettingsUncompressed {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            videoOutput.UncompressedVideoSetting      = settings;
            videoOutput.WeakVideoSettings             = settings.Dictionary;
            videoOutput.AlwaysDiscardsLateVideoFrames = true;

            var preview = new CameraPreview(previewRequestSize, new System.Drawing.Size(720, 1280));

            videoOutput.SetSampleBufferDelegateQueue(preview, new DispatchQueue("sample buffer"));

            _session.AddOutput(videoOutput);

            var videoConnection = videoOutput.ConnectionFromMediaType(AVMediaType.Video);

            videoConnection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            videoConnection.VideoMirrored    = true;

            _photoOutput = new AVCapturePhotoOutput
            {
                IsHighResolutionCaptureEnabled = true
            };
            _photoOutput.SetPreparedPhotoSettingsAsync(new[] { CreatePhotoSettings() });

            _session.SessionPreset = AVCaptureSession.Preset1280x720;
            _session.AddOutput(_photoOutput);
            _session.CommitConfiguration();
            _session.StartRunning();
            _isRunning = true;
            return(preview);
        }
	    bool SetupCaptureSession ()
		{
			//Console.WriteLine ("SetupCaptureSession");
			// Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
			// is used to deliver both audio and video buffers, and our video processing consistently takes
			// too long, the delivery queue can back up, resulting in audio being dropped.
			// 
			// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
			// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
			//				
			// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.
			
			// Create Capture session
			captureSession = new AVCaptureSession ();
			captureSession.BeginConfiguration ();
			
			// Create audio connection
			NSError error;
			var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
			if (audioDevice == null)
				return false; // e.g. simulator

			var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
			if (captureSession.CanAddInput (audioIn))
				captureSession.AddInput (audioIn);
			
			var audioOut = new AVCaptureAudioDataOutput ();
			var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

			// Add the Delegate to capture each sample that comes through
			audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);
			
			if (captureSession.CanAddOutput (audioOut))
				captureSession.AddOutput (audioOut);
			
			audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);
					
			// Create Video Session
			var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
			var videoIn = new AVCaptureDeviceInput (videoDevice, out error);
			
			if (captureSession.CanAddInput (videoIn))
				captureSession.AddInput (videoIn);
			
			// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
			// processing can take longer than real-time on some platforms (such as iPhone 3GS).
			// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
			// alwaysDiscardsLateVideoFrames property to NO.
			var videoOut = new AVCaptureVideoDataOutput {
				AlwaysDiscardsLateVideoFrames = true,
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};
			
			// Create a DispatchQueue for the Video Processing
			var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
			videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
			
			if (captureSession.CanAddOutput (videoOut))
				captureSession.AddOutput (videoOut);
			
			// Set the Video connection from the Video Output object
			videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
			videoOrientation = videoConnection.VideoOrientation;
			
			captureSession.CommitConfiguration ();
			
			return true;
		}
		void SetupAVCapture (NSString sessionPreset)
		{
			if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (context)) == null){
				Console.WriteLine ("Could not create the CoreVideo TextureCache");
				return;
			}
			session = new AVCaptureSession ();
			session.BeginConfiguration ();

			// Preset size
			session.SessionPreset = sessionPreset;

			// Input device
			var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (videoDevice == null){
				Console.WriteLine ("No video device");
				return;
			}
			NSError err;
			var input = new AVCaptureDeviceInput (videoDevice, out err);
			if (err != null){
				Console.WriteLine ("Error creating video capture device");
				return;
			}
			session.AddInput (input);

			// Create the output device
			var dataOutput = new AVCaptureVideoDataOutput () {
				AlwaysDiscardsLateVideoFrames = true,

				// YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
				// memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
				//VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
			};

			dataOutputDelegate = new DataOutputDelegate (this);

			//
			// This dispatches the video frames into the main thread, because the OpenGL
			// code is accessing the data synchronously.
			//
			dataOutput.SetSampleBufferDelegateQueue (dataOutputDelegate, DispatchQueue.MainQueue);
			session.AddOutput (dataOutput);
			session.CommitConfiguration ();
			session.StartRunning ();
		}
Example #22
        private void BeginSession()
        {
            NSError error       = null;
            var     deviceInput = new AVCaptureDeviceInput(captureDevice, out error);

            if (error == null && captureSession.CanAddInput(deviceInput))
            {
                captureSession.AddInput(deviceInput);
            }
            previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                VideoGravity = AVLayerVideoGravity.ResizeAspect
            };
            //this.HomeView.BackgroundColor = UIColor.Black;
            previewLayer.Frame = this.HomeView.Layer.Bounds;

            this.HomeView.Layer.AddSublayer(previewLayer);



            captureDevice.LockForConfiguration(out error);
            if (error != null)
            {
                Console.WriteLine(error);
                captureDevice.UnlockForConfiguration();
                return;
            }

            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
            {
                captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
            }
            captureDevice.UnlockForConfiguration();

            captureSession.StartRunning();

            // create a VideoDataOutput and add it to the session
            videoOut = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true,
                WeakVideoSettings             = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };



            if (captureSession.CanAddOutput(videoOut))
            {
                captureSession.AddOutput(videoOut);
            }


            captureSession.CommitConfiguration();

            var OutputSampleDelegate = new OutputSampleDelegate(
                (s) =>
            {
                GreetingsLabel.Text = s;
            }, new Action <CIImage, CGRect>(DrawFaces));

            videoOut.SetSampleBufferDelegateQueue(OutputSampleDelegate, sessionQueue);
        }
Example #23
        protected virtual void SetupAVCapture()
        {
            AVCaptureDeviceInput deviceInput;

            // Select a video device, make an input
            var videoDevice = AVCaptureDeviceDiscoverySession.Create(
                new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera },
                AVMediaType.Video,
                AVCaptureDevicePosition.Back
                ).Devices.FirstOrDefault();

            deviceInput = new AVCaptureDeviceInput(videoDevice, out NSError error);
            if (error != null)
            {
                Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
                return;
            }

            session.BeginConfiguration();
            session.SessionPreset = AVCaptureSession.Preset640x480; // Model image size is smaller

            // Add a video input
            if (!session.CanAddInput(deviceInput))
            {
                Console.WriteLine("Could not add video device input to the session");
                session.CommitConfiguration();
                return;
            }
            session.AddInput(deviceInput);

            if (session.CanAddOutput(videoDataOutput))
            {
                session.AddOutput(videoDataOutput);
                // Add a video data output
                videoDataOutput.AlwaysDiscardsLateVideoFrames = true;
                videoDataOutput.WeakVideoSettings             = new NSDictionary(CVPixelBuffer.PixelFormatTypeKey, CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange);
                videoDataOutput.SetSampleBufferDelegateQueue(this, videoDataOutputQueue);
            }
            else
            {
                Console.WriteLine("Could not add video data output to the session");
                session.CommitConfiguration();
                return;
            }

            var captureConnection = videoDataOutput.ConnectionFromMediaType(AVMediaType.Video);

            // Always process the frames
            captureConnection.Enabled = true;
            videoDevice.LockForConfiguration(out NSError error2);
            if (error2 == null)
            {
                var formatDescription        = videoDevice.ActiveFormat.FormatDescription as CMVideoFormatDescription;
                CMVideoDimensions dimensions = formatDescription.Dimensions;
                bufferSize.Width  = dimensions.Width;
                bufferSize.Height = dimensions.Height;
                videoDevice.UnlockForConfiguration();
            }
            else
            {
                Console.WriteLine($"{error2.LocalizedDescription}");
            }
            session.CommitConfiguration();
            previewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            rootLayer          = previewView.Layer;
            previewLayer.Frame = rootLayer.Bounds;
            rootLayer.AddSublayer(previewLayer);
        }
Example #24
        void EnableCameraView()
        {
            if (captureSession != null)
            {
                return;
            }

            captureSession = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.Preset1280x720
            };

            captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            if (captureDevice == null)
            {
                ReleaseCaptureSession();
                return;
            }

            captureDevice.LockForConfiguration(out NSError error);

            var    format  = captureDevice.ActiveFormat;
            double epsilon = 0.00000001;

            var desiredFrameRate = 30;

            captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
            foreach (var range in format.VideoSupportedFrameRateRanges)
            {
                if (range.MinFrameRate <= (desiredFrameRate + epsilon) && range.MaxFrameRate >= (desiredFrameRate - epsilon))
                {
                    var duration = new CMTime(1, desiredFrameRate, 0)
                    {
                        TimeFlags = CMTime.Flags.Valid
                    };
                    var minDuration = new CMTime(1, (int)range.MinFrameRate, 0)
                    {
                        TimeFlags = CMTime.Flags.Valid
                    };
                    captureDevice.ActiveVideoMaxFrameDuration = duration;
                    captureDevice.ActiveVideoMinFrameDuration = duration;
                    break;
                }
            }

            captureDevice.UnlockForConfiguration();

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                ReleaseCaptureSession();
                ReleaseCaptureDevice();
                return;
            }

            captureSession.AddInput(input);
            captureVideoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                BackgroundColor = UIColor.Black.CGColor,
                VideoGravity    = AVLayerVideoGravity.ResizeAspectFill,
                Frame           = UIScreen.MainScreen.Bounds
            };
            captureVideoPreviewLayer.Connection.VideoOrientation = CameraHelper.VideoOrientationFromCurrentDeviceOrientation();

            View.Layer.InsertSublayer(captureVideoPreviewLayer, 0);

            var settings = new CVPixelBufferAttributes
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            using (var output = new AVCaptureVideoDataOutput {
                WeakVideoSettings = settings.Dictionary
            })
            {
                queue          = new DispatchQueue("cameraoutputqueue");
                outputRecorder = new CustomOutputRecorder();
                output.AlwaysDiscardsLateVideoFrames = true;
                output.SetSampleBufferDelegateQueue(outputRecorder, queue);
                captureSession.AddOutput(output);
                var connection = output.ConnectionFromMediaType(AVMediaType.Video);
                if (connection != null)
                {
                    connection.VideoOrientation = CameraHelper.VideoOrientationFromCurrentDeviceOrientation();
                }
            }

            captureSession.StartRunning();
        }
        public void SetupCamera()
        {
            AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, AVCaptureDevicePosition.Back);

            if (captureDevice == null)
            {
                Console.WriteLine("Could not create capture device");
                return;
            }
            CaptureDevice = captureDevice;
            if (captureDevice.SupportsAVCaptureSessionPreset(AVCaptureSession.Preset3840x2160))
            {
                captureSession.SessionPreset = AVCaptureSession.Preset3840x2160;
                BufferAspectRatio            = 3840.0 / 2160.0;
            }
            else
            {
                captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                BufferAspectRatio            = 1920.0 / 1080.0;
            }

            AVCaptureDeviceInput deviceInput = new AVCaptureDeviceInput(captureDevice, out NSError deviceInputErr);

            if (deviceInputErr != null)
            {
                Console.WriteLine("Could not create device input");
                deviceInputErr.Dispose();
                return;
            }
            if (captureSession.CanAddInput(deviceInput))
            {
                captureSession.AddInput(deviceInput);
            }

            VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            VideoDataOutput.SetSampleBufferDelegateQueue(this, VideoDataOutputQueue);
            //VideoDataOutput.WeakVideoSettings = new NSDictionary<NSString, NSString> ();
            //VideoDataOutput.WeakVideoSettings.TryAdd<NSString, NSString> (CVPixelBuffer.PixelFormatTypeKey, OSType);

            if (captureSession.CanAddOutput(VideoDataOutput))
            {
                captureSession.AddOutput(VideoDataOutput);
                VideoDataOutput.ConnectionFromMediaType(AVMediaType.Video).PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Off;
            }
            else
            {
                Console.WriteLine("Could not add VDO output");
            }

            _ = captureDevice.LockForConfiguration(out NSError lockConf);
            if (lockConf != null)
            {
                Console.WriteLine("Could not set zoom level due to error: " + lockConf);
                lockConf.Dispose();
                return;
            }
            captureDevice.VideoZoomFactor           = 2;
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            captureDevice.UnlockForConfiguration();

            captureSession.StartRunning();
        }
Example #26
        public void SetupSession()
        {
            videoPreviewLayer.Session = captureSession;
            videoPreviewLayer.Frame   = liveCameraStream.Bounds;
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = GetBackCamera();

            ConfigureCameraForDevice(captureDevice);
            NSError err;

            videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice, out err);
            videoDataOutput  = new AVCaptureVideoDataOutput
            {
                AlwaysDiscardsLateVideoFrames = true
            };
            DispatchQueue queue = new DispatchQueue("dbrcameraQueue");

            if (captureSession.CanAddInput(videoDeviceInput))
            {
                captureSession.AddInput(videoDeviceInput);
                DispatchQueue.MainQueue.DispatchAsync(() =>
                {
                    var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                    var statusBarOrientation    = UIApplication.SharedApplication.StatusBarOrientation;
                    if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                    {
                        AVCaptureVideoOrientation videoOrientation;
                        if (Enum.TryParse(statusBarOrientation.ToString(), out videoOrientation))
                        {
                            initialVideoOrientation = videoOrientation;
                        }
                    }
                    videoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
                });
            }
            else if (err != null)
            {
                Console.WriteLine($"Could not create video device input: {err}");
                //this.setupResult = SessionSetupResult.ConfigurationFailed;
                this.captureSession.CommitConfiguration();
                return;
            }
            else
            {
                Console.WriteLine("Could not add video device input to the session");
                //this.setupResult = SessionSetupResult.ConfigurationFailed;
                this.captureSession.CommitConfiguration();
                return;
            }

            if (captureSession.CanAddOutput(videoDataOutput))
            {
                captureSession.AddOutput(videoDataOutput);
                captureOutput.reader = reader;
                captureOutput.update = ResetResults;

                videoDataOutput.SetSampleBufferDelegateQueue(captureOutput, queue);
                videoDataOutput.WeakVideoSettings = new NSDictionary <NSString, NSObject>(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromInt32((int)CVPixelFormatType.CV32BGRA));
            }
            else
            {
                Console.WriteLine("Could not add metadata output to the session");
                //this.setupResult = SessionSetupResult.ConfigurationFailed;
                captureSession.CommitConfiguration();

                return;
            }
            captureSession.CommitConfiguration();
        }
Example #27
        void Initialize(bool defaultTorchOn, bool vibrationOnDetected, bool startScanningOnCreate)
        {
            Configuration.IsScanning = startScanningOnCreate;
            CaptureSession           = new AVCaptureSession();
            CaptureSession.BeginConfiguration();
            this.AutoresizingMask = UIViewAutoresizing.FlexibleDimensions;
            previewLayer          = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = this.Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };
            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = AVCaptureDevicePosition.Back;
            //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);


            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            CaptureSession.AddInput(input);
            CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset1280x720;
            Layer.AddSublayer(previewLayer);

            CaptureSession.CommitConfiguration();



            VideoDataOutput = new AVCaptureVideoDataOutput
            {
                AlwaysDiscardsLateVideoFrames = true,
                WeakVideoSettings             = new CVPixelBufferAttributes {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }
                .Dictionary
            };


            captureVideoDelegate             = new CaptureVideoDelegate(vibrationOnDetected);
            captureVideoDelegate.OnDetected += (list) =>
            {
                InvokeOnMainThread(() => {
                    //CaptureSession.StopRunning();
                    this.OnDetected?.Invoke(list);
                });
            };
            VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);

            CaptureSession.AddOutput(VideoDataOutput);
            InvokeOnMainThread(() =>
            {
                CaptureSession.StartRunning();
                //Torch on by default
                if (defaultTorchOn && !GoogleVisionBarCodeScanner.Methods.IsTorchOn())
                {
                    GoogleVisionBarCodeScanner.Methods.ToggleFlashlight();
                }
            });
        }
Example #28
		private bool addImageSamplerOutput( out string errorMessage, int minimumSampleIntervalInMilliSeconds )
		{
			errorMessage = "";

			// create a VideoDataOutput and add it to the capture session
			frameGrabberOutput = new AVCaptureVideoDataOutput();
			frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes () { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
			// set up the output queue and delegate
			queue = new CoreFoundation.DispatchQueue ("captureQueue");
			videoFrameSampler = new VideoFrameSamplerDelegate();
			frameGrabberOutput.SetSampleBufferDelegateQueue (videoFrameSampler, queue);

			// subscribe to frame capture events
			videoFrameSampler.CaptureError += new EventHandler<CaptureErrorEventArgs>( handleImageCaptureError );
			videoFrameSampler.ImageCaptured += new EventHandler<ImageCaptureEventArgs>( handleImageCaptured );

			// add the output to the session
			session.AddOutput (frameGrabberOutput);

			// set minimum time interval between image samples (if possible).
			try
			{
				AVCaptureConnection connection = (AVCaptureConnection)frameGrabberOutput.Connections[0];
				connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
			}
			catch
			{
				// setting a minimum frame duration is not supported on every device/connection; ignore failures
			}

			return true;
		}
Example #29
        bool SetupCaptureSession()
        {
            if (CameraPreviewSettings.Instance.Decoder == null)
            {
                return(false);
            }

            var started = DateTime.UtcNow;

            var availableResolutions = new List <CameraResolution>();

            var consideredResolutions = new Dictionary <NSString, CameraResolution> {
                { AVCaptureSession.Preset352x288, new CameraResolution   {
                      Width = 352, Height = 288
                  } },
                { AVCaptureSession.PresetMedium, new CameraResolution    {
                      Width = 480, Height = 360
                  } },                                                                                      //480x360
                { AVCaptureSession.Preset640x480, new CameraResolution   {
                      Width = 640, Height = 480
                  } },
                { AVCaptureSession.Preset1280x720, new CameraResolution  {
                      Width = 1280, Height = 720
                  } },
                { AVCaptureSession.Preset1920x1080, new CameraResolution {
                      Width = 1920, Height = 1080
                  } }
            };

            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.Preset640x480
            };

            // create a device input and attach it to the session
            //          var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
            AVCaptureDevice captureDevice = null;
            var             devices       = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            foreach (var device in devices)
            {
                captureDevice = device;
                if (CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.HasValue &&
                    CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.Value &&
                    device.Position == AVCaptureDevicePosition.Front)
                {
                    break; //Front camera successfully set
                }
                else if (device.Position == AVCaptureDevicePosition.Back &&
                         (!CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.HasValue ||
                          !CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.Value))
                {
                    break; //Back camera successfully set
                }
            }
            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                return(false);
            }

            CameraResolution resolution = null;

            // Find resolution
            // Go through the resolutions we can even consider
            foreach (var cr in consideredResolutions)
            {
                // Now check to make sure our selected device supports the resolution
                // so we can add it to the list to pick from
                if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
                {
                    availableResolutions.Add(cr.Value);
                }
            }

            resolution = CameraPreviewSettings.Instance.ScannerOptions.GetResolution(availableResolutions);

            // See if the user selected a resolution
            if (resolution != null)
            {
                // Now get the preset string from the resolution chosen
                var preset = (from c in consideredResolutions
                              where c.Value.Width == resolution.Width &&
                              c.Value.Height == resolution.Height
                              select c.Key).FirstOrDefault();

                // If we found a matching preset, let's set it on the session
                if (!string.IsNullOrEmpty(preset))
                {
                    session.SessionPreset = preset;
                }
            }

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                return(false);
            }
            else
            {
                session.AddInput(input);
            }


            var startedAVPreviewLayerAlloc = PerformanceCounter.Start();

            previewLayer = new AVCaptureVideoPreviewLayer(session);

            PerformanceCounter.Stop(startedAVPreviewLayerAlloc, "Alloc AVCaptureVideoPreviewLayer took {0} ms.");

            var perf2 = PerformanceCounter.Start();

#if __UNIFIED__
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
            previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
            previewLayer.Frame    = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
            previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

            layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
            layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
            layerView.Layer.AddSublayer(previewLayer);

            this.AddSubview(layerView);

            ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);


            PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

            var perf3 = PerformanceCounter.Start();

            session.StartRunning();

            PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

            var perf4 = PerformanceCounter.Start();

            var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA),
                                                              CVPixelBuffer.PixelFormatTypeKey);


            // create a VideoDataOutput and add it to the session
            output = new AVCaptureVideoDataOutput
            {
                WeakVideoSettings = videoSettings
            };

            // configure the output
            queue          = new DispatchQueue("CamerPreviewView"); // (Guid.NewGuid().ToString());
            outputRecorder = new DefaultOutputRecorder(resultCallback);
            output.AlwaysDiscardsLateVideoFrames = true;
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);

            PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished.  Took {0} ms.");

            session.AddOutput(output);
            //session.StartRunning ();


            var perf5 = PerformanceCounter.Start();

            NSError err = null;
            if (captureDevice.LockForConfiguration(out err))
            {
                if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                {
                    captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                }
                else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
                {
                    captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
                }

                if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
                {
                    captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                }
                else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
                {
                    captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
                }

                if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
                {
                    captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
                }
                else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
                {
                    captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
                }

                if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
                {
                    captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
                }

                if (captureDevice.FocusPointOfInterestSupported)
                {
                    captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
                }

                if (captureDevice.ExposurePointOfInterestSupported)
                {
                    captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
                }

                captureDevice.UnlockForConfiguration();
            }
            else
            {
                Logger.Log("Failed to Lock for Config: " + err.Description);
            }

            PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

            return(true);
        }
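
        // The examples above hand a sample-buffer delegate (DefaultOutputRecorder / OutputRecorder) to
        // SetSampleBufferDelegateQueue but never show one. The sketch below is a hypothetical, minimal delegate -
        // not the actual DefaultOutputRecorder - showing how a 32BGRA frame is typically locked, read and released
        // (assumes the usual AVFoundation/CoreMedia/CoreVideo usings).
        class MinimalSampleBufferDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        if (pixelBuffer == null)
                        {
                            return;
                        }

                        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
                        nint   width       = pixelBuffer.Width;
                        nint   height      = pixelBuffer.Height;
                        nint   bytesPerRow = pixelBuffer.BytesPerRow;
                        IntPtr baseAddress = pixelBuffer.BaseAddress;
                        // ... scan or copy the raw BGRA bytes here (e.g. hand them to a barcode decoder) ...
                        pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
                    }
                }
                finally
                {
                    // Dispose sample buffers promptly, otherwise the capture pipeline stalls and frames are dropped.
                    sampleBuffer.Dispose();
                }
            }
        }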
        public void Initialize()
        {
            this.Frame     = new CGRect(new CGPoint(0, 0), new CGSize(mc_iPreviewWidth, mc_iPreviewHeight));
            m_AVCapSession = new AVCaptureSession();

            //m_AVCapDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

            var arCamDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            if (arCamDevice.Length != 0)
            {
                m_AVCapDevice = arCamDevice[0];
                //Prefer the back-facing camera when one is available
                foreach (AVCaptureDevice camDevice in arCamDevice)
                {
                    if (camDevice.Position == AVCaptureDevicePosition.Back)
                    {
                        m_AVCapDevice = camDevice;
                    }

                    /*
                     * if (camDevice.Position == AVCaptureDevicePosition.Back && m_iCameraDevice == 1)
                     * {
                     *  m_AVCapDevice = camDevice;
                     * }
                     */
                }

                if (m_AVCapDevice == null)
                {
                    m_AVCapDevice = arCamDevice[0];
                }
            }


            if (m_AVCapDevice == null)
            {
                return;
            }

            NSError device_error;

            m_AVCapDevice.LockForConfiguration(out device_error);
            if (device_error != null)
            {
                Console.WriteLine($"Error: {device_error.LocalizedDescription}");
                m_AVCapDevice.UnlockForConfiguration();
                return;
            }
            //Set the frame rate (minimum frame duration of 1/24 s, i.e. 24 fps)
            m_AVCapDevice.ActiveVideoMinFrameDuration = new CMTime(1, 24);
            m_AVCapDevice.UnlockForConfiguration();



            NSError error = null;

            try{
                //m_AVInput = new AVCaptureDeviceInput(m_AVCapDevice, out error);
                m_AVInput = AVCaptureDeviceInput.FromDevice(m_AVCapDevice, out error);
                if (error != null)
                {
                    Console.WriteLine(error.ToString());
                }
                else
                {
                    m_AVCapSession.AddInput(m_AVInput);
                    m_AVCapSession.BeginConfiguration();

                    if (m_AVCapSession.CanSetSessionPreset(AVCaptureSession.PresetHigh))
                    {
                        m_AVCapSession.SessionPreset = AVCaptureSession.PresetHigh;
                    }

                    m_AVCapSession.CommitConfiguration();

                    m_AVVideoOutput = new AVCaptureVideoDataOutput()
                    {
                        AlwaysDiscardsLateVideoFrames = true,
                        WeakVideoSettings             = new CVPixelBufferAttributes {
                            PixelFormatType = CVPixelFormatType.CV32BGRA
                        }.Dictionary
                    };

                    m_OutputRecorder = new OutputRecorder()
                    {
                        m_CustomCamera = m_CustomCamera
                    };
                    var Queue = new DispatchQueue("myQueue");
                    m_AVVideoOutput.SetSampleBufferDelegateQueue(m_OutputRecorder, Queue);

                    m_AVCapSession.AddOutput(m_AVVideoOutput);
                }

                m_prevAVLayer = new AVCaptureVideoPreviewLayer(m_AVCapSession)
                {
                    Frame        = new CGRect(new CGPoint(0, 0), new CGSize(mc_iPreviewWidth, mc_iPreviewHeight)),
                    VideoGravity = AVLayerVideoGravity.ResizeAspectFill
                };
                Layer.AddSublayer(m_prevAVLayer);

                m_AVCapSession.StartRunning();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
            }

            return;
        }
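
        // None of the samples above ask for camera permission before starting the session. The method below is a
        // minimal, hypothetical pre-flight check (a sketch, not part of any sample above); on iOS the app also needs
        // an NSCameraUsageDescription entry in Info.plist, plus the usual System.Threading.Tasks using.
        private async Task<bool> EnsureCameraAccessAsync()
        {
            var status = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
            if (status == AVAuthorizationStatus.Authorized)
            {
                return true;
            }

            if (status == AVAuthorizationStatus.NotDetermined)
            {
                // Prompts the user the first time; later calls just return the stored decision.
                return await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
            }

            // Denied or Restricted - the session would run but deliver no frames.
            return false;
        }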
Example #31
0
        public bool SetupCapture()
        {
            // configure the capture session for a moderate (720p) resolution; change this if your code
            // can cope with more data or volume
            captureSession = new AVCaptureSession()
            {
                //SessionPreset = AVCaptureSession.PresetPhoto
                SessionPreset = AVCaptureSession.Preset1280x720
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            if (captureDevice == null)
            {
                Console.WriteLine("No video capture device found");
                return(false);
            }

            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            if (captureDeviceInput == null)
            {
                Console.WriteLine("No video input device");
                return(false);
            }

            if (captureSession.CanAddInput(captureDeviceInput))
            {
                captureSession.AddInput(captureDeviceInput);
            }
            else
            {
                Console.WriteLine("Could not add input capture device to AVACaptureSession");
                return(false);
            }


            // create a VideoDataOutput and add it to the session
            AVCaptureVideoDataOutput output = new AVCaptureVideoDataOutput
            {
                AlwaysDiscardsLateVideoFrames = false, // true,
                WeakVideoSettings             = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV24RGB
                }.Dictionary //,

                // If you want to cap the frame rate at a given speed, in this sample: 30 frames per second
                //MinFrameDuration = new CMTime(1, 30)
            };


            CoreFoundation.DispatchQueue videoCaptureQueue = new CoreFoundation.DispatchQueue("Video Capture Queue");
            output.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

            if (captureSession.CanAddOutput(output))
            {
                captureSession.AddOutput(output);
            }
            else
            {
                return(false);
            }

            // add preview layer to this view controller's NSView
            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(captureSession);

            previewLayer.Frame        = this.View.Bounds;
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;

            if (this.View.Layer == null)
            {
                this.View.WantsLayer = true;
                this.View.Layer      = previewLayer;
            }
            else
            {
                this.View.WantsLayer = true;
                this.View.Layer.AddSublayer(previewLayer);
            }

            captureSession.StartRunning();

            return(true);
        }
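
        // SetupCapture above leaves the MinFrameDuration hint commented out. A minimal sketch of capping the frame
        // rate on current AVFoundation versions (assuming 'captureDevice' is the device already added to the session)
        // is to lock the device and bound its active frame duration:
        private void CapFrameRate(AVCaptureDevice captureDevice, int framesPerSecond)
        {
            NSError lockError;
            if (captureDevice.LockForConfiguration(out lockError))
            {
                // CMTime(1, fps) means one frame every 1/fps seconds
                captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, framesPerSecond);
                captureDevice.ActiveVideoMaxFrameDuration = new CMTime(1, framesPerSecond);
                captureDevice.UnlockForConfiguration();
            }
        }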
		bool AddImageSamplerOutput( out string errorMessage, int minimumSampleIntervalInMilliSeconds )
		{
			errorMessage = string.Empty;

			// create a VideoDataOutput and add it to the capture session
			frameGrabberOutput = new AVCaptureVideoDataOutput();
			frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
			// set up the output queue and delegate
			queue = new DispatchQueue ("captureQueue");
			videoFrameSampler = new VideoFrameSamplerDelegate();
			frameGrabberOutput.SetSampleBufferDelegateQueue (videoFrameSampler, queue);

			// subscribe to frame capture events
			videoFrameSampler.CaptureError += HandleImageCaptureError;
			videoFrameSampler.ImageCaptured += HandleImageCaptured;

			// add the output to the session
			session.AddOutput (frameGrabberOutput);

			// set minimum time interval between image samples (if possible).
			try {
				AVCaptureConnection connection = frameGrabberOutput.Connections[0];
				connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
			} catch (Exception ex) {
				Console.WriteLine (ex.Message);
			}

			return true;
		}
Example #33
0
        private void ConfigureSession()
        {
            if (setupResult == SessionSetupResult.Success)
            {
                this.session.BeginConfiguration();

                // Add video input
                // Choose the back wide angle camera if available, otherwise default to the front wide angle camera
                AVCaptureDevice defaultVideoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back) ??
                                                     AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);

                if (defaultVideoDevice == null)
                {
                    Console.WriteLine("Could not get video device");
                    this.setupResult = SessionSetupResult.ConfigurationFailed;
                    this.session.CommitConfiguration();
                    return;
                }

                //set the focus mode to AutoFocus
                if (defaultVideoDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                {
                    NSError errorConfig;
                    defaultVideoDevice.LockForConfiguration(out errorConfig);
                    defaultVideoDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    defaultVideoDevice.UnlockForConfiguration();
                }
                if (defaultVideoDevice.AutoFocusRangeRestrictionSupported)
                {
                    NSError errorConfig;
                    defaultVideoDevice.LockForConfiguration(out errorConfig);
                    defaultVideoDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
                    defaultVideoDevice.UnlockForConfiguration();
                }
                //end focus mode setting

                NSError error;
                var     videoDeviceInput = AVCaptureDeviceInput.FromDevice(defaultVideoDevice, out error);

                if (this.session.CanAddInput(videoDeviceInput))
                {
                    this.videoDeviceInput = videoDeviceInput;
                    this.session.AddInput(videoDeviceInput);

                    DispatchQueue.MainQueue.DispatchAsync(() =>
                    {
                        // Why are we dispatching this to the main queue?
                        // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView, and UIView
                        // can only be manipulated on the main thread.
                        // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                        // on the AVCaptureVideoPreviewLayer's connection with other session manipulation.
                        //
                        // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
                        // handled by CameraViewController.ViewWillTransition(to:with:).

                        var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                        var statusBarOrientation    = UIApplication.SharedApplication.StatusBarOrientation;
                        if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                        {
                            AVCaptureVideoOrientation videoOrientation;
                            if (Enum.TryParse(statusBarOrientation.ToString(), out videoOrientation))
                            {
                                initialVideoOrientation = videoOrientation;
                            }
                        }

                        this.PreviewView.VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
                    });
                }
                else if (error != null)
                {
                    Console.WriteLine($"Could not create video device input: {error}");
                    this.setupResult = SessionSetupResult.ConfigurationFailed;
                    this.session.CommitConfiguration();
                    return;
                }
                else
                {
                    Console.WriteLine("Could not add video device input to the session");
                    this.setupResult = SessionSetupResult.ConfigurationFailed;
                    this.session.CommitConfiguration();

                    return;
                }

                if (this.session.CanAddOutput(videoOutput))
                {
                    this.session.AddOutput(videoOutput);

                    frameExtractor.update = ResetResult;

                    videoOutput.SetSampleBufferDelegateQueue(frameExtractor, sampleBufferQueue);
                    videoOutput.WeakVideoSettings = new NSDictionary <NSString, NSObject>(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromInt32((int)CVPixelFormatType.CV32BGRA));

                    DispatchQueue.MainQueue.DispatchAsync(() =>
                    {
                        //var initialRegionOfInterest = this.PreviewView.VideoPreviewLayer.MapToLayerCoordinates(initialRectOfInterest);
                    });
                }
                else
                {
                    Console.WriteLine("Could not add metadata output to the session");
                    this.setupResult = SessionSetupResult.ConfigurationFailed;
                    this.session.CommitConfiguration();

                    return;
                }

                this.session.CommitConfiguration();
            }
        }
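
        // ConfigureSession's comment refers to CameraViewController.ViewWillTransition(to:with:). A rough sketch of
        // that override (assuming the same PreviewView/VideoPreviewLayer as above) keeps the preview orientation in
        // sync after the device rotates:
        public override void ViewWillTransitionToSize(CGSize toSize, IUIViewControllerTransitionCoordinator coordinator)
        {
            base.ViewWillTransitionToSize(toSize, coordinator);

            var previewConnection = this.PreviewView.VideoPreviewLayer.Connection;
            if (previewConnection == null || !previewConnection.SupportsVideoOrientation)
            {
                return;
            }

            AVCaptureVideoOrientation videoOrientation;
            switch (UIDevice.CurrentDevice.Orientation)
            {
                case UIDeviceOrientation.Portrait:           videoOrientation = AVCaptureVideoOrientation.Portrait;           break;
                case UIDeviceOrientation.PortraitUpsideDown: videoOrientation = AVCaptureVideoOrientation.PortraitUpsideDown; break;
                case UIDeviceOrientation.LandscapeLeft:      videoOrientation = AVCaptureVideoOrientation.LandscapeRight;     break;
                case UIDeviceOrientation.LandscapeRight:     videoOrientation = AVCaptureVideoOrientation.LandscapeLeft;      break;
                default: return; // face up/down or unknown - keep the current orientation
            }

            previewConnection.VideoOrientation = videoOrientation;
        }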
Example #34
0
        bool SetupCaptureSession()
        {
            //Console.WriteLine ("SetupCaptureSession");
            // Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
            // is used to deliver both audio and video buffers, and our video processing consistently takes
            // too long, the delivery queue can back up, resulting in audio being dropped.
            //
            // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
            // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
            //
            // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

            // Create Capture session
            captureSession = new AVCaptureSession();
            captureSession.BeginConfiguration();

            // Create audio connection
            NSError error;
            var     audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

            if (audioDevice == null)
            {
                return(false);                // e.g. simulator
            }
            var audioIn = new AVCaptureDeviceInput(audioDevice, out error);

            if (captureSession.CanAddInput(audioIn))
            {
                captureSession.AddInput(audioIn);
            }

            var audioOut          = new AVCaptureAudioDataOutput();
            var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

            // Add the Delegate to capture each sample that comes through
            audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

            if (captureSession.CanAddOutput(audioOut))
            {
                captureSession.AddOutput(audioOut);
            }

            audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

            // Create Video Session
            var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
            var videoIn     = new AVCaptureDeviceInput(videoDevice, out error);

            if (captureSession.CanAddInput(videoIn))
            {
                captureSession.AddInput(videoIn);
            }

            // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
            // processing can take longer than real-time on some platforms (such as iPhone 3GS).
            // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
            // alwaysDiscardsLateVideoFrames property to NO.
            var videoOut = new AVCaptureVideoDataOutput {
                AlwaysDiscardsLateVideoFrames = true,
                // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
                // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
                WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            // Create a DispatchQueue for the Video Processing
            var videoCaptureQueue = new DispatchQueue("Video Capture Queue");

            videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

            if (captureSession.CanAddOutput(videoOut))
            {
                captureSession.AddOutput(videoOut);
            }

            // Set the Video connection from the Video Output object
            videoConnection  = videoOut.ConnectionFromMediaType(AVMediaType.Video);
            videoOrientation = videoConnection.VideoOrientation;

            captureSession.CommitConfiguration();

            return(true);
        }
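
        // The header comment above describes a third GCD queue so AVAssetWriter is never asked to start or finish
        // writing from two threads at once. A minimal sketch of that idea (assuming an 'assetWriter' AVAssetWriter
        // field created elsewhere - it is not part of the sample above) funnels every writer state change through
        // one serial queue:
        DispatchQueue movieWritingQueue = new DispatchQueue("Movie Writing Queue");

        void StartRecording()
        {
            movieWritingQueue.DispatchAsync(() =>
            {
                if (assetWriter != null && assetWriter.Status == AVAssetWriterStatus.Unknown)
                {
                    assetWriter.StartWriting();
                    // StartSessionAtSourceTime is then called with the timestamp of the first appended sample buffer.
                }
            });
        }

        void StopRecording()
        {
            movieWritingQueue.DispatchAsync(() =>
            {
                if (assetWriter != null && assetWriter.Status == AVAssetWriterStatus.Writing)
                {
                    assetWriter.FinishWriting(() => Console.WriteLine("Finished writing the movie file"));
                }
            });
        }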
Example #35
0
bool SetupCaptureSession ()
      {
         // configure the capture session for low resolution, change this if your code
         // can cope with more data or volume
         session = new AVCaptureSession () {
            SessionPreset = AVCaptureSession.PresetMedium
         };

         // create a device input and attach it to the session
         var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
         if (captureDevice == null) {
            Image<Bgr, Byte> img = new Image<Bgr, byte> (512, 512, new Bgr (255, 255, 255));
            CvInvoke.PutText (
               img, 
               "Capture device not found.", 
               new Point (10, 200), 
               FontFace.HersheyComplex, 
               1, 
               new MCvScalar (), 
               2);
            ImageView.Image = img.ToUIImage();
            return false;
         }
         var input = AVCaptureDeviceInput.FromDevice (captureDevice);
         if (input == null){
            Console.WriteLine ("No input device");
            return false;
         }
         session.AddInput (input);

         // create a VideoDataOutput and add it to the session
         AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();
         settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
         var output = new AVCaptureVideoDataOutput () {
            UncompressedVideoSetting = settingUncomp,

            // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
            //MinFrameDuration = new CMTime (1, 15)
         };
        

         // configure the output
         queue = new DispatchQueue ("myQueue");
         outputRecorder = new OutputRecorder (ImageView);
         output.SetSampleBufferDelegateQueue(outputRecorder, queue);
         session.AddOutput (output);

         session.StartRunning ();
         return true;
      }
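
      // The OutputRecorder handed an ImageView above is not shown. A hypothetical, minimal version (a sketch, not
      // the actual class from this sample) that pushes each 32BGRA frame into the UIImageView might look like this:
      class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
      {
         readonly UIImageView imageView;

         public OutputRecorder (UIImageView imageView)
         {
            this.imageView = imageView;
         }

         public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
         {
            try {
               using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
                  if (pixelBuffer == null)
                     return;

                  using (var ciImage = new CIImage (pixelBuffer)) {
                     var uiImage = new UIImage (ciImage);
                     // UIKit may only be touched on the main thread
                     DispatchQueue.MainQueue.DispatchAsync (() => {
                        var previous = imageView.Image;
                        imageView.Image = uiImage;
                        previous?.Dispose ();
                     });
                  }
               }
            } finally {
               // release the sample buffer promptly so the capture pipeline keeps flowing
               sampleBuffer.Dispose ();
            }
         }
      }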