Example #1
        public void RecordVideoToPath(UIViewController ViewController, string VideoPath)
        {
            // setup capture device
            AVCaptureDevice videoRecordingDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            NSError error;
            AVCaptureDeviceInput videoInput = new AVCaptureDeviceInput(videoRecordingDevice, out error);

            // create and assign a capture session
            AVCaptureSession captureSession = new AVCaptureSession();
            captureSession.SessionPreset = AVCaptureSession.Preset1280x720;
            captureSession.AddInput(videoInput);

            // create the delegate for handling capture before it is attached to the output
            captureVideoDelegate = new CaptureVideoDelegate(ViewController);

            // create the capture device output and deliver 32BGRA frames to the delegate
            AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput();
            videoOutput.WeakVideoSettings = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;
            videoOutput.MinFrameDuration = new CMTime(1, 30);
            videoOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);
            captureSession.AddOutput(videoOutput);

            // Start capture session
            captureSession.StartRunning();
        }
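The CaptureVideoDelegate referenced above is not part of the snippet. A minimal sketch of what such a sample buffer delegate might look like (the stored controller and the frame handling are assumptions, not taken from the example):

        // Hypothetical sketch of the sample buffer delegate the example assumes.
        class CaptureVideoDelegate : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            readonly UIViewController controller;

            public CaptureVideoDelegate(UIViewController controller)
            {
                this.controller = controller;
            }

            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                // Process the frame here (e.g. read the pixel buffer, update the UI via controller),
                // then dispose the buffer promptly so the capture pipeline does not stall.
                sampleBuffer.Dispose();
            }
        }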
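Note that the VideoPath parameter is never used in the method above; AVCaptureVideoDataOutput only hands raw frames to the delegate. If the goal is to actually write the captured video to that path, one option (not shown in the original example) is AVCaptureMovieFileOutput. A rough sketch, where RecordingDelegate is a hypothetical class and the first three statements would live inside RecordVideoToPath after the session is configured:

        // Hypothetical: record straight to a file instead of processing raw frames.
        AVCaptureMovieFileOutput movieFileOutput = new AVCaptureMovieFileOutput();
        captureSession.AddOutput(movieFileOutput);
        movieFileOutput.StartRecordingToOutputFile(NSUrl.FromFilename(VideoPath), new RecordingDelegate());

        // Hypothetical delegate invoked when the file has been written (or recording fails).
        class RecordingDelegate : AVCaptureFileOutputRecordingDelegate
        {
            public override void FinishedRecording(AVCaptureFileOutput captureOutput, NSUrl outputFileUrl, NSObject[] connections, NSError error)
            {
                // handle completion or error here
            }
        }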
Example #3
        void Initialize()
        {
            Configuration.IsScanning = true;
            SizeChange();
            CaptureSession = new AVCaptureSession();
            CaptureSession.BeginConfiguration();
            this.Frame   = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width);
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width),
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };
            setPreviewOrientation();
            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = AVCaptureDevicePosition.Back;
            //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            CaptureSession.AddInput(input);
            CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset640x480;
            Layer.AddSublayer(previewLayer);

            CaptureSession.CommitConfiguration();
            CaptureSession.StartRunning();

            VideoDataOutput = new AVCaptureVideoDataOutput();
            VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            VideoDataOutput.WeakVideoSettings             = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;


            captureVideoDelegate             = new CaptureVideoDelegate();
            captureVideoDelegate.OnDetected += (list) =>
            {
                this.OnDetected?.Invoke(list);
                CaptureSession.StopRunning();
            };
            VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);

            CaptureSession.AddOutput(VideoDataOutput);
        }
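The variant above pins both the view and the preview layer to a square based on UIScreen.MainScreen.Bounds.Width. To keep the preview layer in sync if the view is later resized, one option (an assumption, not part of the original) is to update it in LayoutSubviews of the owning UIView subclass:

        // Hedged sketch: keep previewLayer sized to the view whenever layout changes.
        public override void LayoutSubviews()
        {
            base.LayoutSubviews();

            if (previewLayer != null)
            {
                previewLayer.Frame = Bounds;
            }
        }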
        void Initialize(bool defaultTorchOn, bool vibrationOnDetected, bool startScanningOnCreate)
        {
            Configuration.IsScanning = startScanningOnCreate;
            CaptureSession           = new AVCaptureSession();
            CaptureSession.BeginConfiguration();
            this.AutoresizingMask = UIViewAutoresizing.FlexibleDimensions;
            previewLayer          = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = this.Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };
            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = AVCaptureDevicePosition.Back;
            //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);


            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            CaptureSession.AddInput(input);
            CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset1280x720;
            Layer.AddSublayer(previewLayer);

            CaptureSession.CommitConfiguration();

            VideoDataOutput = new AVCaptureVideoDataOutput
            {
                AlwaysDiscardsLateVideoFrames = true,
                WeakVideoSettings             = new CVPixelBufferAttributes {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }
                .Dictionary
            };


            captureVideoDelegate             = new CaptureVideoDelegate(vibrationOnDetected);
            captureVideoDelegate.OnDetected += (list) =>
            {
                InvokeOnMainThread(() => {
                    //CaptureSession.StopRunning();
                    this.OnDetected?.Invoke(list);
                });
            };
            VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);

            CaptureSession.AddOutput(VideoDataOutput);
            InvokeOnMainThread(() =>
            {
                CaptureSession.StartRunning();
                //Torch on by default
                if (defaultTorchOn && !GoogleVisionBarCodeScanner.Methods.IsTorchOn())
                {
                    GoogleVisionBarCodeScanner.Methods.ToggleFlashlight();
                }
            });
        }
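Neither variant shows how the session is stopped or released when the scanner view goes away. A minimal teardown sketch, assuming the fields used above (CaptureSession, previewLayer); the method name and where it is called from are assumptions:

        // Hedged sketch: stop and release the capture session when the view is torn down.
        public void StopAndTearDown()
        {
            if (CaptureSession == null)
                return;

            if (CaptureSession.Running)
                CaptureSession.StopRunning();

            foreach (var output in CaptureSession.Outputs)
                CaptureSession.RemoveOutput(output);

            foreach (var input in CaptureSession.Inputs)
                CaptureSession.RemoveInput(input);

            previewLayer?.RemoveFromSuperLayer();
            CaptureSession.Dispose();
            CaptureSession = null;
        }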