Example #1
        private void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();
            captureSession.SessionPreset = AVCaptureSession.PresetMedium;
            videoPreviewLayer            = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame       = new CGRect(0f, 0f, View.Bounds.Width, View.Bounds.Height),
                Orientation = GetCameraForOrientation()
            };
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput       = AVCaptureDeviceInput.FromDevice(captureDevice);
            aVCaptureMovieFileOutput = new AVCaptureMovieFileOutput();

            var audioDevice      = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
            var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice);


            captureSession.AddOutput(aVCaptureMovieFileOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.AddInput(audioDeviceInput);
            aVCaptureMovieFileOutput.ConnectionFromMediaType(AVMediaType.Video).VideoOrientation = GetCameraForOrientation();
            captureSession.StartRunning();
        }
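A note on device lookup: DefaultDeviceWithMediaType is the classic API used throughout these examples; on iOS 10+ the device-type overload is generally preferred. A minimal sketch, assuming the built-in wide-angle back camera is the one wanted:

        AVCaptureDevice GetVideoDevice()
        {
            // Hypothetical helper: prefer the iOS 10+ lookup when available,
            // falling back to the classic API shown in these examples.
            if (UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
            {
                return AVCaptureDevice.GetDefaultDevice(
                    AVCaptureDeviceType.BuiltInWideAngleCamera,
                    AVMediaType.Video,
                    AVCaptureDevicePosition.Back);
            }
            return AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
        }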
Example #2
        public void RecordVideoToPath(UIViewController ViewController, string VideoPath)
        {
            // setup capture device
            AVCaptureDevice      videoRecordingDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            NSError              error;
            AVCaptureDeviceInput videoInput = new AVCaptureDeviceInput(videoRecordingDevice, out error);

            // create and assign a capture session
            AVCaptureSession captureSession = new AVCaptureSession();

            captureSession.SessionPreset = AVCaptureSession.Preset1280x720;
            captureSession.AddInput(videoInput);

            // Create capture device output
            AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput();

            captureSession.AddOutput(videoOutput);
            videoOutput.VideoSettings.PixelFormat = CVPixelFormatType.CV32BGRA;
            videoOutput.MinFrameDuration          = new CMTime(1, 30);
            // create the delegate before handing it to the output; the original
            // wired up the delegate while it was still null
            captureVideoDelegate = new CaptureVideoDelegate(ViewController);
            videoOutput.SetSampleBufferDelegatequeue(captureVideoDelegate, System.IntPtr.Zero);

            // Start capture session
            captureSession.StartRunning();
        }
Example #3
        /// <summary>
        /// Start camera preview
        /// </summary>
        public override void StartCamera()
        {
            if (Session == null)
            {
                Session = new AVCaptureSession();

                Device = Configuration.ShowBackCameraFirst
                    ? AVCaptureDevice.Devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back)
                    : AVCaptureDevice.Devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);

                if (Device == null)
                {
                    NoCameraAvailable();
                    Console.WriteLine("Could not find capture device, does your device have a camera?");
                    return;
                }

                try
                {
                    NSError error;
                    VideoInput = new AVCaptureDeviceInput(Device, out error);

                    Session.AddInput(VideoInput);

                    _videoOutput = new AVCaptureMovieFileOutput {
                        MinFreeDiskSpaceLimit = 1024 * 1024
                    };

                    if (Session.CanAddOutput(_videoOutput))
                    {
                        Session.AddOutput(_videoOutput);
                    }

                    if (Configuration.RecordAudio)
                    {
                        var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

                        _audioInput = new AVCaptureDeviceInput(audioDevice, out error);
                        if (Session.CanAddInput(_audioInput))
                        {
                            Session.AddInput(_audioInput);
                        }
                    }

                    if (Configuration.DetectFaces)
                    {
                        SetupFaceDetection();
                    }

                    SetupVideoPreviewLayer();

                    Session.StartRunning();
                }
                catch { /* ignore */ }

                FlashConfiguration(true);
            }

            base.StartCamera();
        }
Example #4
        public void TurnOff()
        {
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                return;
            }

            NSError error = null;

            captureDevice.LockForConfiguration(out error);
            if (error != null)
            {
                captureDevice.UnlockForConfiguration();
                return;
            }
            else
            {
                if (captureDevice.TorchMode != AVCaptureTorchMode.Off)
                {
                    captureDevice.TorchMode = AVCaptureTorchMode.Off;
                }
                captureDevice.UnlockForConfiguration();
            }
        }
Example #5
        private void SetupLiveCameraStream()
        {
            captureSession    = new AVCaptureSession();
            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame       = liveCameraStream.Bounds,
                Orientation = GetCameraForOrientation()
            };
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            stillImageOutput = new AVCaptureStillImageOutput()
            {
                // actually assign the JPEG settings; the original built a codec
                // dictionary and then passed an empty NSDictionary instead
                OutputSettings = new AVVideoSettingsCompressed { Codec = AVVideoCodec.JPEG }.Dictionary
            };
            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            stillImageOutput.ConnectionFromMediaType(AVMediaType.Video).VideoOrientation = GetCameraForOrientation();
            captureSession.StartRunning();
        }
Example #6
        public void SetupLiveCameraStream()
        {
            CaptureSession = new AVCaptureSession();

            var viewLayer = this.Layer;

            videoPreviewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame = this.Frame
            };
            this.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            CaptureSession.AddInput(captureDeviceInput);

            stillImageOutput = new AVCaptureStillImageOutput()
            {
                // assign the JPEG settings; the codec dictionary was previously
                // built and then discarded in favor of an empty NSDictionary
                OutputSettings = new AVVideoSettingsCompressed { Codec = AVVideoCodec.JPEG }.Dictionary
            };

            CaptureSession.AddOutput(stillImageOutput);
            CaptureSession.StartRunning();
        }
Example #7
        public void Focus(PointF pointOfInterest)
        {
            //Get the device
            var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                return;
            }

            //See if it supports focusing on a point
            if (device.FocusPointOfInterestSupported && !device.AdjustingFocus)
            {
                NSError err = null;

                //Lock device to config
                if (device.LockForConfiguration(out err))
                {
                    Console.WriteLine("Focusing at point: " + pointOfInterest.X + ", " + pointOfInterest.Y);

                    //Focus at the point touched
                    device.FocusPointOfInterest = pointOfInterest;
                    device.FocusMode            = AVCaptureFocusMode.ContinuousAutoFocus;
                    device.UnlockForConfiguration();
                }
            }
        }
Example #8
        /// <summary>
        /// Turn the lamp off
        /// </summary>
        public void TurnOff()
        {
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                Debug.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                return;
            }

            NSError error = null;

            captureDevice.LockForConfiguration(out error);
            if (error != null)
            {
                Debug.WriteLine(error);
                captureDevice.UnlockForConfiguration();
                return;
            }
            else
            {
                if (captureDevice.TorchMode != AVCaptureTorchMode.Off)
                {
                    captureDevice.TorchMode = AVCaptureTorchMode.Off;
                }
                captureDevice.UnlockForConfiguration();
            }
        }
Example #9
        public void SetTorch(bool on)
        {
            try
            {
                NSError err;

                var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                device.LockForConfiguration(out err);

                if (on)
                {
                    device.TorchMode = AVCaptureTorchMode.On;
                    device.FlashMode = AVCaptureFlashMode.On;
                }
                else
                {
                    device.TorchMode = AVCaptureTorchMode.Off;
                    device.FlashMode = AVCaptureFlashMode.Off;
                }

                device.UnlockForConfiguration();
                device = null;

                torch = on;
            }
            catch { }
        }
Example #10
        private bool initScanner()
        {
            device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (device == null)
            {
                this.Debug("AVCaptureDevice is null");

                return(false);
            }

            input = AVCaptureDeviceInput.FromDevice(device);

            if (input == null)
            {
                this.Debug("AVCaptureDeviceInput is null");

                return(false);
            }

            output = new AVCaptureMetadataOutput();
            output.SetDelegate(this, DispatchQueue.MainQueue);

            session = new AVCaptureSession();
            session.AddInput(input);
            session.AddOutput(output);
            output.MetadataObjectTypes = configuration.Barcodes.ConvertToIOS();

            captureVideoPreviewLayer              = AVCaptureVideoPreviewLayer.FromSession(session);
            captureVideoPreviewLayer.Frame        = CGRect.Empty;
            captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            captureVideoPreviewLayer.Connection.VideoOrientation = getDeviceOrientation();

            return(true);
        }
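Example #10 wires the containing class up as the metadata delegate via SetDelegate(this, ...); a standalone delegate would subclass AVCaptureMetadataOutputObjectsDelegate. A minimal sketch (the handler body is an illustrative assumption):

        // Hypothetical delegate showing the callback initScanner relies on.
        class BarcodeDelegate : AVCaptureMetadataOutputObjectsDelegate
        {
            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput,
                AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                foreach (var metadata in metadataObjects)
                {
                    // machine-readable codes carry the decoded payload in StringValue
                    if (metadata is AVMetadataMachineReadableCodeObject code)
                        Console.WriteLine("{0}: {1}", code.Type, code.StringValue);
                }
            }
        }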
Example #11
        public void RefcountTest()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 7, 0, throwIfOtherPlatform: false);

            // Bug #27205

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
            case AVAuthorizationStatus.NotDetermined:
                Assert.Inconclusive("This test requires video recording permissions.");
                return;
            }

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    foreach (var format in videoDevice.Formats)
                    {
                        for (int i = 0; i < 10; i++)
                        {
                            using (var f = format.FormatDescription) {
                            }
                        }
                    }
                }
            }
        }
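Tests like the one above bail out when camera permission is missing; in application code you would normally request access before building a session. A minimal sketch, assuming NSCameraUsageDescription is set in Info.plist:

        // Hypothetical helper: prompt for camera access up front.
        async Task<bool> EnsureCameraAccessAsync()
        {
            if (AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video) == AVAuthorizationStatus.Authorized)
                return true;

            // shows the system prompt on first call; false if denied or restricted
            return await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
        }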
Example #12
        void ShowCameraPreview()
        {
            var captureSession = new AVCaptureSession();

            var viewLayer = liveCameraStream.Layer;

            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = this.View.Frame
            };
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            captureSession.AddInput(captureDeviceInput);

            stillImageOutput = new AVCaptureStillImageOutput()
            {
                // assign the JPEG settings; the codec dictionary was previously
                // built and then discarded in favor of an empty NSDictionary
                OutputSettings = new AVVideoSettingsCompressed { Codec = AVVideoCodec.JPEG }.Dictionary
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.StartRunning();

            captureButton.Hidden = false;
            cancelButton.Hidden  = false;
        }
Example #13
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                return(false);
            }
            //Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
            NSError error = null;

            captureDevice.LockForConfiguration(out error);
            if (error != null)
            {
                Console.WriteLine(error);
                captureDevice.UnlockForConfiguration();
                return(false);
            }
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
            {
                captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
            }
            captureDevice.UnlockForConfiguration();

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                return(false);
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            var output = new AVCaptureVideoDataOutput()
            {
                WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary,
            };

            // configure the output
            queue          = new CoreFoundation.DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder();
            output.SetSampleBufferDelegate(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
            return(true);
        }
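Several examples hand frames to an OutputRecorder without showing it. A minimal sketch of such a sample-buffer delegate (the per-frame work is an assumption); disposing the buffer promptly matters, or the capture pipeline runs out of buffers and stalls:

        // Hypothetical minimal OutputRecorder for the SetSampleBufferDelegate call above.
        public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            public override void DidOutputSampleBuffer(AVCaptureVideoDataOutput captureOutput,
                CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
                    {
                        // process the BGRA frame here (placeholder)
                    }
                }
                finally
                {
                    // release the buffer or the session stops delivering frames
                    sampleBuffer.Dispose();
                }
            }
        }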
Example #14
        public void RefcountTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(7, 0))
            {
                Assert.Ignore("This test uses iOS 7 API");
            }

            // Bug #27205

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
            case AVAuthorizationStatus.NotDetermined:
                Assert.Inconclusive("This test requires video recording permissions.");
                return;
            }

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    foreach (var format in videoDevice.Formats)
                    {
                        for (int i = 0; i < 10; i++)
                        {
                            using (var f = format.FormatDescription) {
                            }
                        }
                    }
                }
            }
        }
Example #15
        private void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };

            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            stillImageOutput = new AVCaptureStillImageOutput
            {
                OutputSettings = new NSDictionary()
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();
        }
Example #16
        void SetupLiveCameraStream()
        {
            _captureSession = new AVCaptureSession();

            var viewLayer = _liveCameraStream.Layer;

            _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
            {
                Frame = _liveCameraStream.Bounds
            };

            _liveCameraStream.AddObserver("bounds", NSKeyValueObservingOptions.New, ObservedBoundsChange);

            _liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            _stillImageOutput = new AVCaptureStillImageOutput {
                // assign the JPEG settings instead of an empty NSDictionary,
                // which silently discarded the codec choice
                OutputSettings = new AVVideoSettingsCompressed { Codec = AVVideoCodec.JPEG }.Dictionary
            };

            _captureSession.AddOutput(_stillImageOutput);
            _captureSession.AddInput(_captureDeviceInput);
            _captureSession.StartRunning();
        }
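The KVO subscription above exists to keep the preview sized to its host view; the handler itself isn't shown. A plausible sketch (an assumption):

        // Hypothetical ObservedBoundsChange: resize the preview on layout changes.
        void ObservedBoundsChange(NSObservedChange change)
        {
            if (_videoPreviewLayer != null)
                _videoPreviewLayer.Frame = _liveCameraStream.Bounds;
        }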
Example #17
        public async void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            var viewLayer         = liveCameraStream.Layer;
            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };

            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            stillImageOutput = new AVCaptureStillImageOutput()
            {
                // actually assign the JPEG settings; the original built a codec
                // dictionary and then passed an empty NSDictionary instead
                OutputSettings = new AVVideoSettingsCompressed { Codec = AVVideoCodec.JPEG }.Dictionary
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();

            await SkinSelfie.Pages.CameraPage.ShowTip();
        }
Example #18
        /// <summary>
        /// Initializes this instance.
        /// </summary>
        private void Initialize()
        {
            var captureSession = new AVCaptureSession();

            _previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
                Frame        = Bounds
            };

            var device = AVCaptureDevice.DefaultDeviceWithMediaType(
                AVMediaType.Video);

            if (device == null)
            {
                System.Diagnostics.Debug.WriteLine("No device detected.");
                return;
            }

            NSError error;

            var input = new AVCaptureDeviceInput(device, out error);

            captureSession.AddInput(input);

            Layer.AddSublayer(_previewLayer);

            captureSession.StartRunning();
        }
Example #19
        private void TryStart()
        {
            if (contentLayer != null)
            {
                session = new AVCaptureSession();

                var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

                var input = AVCaptureDeviceInput.FromDevice(camera);
                session.AddInput(input);

                // create a VideoDataOutput and add it to the session
                var settings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                };

                using (var output = new AVCaptureVideoDataOutput {
                    WeakVideoSettings = settings.Dictionary
                })
                {
                    queue          = new DispatchQueue("s4mQueue");
                    outputRecorder = new OutputRecorder();
                    output.SetSampleBufferDelegate(outputRecorder, queue);
                    session.AddOutput(output);
                }

                this.contentLayer.Session = session;

                session.StartRunning();
            }
        }
Example #20
        void Compare(NSString constant, AVMediaTypes value)
        {
            Assert.That(AVCaptureDevice.GetDefaultDevice(constant), Is.EqualTo(AVCaptureDevice.GetDefaultDevice(value)), value.ToString());
#if !XAMCORE_4_0
            Assert.That(AVCaptureDevice.GetDefaultDevice(constant), Is.EqualTo(AVCaptureDevice.DefaultDeviceWithMediaType((string)constant)), value.ToString() + ".compat");
#endif
        }
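The test asserts that the enum overload and the NSString-constant overload resolve to the same device, so either lookup style works in application code:

        // Equivalent lookups; the enum form avoids raw NSString constants.
        var byEnum   = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
        var byString = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);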
Example #21
        public void MetadataObjectTypesTest()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                switch (val)
                                {
                                case AVMetadataObjectType.CatBody:
                                case AVMetadataObjectType.DogBody:
                                case AVMetadataObjectType.HumanBody:
                                case AVMetadataObjectType.SalientObject:
                                    // fail *and crash* on iOS 8 (at least on 32bits devices)
                                    if (!TestRuntime.CheckXcodeVersion(11, 0))
                                    {
                                        continue;
                                    }
                                    // xcode 12 beta 1 on device
                                    if ((Runtime.Arch == Arch.DEVICE) && TestRuntime.CheckXcodeVersion(12, 0))
                                    {
                                        continue;
                                    }
                                    break;
                                }
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #22
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                Image <Bgr, Byte> img = new Image <Bgr, byte> (512, 512, new Bgr(255, 255, 255));
                CvInvoke.PutText(
                    img,
                    "Capture device not found.",
                    new Point(10, 200),
                    FontFace.HersheyComplex,
                    1,
                    new MCvScalar(),
                    2);
                ImageView.Image = img.ToUIImage();
                return(false);
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input device");
                return(false);
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();

            settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
            var output = new AVCaptureVideoDataOutput()
            {
                UncompressedVideoSetting = settingUncomp,

                // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
                //MinFrameDuration = new CMTime (1, 15)
            };


            // configure the output
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
            return(true);
        }
Example #23
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession();

            AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                Console.WriteLine("No video camera (in simulator?)");
                return(false);                // simulator?
            }

            NSError error = null;

            AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error);

            if (input == null)
            {
                Console.WriteLine("Error: " + error);
            }
            else
            {
                session.AddInput(input);
            }

            AVCaptureMetadataOutput output = new AVCaptureMetadataOutput();

            var dg = new CaptureDelegate(this);

            output.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
            session.AddOutput(output);

            // This could be any list of supported barcode types
            output.MetadataObjectTypes = new NSString[] { AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode };
            // OR you could just accept "all" with the following line;
//			output.MetadataObjectTypes = output.AvailableMetadataObjectTypes;  // empty
            // DEBUG: use this if you're curious about the available types
//			foreach (var t in output.AvailableMetadataObjectTypes)
//				Console.WriteLine(t);


            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session);

            //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height);
            previewLayer.Frame        = new RectangleF(0, 0, 320, 290);
            previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill"; // Enum.ToString() would not yield the native constant string
            View.Layer.AddSublayer(previewLayer);

            session.StartRunning();

            Console.WriteLine("StartRunning");
            return(true);
        }
Example #24
        void SetupAVCapture(NSString sessionPreset)
        {
            if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context)) == null)
            {
                Console.WriteLine("Could not create the CoreVideo TextureCache");
                return;
            }
            session = new AVCaptureSession();
            session.BeginConfiguration();

            // Preset size
            session.SessionPreset = sessionPreset;

            // Input device
            var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (videoDevice == null)
            {
                Console.WriteLine("No video device");
                return;
            }
            NSError err;
            var     input = new AVCaptureDeviceInput(videoDevice, out err);

            if (err != null)
            {
                Console.WriteLine("Error creating video capture device");
                return;
            }
            session.AddInput(input);

            // Create the output device
            var dataOutput = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true,

                // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
                // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
                VideoSettings = new AVVideoSettings(CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
            };

            dataOutputDelegate = new DataOutputDelegate(this);

            //
            // This dispatches the video frames into the main thread, because the OpenGL
            // code is accessing the data synchronously.
            //
            dataOutput.SetSampleBufferDelegateAndQueue(dataOutputDelegate, DispatchQueue.MainQueue);
            session.AddOutput(dataOutput);
            session.CommitConfiguration();
            session.StartRunning();
        }
Example #25
        /// <summary>
        /// Retrieves the camera device.
        /// </summary>
        /// <returns><c>true</c>, if camera device was retrieved, <c>false</c> otherwise.</returns>
        public bool RetrieveCameraDevice()
        {
            _device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (_device == null)
            {
                _log.WriteLineTime(_tag + "\n" + "RetrieveCameraDevice() No device detected \n ");

                return(false);
            }

            return(true);
        }
Example #26
        public void MetadataObjectTypesTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Ignore("Test only works correctly in iOS 8+");
            }

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
                Assert.Fail("This test requires access to the camera, but the app has been denied access.");
                break;
            }

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #27
        private void SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };



            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                RenderImageMessage("Capture device not found.");

                return;
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                RenderImageMessage("No input device");

                return;
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();

            settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
            var output = new AVCaptureVideoDataOutput()
            {
                UncompressedVideoSetting = settingUncomp,

                // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
                //MinFrameDuration = new CMTime (1, 15)
            };


            // configure the output
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
        }
Example #28
        private void Focus(UITapGestureRecognizer recognizer)
        {
            var point    = recognizer.LocationInView(this);
            var viewSize = Bounds.Size;
            var newPoint =
                new CGPoint(point.Y / viewSize.Height, 1.0 - point.X / viewSize.Width);

            var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            NSError error;

            if (device.LockForConfiguration(out error))
            {
                if (device.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
                {
                    device.FocusMode            = AVCaptureFocusMode.AutoFocus;
                    device.FocusPointOfInterest = newPoint;
                }

                if (device.IsExposureModeSupported(
                        AVCaptureExposureMode.ContinuousAutoExposure))
                {
                    device.ExposureMode            = AVCaptureExposureMode.ContinuousAutoExposure;
                    device.ExposurePointOfInterest = newPoint;
                }

                device.UnlockForConfiguration();
            }

            FocusView.Alpha             = 0;
            FocusView.Center            = point;
            FocusView.BackgroundColor   = UIColor.Clear;
            FocusView.Layer.BorderColor = Configuration.BaseTintColor.CGColor;
            FocusView.Layer.BorderWidth = 1;
            FocusView.Transform         = CGAffineTransform.MakeScale(1.0f, 1.0f);
            Add(FocusView);

            AnimateNotify(0.8, 0.0, 0.8f, 3.0f, UIViewAnimationOptions.CurveEaseIn, () =>
            {
                FocusView.Alpha     = 1;
                FocusView.Transform = CGAffineTransform.MakeScale(0.7f, 0.7f);
            }, finished =>
            {
                FocusView.Transform = CGAffineTransform.MakeScale(1.0f, 1.0f);
                FocusView.RemoveFromSuperview();
            });
        }
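Example #28 maps the tap into the device's normalized, landscape-oriented coordinate space by hand; when a preview layer is available, AVFoundation can do the mapping instead. A sketch, assuming a previewLayer field exists:

        // Hypothetical alternative to the manual conversion above.
        var devicePoint = previewLayer.CaptureDevicePointOfInterestForPoint(
            recognizer.LocationInView(this));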
Example #29
        public void Torch(bool on)
        {
            try
            {
                var device = captureDevice ?? AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                if (device != null && (device.HasTorch || device.HasFlash))
                {
                    device.LockForConfiguration(out var err);

                    // only touch the torch/flash if the configuration lock succeeded
                    if (err == null)
                    {
                        if (on)
                        {
                            if (device.HasTorch)
                            {
                                device.TorchMode = AVCaptureTorchMode.On;
                            }
                            if (device.HasFlash)
                            {
                                device.FlashMode = AVCaptureFlashMode.On;
                            }
                        }
                        else
                        {
                            if (device.HasTorch)
                            {
                                device.TorchMode = AVCaptureTorchMode.Off;
                            }
                            if (device.HasFlash)
                            {
                                device.FlashMode = AVCaptureFlashMode.Off;
                            }
                        }
                    }

                    try
                    {
                        device.UnlockForConfiguration();
                    }
                    catch { }
                }

                torch = on;
            }
            catch { }
        }
Example #30
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                // No input device
                return(false);
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                // No input device
                return(false);
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            var output = new AVCaptureVideoDataOutput()
            {
                VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA)
            };

            // configure the output
            queue     = new DispatchQueue("myQueue");
            qrScanner = new QrScanner(this);
            output.SetSampleBufferDelegateAndQueue(qrScanner, queue);
            session.AddOutput(output);

            previewLayer              = new AVCaptureVideoPreviewLayer(session);
            previewLayer.Orientation  = AVCaptureVideoOrientation.Portrait;
            previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill";

            session.StartRunning();
            return(true);
        }