/******************************* MAIN FUNCTIONS *******************************/
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			CaptureSession = new AVCaptureSession();
			CaptureSession.AddInput (input);

			var captureMetadataOutput = new AVCaptureMetadataOutput();
			metadataDelegate = new MetadataObjectsDelegate();
			metadataDelegate.outer = this;
			captureMetadataOutput.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
			CaptureSession.AddOutput(captureMetadataOutput);
			captureMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

			VideoPreviewLayer = new AVCaptureVideoPreviewLayer (CaptureSession);
			VideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			VideoPreviewLayer.Frame = View.Layer.Bounds;
			View.Layer.AddSublayer (VideoPreviewLayer);

			View.BringSubviewToFront (messageLabel);

			QRCodeFrameView = new UIView ();
			QRCodeFrameView.Layer.BorderColor = UIColor.Green.CGColor;
			QRCodeFrameView.Layer.BorderWidth = 2;
			View.AddSubview (QRCodeFrameView);
			View.BringSubviewToFront (QRCodeFrameView);

			CaptureSession.StartRunning();

			cancelButton.Clicked += (sender, e) => {
				this.DismissViewController (true, null);
			};
		}
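This example assumes camera access has already been granted (and, on iOS 10+, that NSCameraUsageDescription is present in Info.plist). A minimal sketch of checking authorization before starting the session, using the same authorization APIs that appear in later examples:

			// Sketch only: verify camera permission before calling StartRunning.
			var status = AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video);
			if (status != AVAuthorizationStatus.Authorized) {
				AVCaptureDevice.RequestAccessForMediaType (AVMediaType.Video, granted => {
					// Start the session on the main queue once the user grants access.
					if (granted)
						DispatchQueue.MainQueue.DispatchAsync (() => CaptureSession.StartRunning ());
				});
			}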
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
        {
            if (codeFrame is null || preview is null)
            {
                return;
            }

            if (metadataObjects.Length == 0)
            {
                codeFrame.Frame = CGRect.Empty;
                return;
            }

            if (!(metadataObjects[0] is AVMetadataMachineReadableCodeObject readableObject) ||
                readableObject.Type != AVMetadataObjectType.QRCode)
            {
                return;
            }

            var qrObject = preview.GetTransformedMetadataObject(readableObject);

            codeFrame.Frame = qrObject.Bounds;

            if (readableObject.StringValue is string value)
            {
                Detected?.Invoke(this, new BarcodeDetectedEventArgs(value));
            }
        }
 public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
 {
     if (OnCapture != null && metadataObjects != null)
     {
         OnCapture(metadataObjects);
     }
 }
Example #4
        void setupAVFoundationFaceDetection()
        {
            faceViews = new Dictionary <int, FaceView> ();

            metadataOutput = new AVCaptureMetadataOutput();
            if (!session.CanAddOutput(metadataOutput))
            {
                metadataOutput = null;
                return;
            }

            var metaDataObjectDelegate = new MetaDataObjectDelegate();

            metaDataObjectDelegate.DidOutputMetadataObjectsAction = DidOutputMetadataObjects;

            metadataOutput.SetDelegate(metaDataObjectDelegate, DispatchQueue.MainQueue);
            session.AddOutput(metadataOutput);

            if (!metadataOutput.AvailableMetadataObjectTypes.Contains(AVMetadataObject.TypeFace))
            {
                teardownAVFoundationFaceDetection();
                return;
            }

            metadataOutput.MetadataObjectTypes = new NSString[] { AVMetadataObject.TypeFace };
            updateAVFoundationFaceDetection();
        }
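The updateAVFoundationFaceDetection method is not shown in this example. A minimal sketch of what it might do, assuming face detection is toggled by enabling or disabling the output's connections (the faceDetectionEnabled flag is hypothetical):

        // Sketch only: enable/disable delivery of face metadata without
        // tearing down the output. faceDetectionEnabled is an assumed flag.
        void updateAVFoundationFaceDetection ()
        {
            if (metadataOutput == null)
                return;

            foreach (var connection in metadataOutput.Connections)
                connection.Enabled = faceDetectionEnabled;
        }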
Example #6
        private bool InitScanner(BarcodeScanner.BarcodeFormat barcodeType)
        {
            device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            if (device == null)
            {
                return(false);
            }

            input = AVCaptureDeviceInput.FromDevice(device);
            if (input == null)
            {
                return(false);
            }

            if (input.Device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                input.Device.LockForConfiguration(out NSError err);
                input.Device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                input.Device.UnlockForConfiguration();
            }

            output = new AVCaptureMetadataOutput();
            output.SetDelegate(this, DispatchQueue.MainQueue);

            session = new AVCaptureSession();
            session.AddInput(input);
            session.AddOutput(output);
            output.MetadataObjectTypes = GetBarcodeFormat(barcodeType);

            captureVideoPreviewLayer              = AVCaptureVideoPreviewLayer.FromSession(session);
            captureVideoPreviewLayer.Frame        = CGRect.Empty;
            captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            captureVideoPreviewLayer.Connection.VideoOrientation = GetDeviceOrientation();
            return(true);
        }
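The GetBarcodeFormat and GetDeviceOrientation helpers are not shown. A sketch of what GetBarcodeFormat might look like, assuming a simple cross-platform BarcodeFormat enum (the member names are hypothetical):

        // Hypothetical mapping from the scanner's format enum to AVFoundation types.
        static AVMetadataObjectType GetBarcodeFormat(BarcodeScanner.BarcodeFormat barcodeType)
        {
            switch (barcodeType)
            {
            case BarcodeScanner.BarcodeFormat.QrCode:  return AVMetadataObjectType.QRCode;
            case BarcodeScanner.BarcodeFormat.Ean13:   return AVMetadataObjectType.EAN13Code;
            case BarcodeScanner.BarcodeFormat.Code128: return AVMetadataObjectType.Code128Code;
            default:                                   return AVMetadataObjectType.None;
            }
        }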
Example #7
        public void Start()
        {
            captureSession = new AVCaptureSession();
            previewLayer   = new AVCaptureVideoPreviewLayer(captureSession)
            {
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };

            try
            {
                var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
                var input         = AVCaptureDeviceInput.FromDevice(captureDevice);
                var output        = new AVCaptureMetadataOutput();
                var queue         = new DispatchQueue("qrQueue");

                captureSession.AddInput(input);
                captureSession.AddOutput(output);

                output.SetDelegate(this, queue);
                output.MetadataObjectTypes = AVMetadataObjectType.QRCode;

                Layer.AddSublayer(previewLayer);

                captureSession.StartRunning();
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #8
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
        {
            // resetEvent is used to drop new notifications if old ones are still processing, to avoid queuing up a bunch of stale data.
            //★★★, 20180831, hm.ji: for continuous scanning, the value below must be set to 0.
            if (this.resetEvent.WaitOne(0))
            {
                DispatchQueue.MainQueue.DispatchAsync(() =>
                {
                    this.RemoveMetadataObjectOverlayLayers();
                    this.AddMetadataOverlayLayers(metadataObjects.Select(this.CreateMetadataOverlay));

                    //OnScanCompleted?.Invoke("EXIT");
                    //DismissViewController(true, null);

                    //if (this.AllScanBarcode.Count == this.SaveCompletedBarcode.Count + this.ScanCompletedBarcode.Count)
                    //{
                    //    Task.Delay(500).Wait();
                    //}
                    //else
                    //{
                    //    //Specify the interval between consecutive scans
                    //Task.Delay(1000).ContinueWith((t) => resetEvent.Set());
                    //Thread.Sleep(1000);
                    //}
                    //Task.Delay(1000).Wait();

                    resetEvent.Set();
                });
            }
        }
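The resetEvent field is not shown. Given the drop-if-busy behavior described in the comment, it is presumably an event that starts signalled and resets when WaitOne succeeds, e.g. (a sketch, assuming System.Threading):

            // Starts signalled so the first callback proceeds; WaitOne(0) consumes
            // the signal, so later callbacks are dropped until Set() is called.
            readonly AutoResetEvent resetEvent = new AutoResetEvent(true);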
        private void InitSession()
        {
            try
            {
                //init capture session
                _AVSession = new AVCaptureSession();

                //check permissions
                var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
                if (authorizationStatus != AVAuthorizationStatus.Authorized)
                {
                    return;
                }

                //check capture camera
                var cameras = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
                var camera  = cameras.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
                if (camera == null)
                {
                    return;
                }

                //add input to capture session
                _AVDeviceImput = new AVCaptureDeviceInput(camera, out NSError _);
                if (_AVSession.CanAddInput(_AVDeviceImput))
                {
                    _AVSession.AddInput(_AVDeviceImput);
                }
                else
                {
                    return;
                }

                //add output to camera session
                _MetadataObjectsQueue = new DispatchQueue("metadata objects queue");
                _AVMetadataOutput     = new AVCaptureMetadataOutput();
                if (_AVSession.CanAddOutput(_AVMetadataOutput))
                {
                    _AVSession.AddOutput(_AVMetadataOutput);
                }
                else
                {
                    return;
                }
                _AVMetadataOutput.SetDelegate(this, _MetadataObjectsQueue);

                //init the video preview layer and add it to the current view
                _AVVideoPeviewLayer = new AVCaptureVideoPreviewLayer(_AVSession);
                _AVVideoPeviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                _AVVideoPeviewLayer.Frame        = Bounds;
                this.Layer.AddSublayer(_AVVideoPeviewLayer);

                //start capture session
                StartSession(true);
            }
            catch (Exception ex)
            {
                Console.WriteLine("IOS_SCAN | init error", ex);
            }
        }
Example #10
            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                foreach (AVMetadataObject metadataObject in metadataObjects)
                {
                    var readableObject = metadataObject as AVMetadataMachineReadableCodeObject;
                    if (readableObject != null)
                    {
                        string text = this._view.ScannedBarcode(readableObject.StringValue);
                        if (!string.IsNullOrEmpty(text) && this.Buffer.Add(text) > 0)
                        {
                            this._view.ScanOccurred(text);
                        }
                    }
                    else
                    {
                        iApp.Log.Info("Invalid AVMetadataObject type: " + metadataObject.Type);
                    }
                }
            }
Example #11
		public override bool FinishedLaunching (UIApplication application, NSDictionary launchOptions)
		{
			var output = new AVCaptureMetadataOutput (); //AVMetadataObjectType
			output.MetadataObjectTypes = AVMetadataObjectType.Face | AVMetadataObjectType.AztecCode;
			System.Console.WriteLine (output.MetadataObjectTypes);
			return true;
		}
        private bool initScanner()
        {
            device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (device == null)
            {
                this.Debug("AVCaptureDevice is null");

                return(false);
            }

            input = AVCaptureDeviceInput.FromDevice(device);

            if (input == null)
            {
                this.Debug("AVCaptureDeviceInput is null");

                return(false);
            }

            output = new AVCaptureMetadataOutput();
            output.SetDelegate(this, DispatchQueue.MainQueue);

            session = new AVCaptureSession();
            session.AddInput(input);
            session.AddOutput(output);
            output.MetadataObjectTypes = configuration.Barcodes.ConvertToIOS();

            captureVideoPreviewLayer              = AVCaptureVideoPreviewLayer.FromSession(session);
            captureVideoPreviewLayer.Frame        = CGRect.Empty;
            captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            captureVideoPreviewLayer.Connection.VideoOrientation = getDeviceOrientation();

            return(true);
        }
Example #13
        void setupAVFoundationFaceDetection()
        {
            faceViews = new Dictionary <int, FaceView>();

            metadataOutput = new AVCaptureMetadataOutput();
            if (!captureSession.CanAddOutput(metadataOutput))
            {
                metadataOutput = null;
                return;
            }

            var metaDataObjectDelegate = new MetaDataObjectDelegate();

            metaDataObjectDelegate.DidOutputMetadataObjectsAction = DidOutputMetadataObjects;

            metadataOutput.SetDelegate(metaDataObjectDelegate, DispatchQueue.MainQueue);
            captureSession.AddOutput(metadataOutput);



            if (!metadataOutput.AvailableMetadataObjectTypes.HasFlag(AVMetadataObjectType.Face))
            {
                //teardownAVFoundationFaceDetection();
                return;
            }

            metadataOutput.MetadataObjectTypes = AVMetadataObjectType.Face;

            //DispatchQueue.MainQueue.DispatchAsync();
            //sessionQueue.DispatchAsync(updateAVFoundationFaceDetection);
            //updateAVFoundationFaceDetection();
        }
Example #14
 public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
 {
     if (DidOutputMetadataObjectsAction != null)
     {
         DidOutputMetadataObjectsAction(captureOutput, faces, connection);
     }
 }
Example #15
            public static CaptureSession Create(SelfView parent)
            {
                // create a device input and attach it to the session
                var captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video).FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);

                if (captureDevice == null)
                {
                    return(null);
                }

                var input = AVCaptureDeviceInput.FromDevice(captureDevice);

                if (input == null)
                {
                    return(null);
                }

                var output = new AVCaptureMetadataOutput();
                var cs     = new CaptureSession(parent, input, output);

                // This must be set after the output is added to the session
                output.MetadataObjectTypes = AVMetadataObjectType.Face;

                return(cs);
            }
        public void Defaults()
        {
            using (var obj = new AVCaptureMetadataOutput()) {
#if XAMCORE_2_0
                Assert.AreEqual(AVMetadataObjectType.None, obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");

                Assert.IsNotNull(obj.WeakAvailableMetadataObjectTypes, "WeakAvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakAvailableMetadataObjectTypes.Length, "WeakAvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.WeakMetadataObjectTypes, "WeakMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakMetadataObjectTypes.Length, "WeakMetadataObjectTypes#");
#else
                Assert.IsNotNull(obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.AvailableMetadataObjectTypes.Length, "AvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.MetadataObjectTypes, "MetadataObjectTypes");
                Assert.AreEqual(0, obj.MetadataObjectTypes.Length, "MetadataObjectTypes#");
#endif
                if (TestRuntime.CheckSystemVersion(PlatformName.iOS, 7, 0, throwIfOtherPlatform: false))
                {
                    Assert.AreEqual(new RectangleF(0, 0, 1, 1), obj.RectOfInterest, "RectOfInterest");
                }

#if XAMCORE_2_0
                obj.WeakMetadataObjectTypes = null;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
                obj.MetadataObjectTypes = AVMetadataObjectType.None;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
#else
                obj.MetadataObjectTypes = null;
                Assert.IsNotNull(obj.MetadataObjectTypes, "MetadataObjectTypes");
                Assert.AreEqual(0, obj.MetadataObjectTypes.Length, "MetadataObjectTypes#");
#endif
                obj.SetDelegate(null, null);
            }
        }
Example #17
        public void MetadataObjectTypesTest()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                switch (val)
                                {
                                case AVMetadataObjectType.CatBody:
                                case AVMetadataObjectType.DogBody:
                                case AVMetadataObjectType.HumanBody:
                                case AVMetadataObjectType.SalientObject:
                                    // fail *and crash* on iOS 8 (at least on 32-bit devices)
                                    if (!TestRuntime.CheckXcodeVersion(11, 0))
                                    {
                                        continue;
                                    }
                                    // xcode 12 beta 1 on device
                                    if ((Runtime.Arch == Arch.DEVICE) && TestRuntime.CheckXcodeVersion(12, 0))
                                    {
                                        continue;
                                    }
                                    break;
                                }
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #18
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
        {
            //List<int> unseen = faceViews.Keys.ToList();
            //List<int> seen = new List<int>();

            //CATransaction.Begin();
            //CATransaction.SetValueForKey(NSObject.FromObject(true), (NSString)(CATransaction.DisableActions.ToString()));

            //foreach (var face in faces) {
            //    // HACK: int faceId = (face as AVMetadataFaceObject).FaceID;
            //    int faceId = (int)(face as AVMetadataFaceObject).FaceID;
            //    unseen.Remove(faceId);
            //    seen.Add(faceId);

            //    FaceView view;
            //    if (faceViews.ContainsKey(faceId))
            //        view = faceViews[faceId];
            //    else {
            //        view = new FaceView();
            //        view.Layer.CornerRadius = 10;
            //        view.Layer.BorderWidth = 3;
            //        view.Layer.BorderColor = UIColor.Green.CGColor;
            //        previewView.AddSubview(view);
            //        faceViews.Add(faceId, view);
            //        view.Id = faceId;
            //        view.Callback = TouchCallBack;
            //        if (lockedFaceID != null)
            //            view.Alpha = 0;
            //    }

            //    AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject(face);
            //    view.Frame = adjusted.Bounds;
            //}

            //foreach (int faceId in unseen) {
            //    FaceView view = faceViews[faceId];
            //    view.RemoveFromSuperview();
            //    faceViews.Remove(faceId);
            //    if (faceId == lockedFaceID)
            //        clearLockedFace();
            //}

            //if (lockedFaceID != null) {
            //    FaceView view = faceViews[lockedFaceID.GetValueOrDefault()];
            //    // HACK: Cast resulting nfloat to float
            //    // float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor;
            //    float size = (float)(Math.Max(view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor);
            //    float zoomDelta = lockedFaceSize / size;
            //    float lockTime = (float)(CATransition.CurrentMediaTime() - this.lockTime);
            //    float zoomRate = (float)(Math.Log(zoomDelta) / lockTime);
            //    if (Math.Abs(zoomDelta) > 0.1)
            //        device.RampToVideoZoom(zoomRate > 0 ? MaxZoom : 1, zoomRate);
            //}

            //CATransaction.Commit();
        }
			public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
			{
				foreach(var m in metadataObjects)
				{
					if(m is AVMetadataMachineReadableCodeObject)
					{
						MetadataFound(this, m as AVMetadataMachineReadableCodeObject);
					}
				}
			}
Example #20
 public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
 {
     if (metadataObjects.Length > 0 && metadataObjects[0] is AVMetadataMachineReadableCodeObject readableObject)
     {
         if (!string.IsNullOrWhiteSpace(readableObject.StringValue))
         {
             OnQRCodeScanned?.Invoke(this, readableObject.StringValue);
         }
     }
 }
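Wiring this up from the owning page might look like the following (a sketch; the scannerView name is hypothetical, and OnQRCodeScanned is assumed to be an EventHandler&lt;string&gt;):

     // Hypothetical usage: subscribe before the capture session starts running.
     scannerView.OnQRCodeScanned += (sender, value) => Console.WriteLine("Scanned: " + value);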
Example #21
        void teardownAVFoundationFaceDetection()
        {
            if (metadataOutput != null)
            {
                session.RemoveOutput(metadataOutput);
            }

            metadataOutput = null;
            faceViews      = null;
        }
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
        {
            List<int> unseen = faceViews.Keys.ToList ();
            List<int> seen = new List<int> ();

            CATransaction.Begin ();
            CATransaction.SetValueForKey (NSObject.FromObject (true), (NSString) (CATransaction.DisableActions.ToString ()));

            foreach (var face in faces) {
                int faceId = (face as AVMetadataFaceObject).FaceID;
                unseen.Remove (faceId);
                seen.Add (faceId);

                FaceView view;
                if (faceViews.ContainsKey (faceId))
                    view = faceViews [faceId];
                else {
                    view = new FaceView ();
                    view.Layer.CornerRadius = 10;
                    view.Layer.BorderWidth = 3;
                    view.Layer.BorderColor = UIColor.Green.CGColor;
                    previewView.AddSubview (view);
                    faceViews.Add (faceId, view);
                    view.Id = faceId;
                    view.Callback = TouchCallBack;
                    if (lockedFaceID != null)
                        view.Alpha = 0;
                }

                AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject (face);
                view.Frame = adjusted.Bounds;
            }

            foreach (int faceId in unseen) {
                FaceView view = faceViews [faceId];
                view.RemoveFromSuperview ();
                faceViews.Remove (faceId);
                if (faceId == lockedFaceID)
                    clearLockedFace ();
            }

            if (lockedFaceID != null) {
                FaceView view = faceViews [lockedFaceID.GetValueOrDefault ()];
                float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor;
                float zoomDelta = lockedFaceSize / size;
                float lockTime = (float)(CATransition.CurrentMediaTime () - this.lockTime);
                float zoomRate = (float)(Math.Log (zoomDelta) / lockTime);
                if (Math.Abs (zoomDelta) > 0.1)
                    device.RampToVideoZoom (zoomRate > 0 ? MaxZoom : 1, zoomRate);
            }

            CATransaction.Commit ();
        }
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession();

            AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                Console.WriteLine("No video camera (in simulator?)");
                return(false);                // simulator?
            }

            NSError error = null;

            AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error);

            if (input == null)
            {
                Console.WriteLine("Error: " + error);
            }
            else
            {
                session.AddInput(input);
            }

            AVCaptureMetadataOutput output = new AVCaptureMetadataOutput();

            var dg = new CaptureDelegate(this);

            output.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
            session.AddOutput(output);

            // This could be any list of supported barcode types
            output.MetadataObjectTypes = new NSString[] { AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode };
            // OR you could just accept "all" with the following line;
//			output.MetadataObjectTypes = output.AvailableMetadataObjectTypes;  // empty
            // DEBUG: use this if you're curious about the available types
//			foreach (var t in output.AvailableMetadataObjectTypes)
//				Console.WriteLine(t);


            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session);

            //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height);
            previewLayer.Frame        = new RectangleF(0, 0, 320, 290);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
            View.Layer.AddSublayer(previewLayer);

            session.StartRunning();

            Console.WriteLine("StartRunning");
            return(true);
        }
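The CaptureDelegate type is not shown in this example. A minimal sketch of what it could look like, assuming it simply logs decoded values (the owner type is assumed; OfType requires System.Linq):

        // Sketch only: the constructor shape matches "new CaptureDelegate(this)"
        // above, but the owner type and what it does with the result are assumed.
        class CaptureDelegate : AVCaptureMetadataOutputObjectsDelegate
        {
            readonly UIViewController parent;

            public CaptureDelegate(UIViewController parent)
            {
                this.parent = parent;
            }

            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                foreach (var readable in metadataObjects.OfType<AVMetadataMachineReadableCodeObject>())
                    Console.WriteLine("Scanned: " + readable.StringValue);
            }
        }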
 public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject [] metadataObjects, AVCaptureConnection connection)
 {
     // resetEvent is used to drop new notifications if old ones are still processing, to avoid queueing up a bunch of stale data.
     if (resetEvent.WaitOne(0))
     {
         DispatchQueue.MainQueue.DispatchAsync(() => {
             RemoveMetadataObjectOverlayLayers();
             AddMetadataOverlayLayers(metadataObjects.Select(CreateMetadataOverlay));
             resetEvent.Set();
         });
     }
 }
Example #25
 public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
 {
     foreach (var m in metadataObjects)
     {
         var avmmrcobj_readable = (AVMetadataMachineReadableCodeObject)m;
         if (avmmrcobj_readable.StringValue != str_previous_scanned)
         {
             str_previous_scanned    = avmmrcobj_readable.StringValue;
             lab_Result_scanned.Text = str_previous_scanned;
         }
     }
 }
Example #26
            private CaptureSession(SelfView parent, AVCaptureDeviceInput input, AVCaptureMetadataOutput output)
            {
                this.parent  = parent;
                this.queue   = new DispatchQueue("myQueue");
                this.session = new AVCaptureSession {
                    SessionPreset = AVCaptureSession.PresetMedium
                };

                session.AddInput(input);

                output.SetDelegate(this, queue);
                session.AddOutput(output);
            }
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            var device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

            if (device is null)
            {
                this.ShowAlert("无法访问相机", null);
                return;
            }

            var input = AVCaptureDeviceInput.FromDevice(device);

            if (input is null)
            {
                this.ShowAlert("无法访问相机", null);
                return;
            }

            session.AddInput(input);
            try
            {
                var output = new AVCaptureMetadataOutput();
                output.SetDelegate(this, DispatchQueue.MainQueue);
                session.AddOutput(output);

                output.MetadataObjectTypes = AVMetadataObjectType.QRCode;
            }
            catch
            {
                return;
            }

            preview = AVCaptureVideoPreviewLayer.FromSession(session);
            if (preview is null)
            {
                this.ShowAlert("无法显示扫描预览", null);
                return;
            }
            preview.VideoGravity = AVLayerVideoGravity.Resize;
            preview.Frame        = View.Layer.Bounds;
            View.Layer.AddSublayer(preview);

            session.StartRunning();

            codeFrame = new UIView();
            codeFrame.Layer.BorderColor = UIColor.Green.CGColor;
            codeFrame.Layer.BorderWidth = 2;
            View.AddSubview(codeFrame);
            View.BringSubviewToFront(codeFrame);
        }
Example #28
        public void MetadataObjectTypesTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Ignore("Test only works correctly in iOS 8+");
            }

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
                Assert.Fail("This test requires access to the camera, but the app has been denied access.");
                break;
            }

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #29
 public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
 {
     foreach (var metadata in metadataObjects)
     {
         if (!barcodeScanner.IsScannerActive)
         {
             return;
         }
         SystemSound.Vibrate.PlaySystemSound();
         string resultstring = ((AVMetadataMachineReadableCodeObject)metadata).StringValue;
         barcodeScanner.Barcode         = resultstring;
         barcodeScanner.IsScannerActive = false;
         barcodeScanner.Barcode         = null;
         return;
     }
 }
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
        {
            if (!barcodeScanner.BarcodeDecoder)
            {
                return;
            }

            foreach (var metadata in metadataObjects)
            {
                barcodeScanner.Barcode = new RebuyBarcode(
                    ((AVMetadataMachineReadableCodeObject)metadata).StringValue,
                    metadata.Type.ConvertToPcl()
                    );
                return;
            }
        }
			public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
			{
				if (metadataObjects == null || metadataObjects.Length == 0) {
					outer.QRCodeFrameView.Frame = new CGRect ();
					outer.messageLabel.Text = "No QR code is detected";
					outer.messageLabel.BackgroundColor = UIColor.LightGray;
				} else {
					var metadataObj = metadataObjects [0] as AVMetadataMachineReadableCodeObject;
					if (metadataObj != null && metadataObj.Type == AVMetadataObjectType.QRCode) {
						var barCodeObject = outer.VideoPreviewLayer.GetTransformedMetadataObject(metadataObj) as AVMetadataMachineReadableCodeObject;
						outer.QRCodeFrameView.Frame = barCodeObject.Bounds;
						if (!Success && metadataObj.StringValue != null) {
							Success = true;
							outer.addAttendance (metadataObj.StringValue);
						}
					}
				}

			}
Example #32
        public void MetadataObjectTypesTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Ignore("Test only works correctly in iOS 8+");
            }

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #33
        void setupCaptureSession()
        {
            if (CaptureSession != null)
            {
                return;
            }

            CaptureSession = new AVCaptureSession();

            NSNotificationCenter.DefaultCenter.AddObserver(null, captureSessionNotification, CaptureSession);

            applicationWillEnterForegroundNotificationObserver =
                NSNotificationCenter.DefaultCenter.AddObserver(UIApplication.WillEnterForegroundNotification.ToString(),
                                                               UIApplication.SharedApplication,
                                                               NSOperationQueue.CurrentQueue, delegate(NSNotification notification) {
                applicationWillEnterForeground();
            });

            videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            NSError error;

            videoInput = new AVCaptureDeviceInput(videoDevice, out error);
            if (CaptureSession.CanAddInput(videoInput))
            {
                CaptureSession.AddInput(videoInput);
            }

            metadataOutput = new AVCaptureMetadataOutput();

            var metadataQueue = new DispatchQueue("com.AVCam.metadata");

            metadataObjectsDelegate = new MetadataObjectsDelegate {
                DidOutputMetadataObjectsAction = DidOutputMetadataObjects
            };
            metadataOutput.SetDelegate(metadataObjectsDelegate, metadataQueue);

            if (CaptureSession.CanAddOutput(metadataOutput))
            {
                CaptureSession.AddOutput(metadataOutput);
            }
        }
Example #34
        public void SetupCapture()
        {
            this.session = new AVCaptureSession();
            device       = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            //You must lock the device before changing this setting and unlock it afterwards, otherwise it crashes
//			device.LockForConfiguration(out error);
//			//Set the flash to automatic
//			device.FlashMode = AVCaptureFlashMode.Auto;
//			device.UnlockForConfiguration();

            this.videoInput = new AVCaptureDeviceInput(device, out error);
            if (null != error)
            {
                Console.WriteLine("error=" + error);
            }

            this.metadataOutput = new AVCaptureMetadataOutput();

            if (this.session.CanAddInput(this.videoInput))
            {
                this.session.AddInput(this.videoInput);
            }
            if (this.session.CanAddOutput(this.metadataOutput))
            {
                this.session.AddOutput(this.metadataOutput);
            }

            // Create the dispatch queue.
            DispatchQueue dispatchQueue = new DispatchQueue("kScanQRCodeQueueName");

            metadataOutput.SetDelegate(QRCodeGlobalObject.TheAppDel, dispatchQueue);

            // Set the metadata type to AVMetadataObjectTypeQRCode
            metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

            //Initialize the preview layer
            this.previewLayer = new AVCaptureVideoPreviewLayer(this.session);
            this.previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;
            this.previewLayer.Frame        = new CGRect(0, 0, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Height);
            this.Layer.MasksToBounds       = true;
            this.Layer.InsertSublayer(previewLayer, 0);
        }
Example #35
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput,
                                             AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
        {
            var lostFaces = _faceLayers.Keys.ToList();

            foreach (var metadata in metadataObjects.OfType <AVMetadataFaceObject>())
            {
                var transformed = VideoPreviewLayer.GetTransformedMetadataObject(metadata);
                var face        = transformed as AVMetadataFaceObject;
                var bounds      = transformed.Bounds;

                if (lostFaces.Contains(face.FaceID))
                {
                    lostFaces.Remove(face.FaceID);
                }

                CALayer faceLayer;
                if (!_faceLayers.TryGetValue(face.FaceID, out faceLayer))
                {
                    faceLayer = CreateFaceLayer();
                    _overlayLayer.AddSublayer(faceLayer);
                    _faceLayers.Add(face.FaceID, faceLayer);
                }

                faceLayer.Transform = CATransform3D.Identity;
                faceLayer.Frame     = bounds;

                if (face.HasRollAngle)
                {
                    var transform = RollTransform(face.RollAngle);
                    faceLayer.Transform = faceLayer.Transform.Concat(transform);
                }

                if (face.HasYawAngle)
                {
                    var transform = YawTransform(face.YawAngle);
                    faceLayer.Transform = faceLayer.Transform.Concat(transform);
                }
            }

            RemoveLostFaces(lostFaces);
        }
Example #36
        /// <summary>
        /// Sets up face detection.
        /// </summary>
        protected void SetupFaceDetection()
        {
            FaceDetectionOutput = new AVCaptureMetadataOutput();
            if (Session.CanAddOutput(FaceDetectionOutput))
            {
                Session.AddOutput(FaceDetectionOutput);

                if (FaceDetectionOutput.AvailableMetadataObjectTypes.HasFlag(
                        AVMetadataObjectType.Face))
                {
                    FaceDetectionOutput.MetadataObjectTypes = AVMetadataObjectType.Face;
                    FaceDetectionOutput.SetDelegate(this, DispatchQueue.MainQueue);
                }
                else
                {
                    Session.RemoveOutput(FaceDetectionOutput);
                    FaceDetectionOutput.Dispose();
                    FaceDetectionOutput = null;
                }
            }
        }
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			session = new AVCaptureSession ();

			var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice(camera);
			session.AddInput(input);

			output = new AVCaptureMetadataOutput();
			var metadataDelegate = new MetadataOutputDelegate();
			output.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
			session.AddOutput(output);

			output.MetadataObjectTypes = new NSString[] {
				AVMetadataObject.TypeQRCode,
				AVMetadataObject.TypeEAN13Code
			};

			var previewLayer = new AVCaptureVideoPreviewLayer(session);
			//var view = new ContentView(UIColor.LightGray, previewLayer, metadataDelegate);

			previewLayer.MasksToBounds = true;
			previewLayer.VideoGravity = AVCaptureVideoPreviewLayer.GravityResizeAspectFill;
			previewLayer.Frame = UIScreen.MainScreen.Bounds;
			this.View.Layer.AddSublayer(previewLayer);

			metadataDelegate.MetadataFound += (s, e) => {
				session.StopRunning();
				new UIAlertView("Scanned!",e.StringValue, null ,"OK",null).Show();
			};

			session.StartRunning();

		}
            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                string code = "";
                foreach (var metadata in metadataObjects)
                {
                    if (metadata.Type == AVMetadataObject.TypeQRCode) {
                        code = ((AVMetadataMachineReadableCodeObject)metadata).StringValue;
                        Console.WriteLine ("qrcode: " + code);
                    } else {
                        Console.WriteLine ("type: " + metadata.Type);
                        code = ((AVMetadataMachineReadableCodeObject)metadata).StringValue;
                        Console.WriteLine ("----: " + code);
                    }
                }

                if (parent.QrScan != null)
                    parent.QrScan (code);
            }