/// <summary>
/// AVFoundation metadata callback: pushes each machine-readable code value
/// through the view's scan pipeline and raises ScanOccurred only when the
/// buffer accepts the value as new. Non-code objects are logged and skipped.
/// </summary>
public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    string text = string.Empty;
    foreach (var metadata in metadataObjects)
    {
        var readableCode = metadata as AVMetadataMachineReadableCodeObject;
        if (readableCode == null)
        {
            // Anything other than a machine-readable code (e.g. a face) is unexpected here.
            iApp.Log.Info("Invalid AVMetadataObject type: " + metadata.Type.ToString(), new object[0]);
            continue;
        }

        // Let the view pre-process/validate the raw string before buffering.
        text = this._view.ScannedBarcode(readableCode.StringValue);
        if (string.IsNullOrEmpty(text))
        {
            continue;
        }

        // Buffer.Add > 0 signals a value that has not been seen recently.
        if (this.Buffer.Add(text) > 0)
        {
            this._view.ScanOccurred(text);
        }
    }
}
/// <summary>
/// AVFoundation metadata callback: raises MetadataFound for every detected
/// machine-readable code object; other metadata types are ignored.
/// </summary>
public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    foreach (var metadataObject in metadataObjects)
    {
        // Single `as` cast with a null check instead of the original
        // `is` test followed by a second `as` cast of the same object
        // (redundant double type check).
        var readableCode = metadataObject as AVMetadataMachineReadableCodeObject;
        if (readableCode != null)
        {
            MetadataFound(this, readableCode);
        }
    }
}
// An AutoResetEvent instance is used for drawing metadata object overlays so that
// only one group of metadata object overlays is drawn at a time.

/// <summary>
/// Builds the overlay layer for one detected metadata object: a tinted
/// outline (plus the decoded text, when available) for machine-readable
/// codes, or a bounding rectangle for faces.
/// </summary>
/// <param name="metadataObject">Metadata object reported by AVFoundation, in capture coordinates.</param>
/// <returns>Overlay layer positioned in video-preview-layer coordinates.</returns>
MetadataObjectLayer CreateMetadataOverlay(AVMetadataObject metadataObject)
{
    // Transform the metadata object so the bounds are updated to reflect those of the video preview layer.
    var transformedMetadataObject = PreviewView.VideoPreviewLayer.GetTransformedMetadataObject(metadataObject);

    // Create the initial metadata object overlay layer that can be used for either machine readable codes or faces.
    var metadataObjectOverlayLayer = new MetadataObjectLayer
    {
        MetadataObject = transformedMetadataObject,
        LineJoin = CAShapeLayer.JoinRound,
        LineWidth = 7,
        StrokeColor = View.TintColor.ColorWithAlpha(0.7f).CGColor,
        FillColor = View.TintColor.ColorWithAlpha(0.3f).CGColor
    };

    var barcodeMetadataObject = transformedMetadataObject as AVMetadataMachineReadableCodeObject;
    if (barcodeMetadataObject != null)
    {
        var barcodeOverlayPath = BarcodeOverlayPathWithCorners(barcodeMetadataObject.Corners);
        metadataObjectOverlayLayer.Path = barcodeOverlayPath;

        // If the metadata object has a string value, display it.
        // FIX: StringValue is null when the code's payload cannot be decoded
        // as a string, so the original `.StringValue.Length > 0` check threw
        // a NullReferenceException for such codes.
        if (!string.IsNullOrEmpty(barcodeMetadataObject.StringValue))
        {
            var barcodeOverlayBoundingBox = barcodeOverlayPath.BoundingBox;
            var font = UIFont.BoldSystemFontOfSize(19).ToCTFont();
            var textLayer = new CATextLayer
            {
                AlignmentMode = CATextLayer.AlignmentCenter,
                Bounds = new CGRect(0, 0, barcodeOverlayBoundingBox.Size.Width, barcodeOverlayBoundingBox.Size.Height),
                ContentsScale = UIScreen.MainScreen.Scale,
                Position = new CGPoint(barcodeOverlayBoundingBox.GetMidX(), barcodeOverlayBoundingBox.GetMidY()),
                Wrapped = true,
                // Invert the effect of transform of the video preview so the text is orientated with the interface orientation.
                Transform = CATransform3D.MakeFromAffine(PreviewView.Transform).Invert(default(CATransform3D)),
                AttributedString = new NSAttributedString(barcodeMetadataObject.StringValue, new CTStringAttributes
                {
                    Font = font,
                    ForegroundColor = UIColor.White.CGColor,
                    StrokeWidth = -5, // negative stroke: fill and stroke (white text, black outline)
                    StrokeColor = UIColor.Black.CGColor
                })
            };
            textLayer.SetFont(font);
            metadataObjectOverlayLayer.AddSublayer(textLayer);
        }
    }
    else if (transformedMetadataObject is AVMetadataFaceObject)
    {
        metadataObjectOverlayLayer.Path = CGPath.FromRect(transformedMetadataObject.Bounds);
    }

    return metadataObjectOverlayLayer;
}
public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection) { List<int> unseen = faceViews.Keys.ToList (); List<int> seen = new List<int> (); CATransaction.Begin (); CATransaction.SetValueForKey (NSObject.FromObject (true), (NSString) (CATransaction.DisableActions.ToString ())); foreach (var face in faces) { int faceId = (face as AVMetadataFaceObject).FaceID; unseen.Remove (faceId); seen.Add (faceId); FaceView view; if (faceViews.ContainsKey (faceId)) view = faceViews [faceId]; else { view = new FaceView (); view.Layer.CornerRadius = 10; view.Layer.BorderWidth = 3; view.Layer.BorderColor = UIColor.Green.CGColor; previewView.AddSubview (view); faceViews.Add (faceId, view); view.Id = faceId; view.Callback = TouchCallBack; if (lockedFaceID != null) view.Alpha = 0; } AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject (face); view.Frame = adjusted.Bounds; } foreach (int faceId in unseen) { FaceView view = faceViews [faceId]; view.RemoveFromSuperview (); faceViews.Remove (faceId); if (faceId == lockedFaceID) clearLockedFace (); } if (lockedFaceID != null) { FaceView view = faceViews [lockedFaceID.GetValueOrDefault ()]; float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor; float zoomDelta = lockedFaceSize / size; float lockTime = (float)(CATransition.CurrentMediaTime () - this.lockTime); float zoomRate = (float)(Math.Log (zoomDelta) / lockTime); if (Math.Abs (zoomDelta) > 0.1) device.RampToVideoZoom (zoomRate > 0 ? MaxZoom : 1, zoomRate); } CATransaction.Commit (); }
/// <summary>
/// QR-attendance callback: frames the first detected QR code in the preview
/// and, on the first successful decode, records attendance once.
/// </summary>
public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    if (metadataObjects == null || metadataObjects.Length == 0) {
        outer.QRCodeFrameView.Frame = new CGRect ();
        outer.messageLabel.Text = "No QR code is detected";
        outer.messageLabel.BackgroundColor = UIColor.LightGray;
        return;
    }

    // FIX: the first object is not guaranteed to be a machine-readable code
    // (it could be e.g. a face); the original code dereferenced the result
    // of `as` without a null check and could throw NullReferenceException.
    var metadataObj = metadataObjects [0] as AVMetadataMachineReadableCodeObject;
    if (metadataObj == null || metadataObj.Type != AVMetadataObjectType.QRCode)
        return;

    // Map the code bounds into preview-layer coordinates before framing it.
    var barCodeObject = outer.VideoPreviewLayer.GetTransformedMetadataObject (metadataObj) as AVMetadataMachineReadableCodeObject;
    if (barCodeObject != null)
        outer.QRCodeFrameView.Frame = barCodeObject.Bounds;

    // Success latches so attendance is only recorded for the first decode.
    if (!Success && metadataObj.StringValue != null) {
        Success = true;
        outer.addAttendance (metadataObj.StringValue);
    }
}
/// <summary>
/// Forwards the AVFoundation metadata callback to the externally supplied
/// DidOutputMetadataObjectsAction delegate, if one is set.
/// </summary>
public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    // FIX: null-conditional invocation reads the delegate once, closing the
    // check-then-call race where the field could be cleared between the
    // original null test and the invocation.
    DidOutputMetadataObjectsAction?.Invoke (captureOutput, metadataObjects, connection);
}
/// <summary>
/// Capture-output callback: snapshots the metadata objects detected in this
/// frame into the Barcodes list.
/// </summary>
public void DidOutputMetadataObjects (AVCaptureOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    // Copy into a fresh list so consumers never observe a partially
    // replaced collection from a later frame.
    Barcodes = new List<AVMetadataObject> (metadataObjects);
}
// Disabled UI for choosing which metadata object types the output reports;
// kept for reference.
//partial void SelectMetadataObjectTypes(UIButton sender)
//{
//    var controller = new ItemSelectionViewController<AVMetadataObjectType>(this,
//        MetadataObjectTypeItemSelectionIdentifier,
//        this.metadataOutput.AvailableMetadataObjectTypes.GetFlags().ToList(),
//        this.metadataOutput.MetadataObjectTypes.GetFlags().ToList(),
//        true);
//    this.PresentItemSelectionViewController(controller);
//}

/// <summary>
/// 2. Scan result handling: builds the overlay layer for one detected
/// metadata object and runs the scan business logic (continuous vs. single
/// scan, duplicate detection, audio/vibration feedback, screen dismissal).
/// </summary>
/// <param name="metadataObject">Metadata object reported by AVFoundation.</param>
/// <returns>Overlay layer positioned in preview-layer coordinates.</returns>
private MetadataObjectLayer CreateMetadataOverlay(AVMetadataObject metadataObject)
{
    // Transform the metadata object so the bounds are updated to reflect those of the video preview layer.
    var transformedMetadataObject = this.PreviewView.VideoPreviewLayer.GetTransformedMetadataObject(metadataObject);

    // Create the initial metadata object overlay layer that can be used for either machine readable codes or faces.
    // Overlay ("beam") color is red; the original tint-based colors are kept below for reference.
    var metadataObjectOverlayLayer = new MetadataObjectLayer
    {
        LineWidth = 3,
        LineJoin = CAShapeLayer.JoinRound,
        MetadataObject = transformedMetadataObject,
        FillColor = UIColor.Red.CGColor,
        StrokeColor = UIColor.Red.CGColor
        //FillColor = this.View.TintColor.ColorWithAlpha(0.3f).CGColor,
        //StrokeColor = this.View.TintColor.ColorWithAlpha(0.7f).CGColor
    };

    var barcodeMetadataObject = transformedMetadataObject as AVMetadataMachineReadableCodeObject;
    if (barcodeMetadataObject != null)
    {
        var barcodeOverlayPath = this.BarcodeOverlayPathWithCorners(barcodeMetadataObject.Corners);
        metadataObjectOverlayLayer.Path = barcodeOverlayPath;

        // If the metadata object has a string value, display it.
        string textLayerString = null; // decoded barcode value (barcode type is barcodeMetadataObject.Type)
        if (!string.IsNullOrEmpty(barcodeMetadataObject.StringValue))
        {
            textLayerString = barcodeMetadataObject.StringValue;
        }
        else
        {
            // TODO: add Descriptor (line 618 in original iOS sample)
        }

        // Only run the overlay/business logic when a string value was decoded.
        if (!string.IsNullOrEmpty(textLayerString))
        {
            Console.WriteLine("========Result========");
            Console.WriteLine(barcodeMetadataObject.Type + ", " + barcodeMetadataObject.StringValue);
            //this.PreviewView.customOverlay.tableSource.tableItems.Add(new TableItem { Heading = "heading", SubHeading = "subheading", ImageName = "barcode36x36.png" });
            //this.InvokeOnMainThread(() => { this.PreviewView.customOverlay.RowAdd("aa", "bb"); });

            var barcodeOverlayBoundingBox = barcodeOverlayPath.BoundingBox;
            var font = UIFont.BoldSystemFontOfSize(16).ToCTFont();
            var textLayer = new CATextLayer();
            textLayer.TextAlignmentMode = CATextLayerAlignmentMode.Center;
            // Text box: the tight bounding box was too small for the label
            // to be visible, so +20 width and +50 height were added.
            textLayer.Bounds = new CGRect(0, 0, barcodeOverlayBoundingBox.Size.Width + 20, barcodeOverlayBoundingBox.Size.Height + 50);
            //textLayer.Bounds = new CGRect(0, 0, 300, 100);
            textLayer.ContentsScale = UIScreen.MainScreen.Scale;
            textLayer.Position = new CGPoint(barcodeOverlayBoundingBox.GetMidX(), barcodeOverlayBoundingBox.GetMidY());
            textLayer.Wrapped = true;
            // Invert the preview transform so the text follows the interface orientation.
            textLayer.Transform = CATransform3D.MakeFromAffine(this.PreviewView.Transform).Invert();

            //-------------------------------------
            // Business logic for the scanned barcode.
            //-------------------------------------
            // Continuous-scan mode.
            if (this.IsContinue)
            {
                if (this.IsFixed)
                {
                    if (this.AllScanBarcode.Contains(textLayerString))
                    {
                        // 1. Already saved? (label shown in green; string literal means "save completed")
                        if (this.SaveCompletedBarcode.Contains(textLayerString))
                        {
                            textLayer.String = "저장 완료\n" + textLayerString;
                            textLayer.AttributedString = new NSAttributedString(textLayer.String, new CTStringAttributes { Font = font, StrokeWidth = 0, StrokeColor = UIColor.Black.CGColor, ForegroundColor = UIColor.Green.CGColor });
                            // Warning feedback.
                            audioCautionPlayer.Play();
                            doubleVibrator.PlaySystemSoundAsync();
                        }
                        // 2. Already scanned? (label shown in yellow; string literal means "scan completed")
                        else if (this.ScanCompletedBarcode.Contains(textLayerString))
                        {
                            textLayer.String = "스캔 완료\n" + textLayerString;
                            textLayer.AttributedString = new NSAttributedString(textLayer.String, new CTStringAttributes { Font = font, StrokeWidth = 0, StrokeColor = UIColor.Black.CGColor, ForegroundColor = UIColor.Yellow.CGColor });
                            // Warning feedback.
                            audioCautionPlayer.Play();
                            doubleVibrator.PlaySystemSoundAsync();
                        }
                        else
                        {
                            //------------
                            // Normal processing: first time this target barcode is scanned.
                            //------------
                            OnScanCompleted?.Invoke(barcodeMetadataObject.Type.ToString(), textLayerString);
                            textLayer.String = textLayerString;
                            textLayer.AttributedString = new NSAttributedString(textLayer.String, new CTStringAttributes { Font = font, StrokeWidth = 0, StrokeColor = UIColor.Black.CGColor, ForegroundColor = UIColor.White.CGColor });
                            // Success feedback.
                            audioPlayer.Play();
                            SystemSound.Vibrate.PlaySystemSoundAsync();
                            if (!this.ScanCompletedBarcode.Contains(textLayerString))
                            {
                                this.ScanCompletedBarcode.Add(textLayerString);
                            }
                            // All targets covered (saved + scanned): stop the session and close.
                            if (this.AllScanBarcode.Count == this.SaveCompletedBarcode.Count + this.ScanCompletedBarcode.Count)
                            {
                                this.session.StopRunning();
                                OnScanCompleted?.Invoke(string.Empty, "EXIT");
                                DismissViewController(true, null); // close the screen
                            }
                            else
                            {
                                // The delay between continuous scans is
                                // handled by the caller of this method.
                            }
                        }
                    }
                    else
                    {
                        // Not a scan target (label shown in red; string literal means "not a scan target").
                        textLayer.String = "스캔 대상X\n" + textLayerString;
                        textLayer.AttributedString = new NSAttributedString(textLayer.String, new CTStringAttributes { Font = font, StrokeWidth = 0, StrokeColor = UIColor.Black.CGColor, ForegroundColor = UIColor.Red.CGColor });
                        // Warning feedback.
                        audioCautionPlayer.Play();
                        doubleVibrator.PlaySystemSoundAsync();
                    }
                }
                // Non-fixed mode (no fixed list of scan targets).
                else
                {
                    // No business logic at present.
                }
            }
            // Single-scan mode: stop, report, and close immediately.
            else
            {
                this.session.StopRunning();
                textLayer.String = textLayerString;
                textLayer.AttributedString = new NSAttributedString(textLayer.String, new CTStringAttributes { Font = font, StrokeWidth = 0, StrokeColor = UIColor.Black.CGColor, ForegroundColor = UIColor.White.CGColor });
                // Success feedback.
                audioPlayer.Play();
                SystemSound.Vibrate.PlaySystemSoundAsync();
                OnScanCompleted?.Invoke(barcodeMetadataObject.Type.ToString(), textLayerString);
                DismissViewController(true, null); // close the screen
            }

            // Attach the label to the overlay.
            textLayer.SetFont(font);
            metadataObjectOverlayLayer.AddSublayer(textLayer);
        }
    }
    else if (transformedMetadataObject is AVMetadataFaceObject)
    {
        metadataObjectOverlayLayer.Path = CGPath.FromRect(transformedMetadataObject.Bounds);
    }

    return(metadataObjectOverlayLayer);
}
/// <summary>
/// Logs each decoded code and forwards the last decoded value to the
/// parent's QrScan callback.
/// </summary>
public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    string code = "";
    foreach (var metadata in metadataObjects)
    {
        // FIX: the original else-branch unconditionally cast every metadata
        // object to AVMetadataMachineReadableCodeObject, which throws
        // InvalidCastException for non-code objects (e.g. faces).
        var readable = metadata as AVMetadataMachineReadableCodeObject;
        if (readable == null)
        {
            Console.WriteLine ("type: " + metadata.Type);
            continue;
        }

        code = readable.StringValue;
        if (metadata.Type == AVMetadataObject.TypeQRCode)
        {
            Console.WriteLine ("qrcode: " + code);
        }
        else
        {
            Console.WriteLine ("type: " + metadata.Type);
            Console.WriteLine ("----: " + code);
        }
    }
    if (parent.QrScan != null)
        parent.QrScan (code);
}
/// <summary>
/// AVFoundation metadata callback: redraws the overlay layers on the main
/// queue for this batch of metadata objects.
/// </summary>
public void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject [] metadataObjects, AVCaptureConnection connection)
{
    // resetEvent gates the overlay pipeline: if the previous batch is still
    // being drawn, drop this one rather than queue up stale data.
    if (!resetEvent.WaitOne (0))
        return;

    DispatchQueue.MainQueue.DispatchAsync (() => {
        RemoveMetadataObjectOverlayLayers ();
        AddMetadataOverlayLayers (metadataObjects.Select (CreateMetadataOverlay));
        // Re-open the gate once this batch has been handed to the UI.
        resetEvent.Set ();
    });
}
// An AutoResetEvent instance is used for drawing metadata object overlays so that
// only one group of metadata object overlays is drawn at a time.

/// <summary>
/// Builds the overlay layer for one detected metadata object: a tinted
/// outline (plus the decoded text, when available) for machine-readable
/// codes, or a bounding rectangle for faces.
/// </summary>
/// <param name="metadataObject">Metadata object reported by AVFoundation, in capture coordinates.</param>
/// <returns>Overlay layer positioned in video-preview-layer coordinates.</returns>
MetadataObjectLayer CreateMetadataOverlay (AVMetadataObject metadataObject)
{
    // Transform the metadata object so the bounds are updated to reflect those of the video preview layer.
    var transformedMetadataObject = PreviewView.VideoPreviewLayer.GetTransformedMetadataObject (metadataObject);

    // Create the initial metadata object overlay layer that can be used for either machine readable codes or faces.
    var metadataObjectOverlayLayer = new MetadataObjectLayer {
        MetadataObject = transformedMetadataObject,
        LineJoin = CAShapeLayer.JoinRound,
        LineWidth = 7,
        StrokeColor = View.TintColor.ColorWithAlpha (0.7f).CGColor,
        FillColor = View.TintColor.ColorWithAlpha (0.3f).CGColor
    };

    var barcodeMetadataObject = transformedMetadataObject as AVMetadataMachineReadableCodeObject;
    if (barcodeMetadataObject != null) {
        var barcodeOverlayPath = BarcodeOverlayPathWithCorners (barcodeMetadataObject.Corners);
        metadataObjectOverlayLayer.Path = barcodeOverlayPath;

        // If the metadata object has a string value, display it.
        // FIX: StringValue is null when the code's payload cannot be decoded
        // as a string, so the original `.StringValue.Length > 0` check threw
        // a NullReferenceException for such codes.
        if (!string.IsNullOrEmpty (barcodeMetadataObject.StringValue)) {
            var barcodeOverlayBoundingBox = barcodeOverlayPath.BoundingBox;
            var font = UIFont.BoldSystemFontOfSize (19).ToCTFont ();
            var textLayer = new CATextLayer {
                AlignmentMode = CATextLayer.AlignmentCenter,
                Bounds = new CGRect (0, 0, barcodeOverlayBoundingBox.Size.Width, barcodeOverlayBoundingBox.Size.Height),
                ContentsScale = UIScreen.MainScreen.Scale,
                Position = new CGPoint (barcodeOverlayBoundingBox.GetMidX (), barcodeOverlayBoundingBox.GetMidY ()),
                Wrapped = true,
                // Invert the effect of transform of the video preview so the text is orientated with the interface orientation.
                Transform = CATransform3D.MakeFromAffine (PreviewView.Transform).Invert (default (CATransform3D)),
                AttributedString = new NSAttributedString (barcodeMetadataObject.StringValue, new CTStringAttributes {
                    Font = font,
                    ForegroundColor = UIColor.White.CGColor,
                    StrokeWidth = -5, // negative stroke: fill and stroke (white text, black outline)
                    StrokeColor = UIColor.Black.CGColor
                })
            };
            textLayer.SetFont (font);
            metadataObjectOverlayLayer.AddSublayer (textLayer);
        }
    } else if (transformedMetadataObject is AVMetadataFaceObject) {
        metadataObjectOverlayLayer.Path = CGPath.FromRect (transformedMetadataObject.Bounds);
    }

    return metadataObjectOverlayLayer;
}