/// <Docs>The capture output on which the frame was captured.</Docs>
/// <param name="connection">The connection on which the video frame was received.</param>
/// <remarks>Unless you need to keep the buffer for longer, you must call
/// Dispose() on the sampleBuffer before returning. The system
/// has a limited pool of video frames, and once it runs out of
/// those buffers, the system will stop calling this method
/// until the buffers are released.</remarks>
/// <summary>
/// Called when a new video frame has been written to the output sample buffer.
/// </summary>
/// <param name="captureOutput">Capture output.</param>
/// <param name="sampleBuffer">Sample buffer.</param>
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // Trap all errors
    try
    {
        // Grab an image from the buffer
        var image = GetImageFromSampleBuffer(sampleBuffer);

        // Display the image
        if (DisplayView != null)
        {
            DisplayView.BeginInvokeOnMainThread(() =>
            {
                // Set the image
                DisplayView.Image = image;

                // Rotate the image to the correct display orientation
                DisplayView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
            });
        }

        // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
        // of buffers and will stop delivering frames if it runs out.
        sampleBuffer.Dispose();
    }
    catch (Exception e)
    {
        // Report error
        Console.WriteLine("Error sampling buffer: {0}", e.Message);
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        // render the image into the debug preview pane
        UIImage image = getImageFromSampleBuffer (sampleBuffer);

        // event the capture up
        OnImageCaptured (image);

        // make sure AVFoundation does not run out of buffers
        sampleBuffer.Dispose ();
    } catch (Exception ex) {
        string exceptionText = ErrorHandling.GetExceptionDetailedText (ex);
        string errorMessage = $"Failed to process image capture: {exceptionText}";
        OnCaptureError (errorMessage);
    }
}
public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    if (metadataObjects == null || metadataObjects.Length == 0) {
        outer.QRCodeFrameView.Frame = new CGRect ();
        outer.messageLabel.Text = "No QR code is detected";
        outer.messageLabel.BackgroundColor = UIColor.LightGray;
    } else {
        var metadataObj = metadataObjects [0] as AVMetadataMachineReadableCodeObject;
        if (metadataObj.Type == AVMetadataObjectType.QRCode) {
            var barCodeObject = outer.VideoPreviewLayer.GetTransformedMetadataObject (metadataObj) as AVMetadataMachineReadableCodeObject;
            outer.QRCodeFrameView.Frame = barCodeObject.Bounds;

            if (!Success && metadataObj.StringValue != null) {
                Success = true;
                outer.addAttendance (metadataObj.StringValue);
            }
        }
    }
}
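The QR delegate above only fires if the session has an AVCaptureMetadataOutput restricted to QR codes. A minimal setup sketch follows; VideoPreviewLayer matches the snippet above, while the session fields, the delegate class name MyMetadataDelegate, and the surrounding view-controller code are assumptions, not the original implementation.

// Sketch: wiring an AVCaptureMetadataOutput to the delegate above (names are placeholders).
var session = new AVCaptureSession ();
var device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
var input = AVCaptureDeviceInput.FromDevice (device, out NSError inputError);
if (inputError != null)
    return;
session.AddInput (input);

var metadataOutput = new AVCaptureMetadataOutput ();
session.AddOutput (metadataOutput);

// Set the delegate before restricting the metadata object types.
metadataOutput.SetDelegate (new MyMetadataDelegate (this), DispatchQueue.MainQueue);
metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

VideoPreviewLayer = new AVCaptureVideoPreviewLayer (session) { Frame = View.Bounds };
View.Layer.AddSublayer (VideoPreviewLayer);
session.StartRunning ();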
/// <summary>
/// Gets called by the video session if a new image is available.
/// </summary>
/// <param name="captureOutput">Capture output.</param>
/// <param name="sampleBuffer">Sample buffer.</param>
/// <param name="connection">Connection.</param>
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        // Convert the raw image data into a CGImage.
        using (CGImage sourceImage = GetImageFromSampleBuffer(sampleBuffer))
        {
            this.OnImageCaptured(sourceImage);
        }

        // Make sure AVFoundation does not run out of buffers
        sampleBuffer.Dispose();
    }
    catch (Exception ex)
    {
        string errorMessage = string.Format("Failed to process image capture: {0}", ex);
        this.OnCaptureError(errorMessage, ex);
    }
}
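Several of these handlers call a GetImageFromSampleBuffer helper that is not reproduced in the listing. Below is a minimal sketch of a CGImage-returning version matching the call in the snippet above (other snippets use a UIImage-returning variant); it assumes the video output was configured for the 32BGRA pixel format.

// Sketch only: assumes the AVCaptureVideoDataOutput delivers CV32BGRA frames.
CGImage GetImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
    {
        if (pixelBuffer == null)
            return null;

        // Lock the buffer so BaseAddress stays valid while we read it.
        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
        try
        {
            var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;
            using (var colorSpace = CGColorSpace.CreateDeviceRGB())
            using (var context = new CGBitmapContext(
                pixelBuffer.BaseAddress,
                pixelBuffer.Width,
                pixelBuffer.Height,
                8,
                pixelBuffer.BytesPerRow,
                colorSpace,
                flags))
            {
                return context.ToImage();
            }
        }
        finally
        {
            pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
        }
    }
}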
public void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    //Console.WriteLine("DROPPED");
}
public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        var currentDate = DateTime.Now;
        //Console.WriteLine("DidOutputSampleBuffer: " + currentDate + " " + lastAnalysis + " " + currentDate.Subtract(lastAnalysis).Milliseconds);

        // control the pace of the machine vision to protect battery life
        if (currentDate - lastAnalysis >= pace)
        {
            lastAnalysis = currentDate;
        }
        else
        {
            //Console.WriteLine("-- skip --");
            return; // don't run the classifier more often than we need
        }

        // keep track of performance and log the frame rate
        if (trackPerformance)
        {
            frameCount = frameCount + 1;
            if (frameCount % framesPerSample == 0)
            {
                var diff = currentDate.Subtract(startDate);
                if (diff.Seconds > 0)
                {
                    if (pace > TimeSpan.Zero)
                    {
                        Console.WriteLine("WARNING: Frame rate of image classification is being limited by \"pace\" setting. Set to 0.0 for fastest possible rate.");
                    }
                }
                Console.WriteLine($"{diff.TotalSeconds / framesPerSample}s per frame (average)");
                startDate = currentDate;
            }
        }

        // Crop and resize the image data.
        // Note, this uses a Core Image pipeline that could be appended with other pre-processing.
        // If we don't want to do anything custom, we can remove this step and let the Vision framework handle
        // crop and resize as long as we are careful to pass the orientation properly.
        using (var croppedBuffer = CroppedSampleBuffer(sampleBuffer, targetImageSize))
        {
            if (croppedBuffer == null)
            {
                return;
            }

            try
            {
                VNImageOptions options = new VNImageOptions();
                classifierRequestHandler = new VNImageRequestHandler(croppedBuffer, options);
                NSError err;
                classifierRequestHandler.Perform(ClassificationRequest, out err);
                if (err != null)
                {
                    Console.WriteLine(err);
                }
            }
            catch (Exception error)
            {
                Console.WriteLine(error);
            }
        }
    }
    finally
    {
        sampleBuffer.Dispose();
    }
}
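The handler above performs a pre-built ClassificationRequest whose construction is not shown. A minimal sketch using the Xamarin Vision and CoreML bindings is below; the model file name, SetupClassificationRequest, and HandleClassification are placeholder names, not part of the original sample.

// Sketch: building the VNCoreMLRequest array performed by the handler above.
// "MyClassifier.mlmodelc" and the method names are assumptions.
VNRequest[] ClassificationRequest;

void SetupClassificationRequest()
{
    var modelUrl = NSBundle.MainBundle.GetUrlForResource("MyClassifier", "mlmodelc");
    var mlModel = MLModel.Create(modelUrl, out NSError modelError);
    if (modelError != null)
    {
        Console.WriteLine(modelError);
        return;
    }

    var vnModel = VNCoreMLModel.FromMLModel(mlModel, out NSError visionError);
    if (visionError != null)
    {
        Console.WriteLine(visionError);
        return;
    }

    ClassificationRequest = new VNRequest[] { new VNCoreMLRequest(vnModel, HandleClassification) };
}

void HandleClassification(VNRequest request, NSError error)
{
    // Log the top classification result, if any.
    var results = request.GetResults<VNClassificationObservation>();
    if (results != null && results.Length > 0)
        Console.WriteLine($"{results[0].Identifier} ({results[0].Confidence:P0})");
}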
/// <Docs>The capture output on which the frame was captured.</Docs>
/// <param name="connection">The connection on which the video frame was received.</param>
/// <remarks>Unless you need to keep the buffer for longer, you must call
/// Dispose() on the sampleBuffer before returning. The system
/// has a limited pool of video frames, and once it runs out of
/// those buffers, the system will stop calling this method
/// until the buffers are released.</remarks>
/// <summary>
/// Called when a new video frame has been written to the output sample buffer.
/// </summary>
/// <param name="captureOutput">Capture output.</param>
/// <param name="sampleBuffer">Sample buffer.</param>
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // Trap all errors
    try
    {
        // Grab an image from the buffer
        var image = GetImageFromSampleBuffer(sampleBuffer);

        // Display the image
        if (DisplayView != null)
        {
            DisplayView.BeginInvokeOnMainThread(() =>
            {
                // Set the image
                DisplayView.Image = image;

                // Rotate image to the correct display orientation
                DisplayView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
            });
        }

        // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
        // of buffers and will stop delivering frames if it runs out.
        sampleBuffer.Dispose();
    }
    catch (Exception e)
    {
        // Report error
        Console.WriteLine("Error sampling buffer: {0}", e.Message);
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        lock (syncObject)
        {
            this.TryDispose(image);
            image = ImageFromSampleBuffer(sampleBuffer);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
    finally
    {
        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose();
    }
}
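TryDispose is not shown in these snippets. A plausible minimal sketch, assuming it simply guards a Dispose call against null, is:

// Sketch: null-safe dispose helper assumed by the snippets above and below.
void TryDispose(IDisposable disposable)
{
    if (disposable != null)
        disposable.Dispose();
}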
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var image = ImageFromSampleBuffer (sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        ImageView.BeginInvokeOnMainThread (() => {
            TryDispose (ImageView.Image);
            ImageView.Image = image;
            ImageView.Transform = CGAffineTransform.MakeRotation (NMath.PI / 2);
        });
    } catch (Exception e) {
        Console.WriteLine (e);
    } finally {
        sampleBuffer.Dispose ();
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        LuminanceSource luminance;
        using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
        {
            if (bytes == null)
            {
                bytes = new byte[pixelBuffer.Height * pixelBuffer.BytesPerRow];
            }

            pixelBuffer.Lock(0);
            Marshal.Copy(pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
            luminance = new RGBLuminanceSource(bytes, pixelBuffer.Width, pixelBuffer.Height);
            pixelBuffer.Unlock(0);
        }

        var binarized = new BinaryBitmap(new HybridBinarizer(luminance));
        var result = reader.decodeWithState(binarized);

        //parent.session.StopRunning ();
        if (parent.QrScan != null)
        {
            parent.QrScan(result);
        }
    }
    catch (ReaderException)
    {
        // ignore this exception; it happens every time there is a failed scan
    }
    catch (Exception)
    {
        // TODO: this one is unexpected; log or otherwise handle it
        throw;
    }
    finally
    {
        try
        {
            // Dispose can itself throw here, so guard it.
            sampleBuffer.Dispose();
        }
        catch
        {
        }
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    lastRunTime = DateTimeOffset.Now.ToUnixTimeMilliseconds();
    if (lastRunTime - lastAnalysisTime > scanIntervalInMs && Configuration.IsScanning)
    {
        lastAnalysisTime = lastRunTime;
        try
        {
            var image = GetImageFromSampleBuffer(sampleBuffer);
            if (image == null)
            {
                return;
            }

            Width = (float)image.Size.Width;
            Height = (float)image.Size.Height;

            var visionImage = new VisionImage(image) { Metadata = metadata };
            releaseSampleBuffer(sampleBuffer);
            DetectBarcodeActionAsync(visionImage);
        }
        catch (Exception exception)
        {
            System.Diagnostics.Debug.WriteLine(exception.Message);
        }
    }
    releaseSampleBuffer(sampleBuffer);
}
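releaseSampleBuffer is not included in the listing. Since the code above can call it twice for the same frame, a minimal sketch would simply be an idempotent, null-safe dispose:

// Sketch: buffer release assumed by the scanner snippets; safe to call more than once.
void releaseSampleBuffer(CMSampleBuffer sampleBuffer)
{
    if (sampleBuffer != null)
    {
        sampleBuffer.Dispose();
    }
}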
public void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
{
    List<int> unseen = faceViews.Keys.ToList ();
    List<int> seen = new List<int> ();

    CATransaction.Begin ();
    CATransaction.SetValueForKey (NSObject.FromObject (true), (NSString) (CATransaction.DisableActions.ToString ()));

    foreach (var face in faces) {
        // HACK: int faceId = (face as AVMetadataFaceObject).FaceID;
        int faceId = (int)(face as AVMetadataFaceObject).FaceID;
        unseen.Remove (faceId);
        seen.Add (faceId);

        FaceView view;
        if (faceViews.ContainsKey (faceId))
            view = faceViews [faceId];
        else {
            view = new FaceView ();
            view.Layer.CornerRadius = 10;
            view.Layer.BorderWidth = 3;
            view.Layer.BorderColor = UIColor.Green.CGColor;
            previewView.AddSubview (view);
            faceViews.Add (faceId, view);
            view.Id = faceId;
            view.Callback = TouchCallBack;
            if (lockedFaceID != null)
                view.Alpha = 0;
        }

        AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject (face);
        view.Frame = adjusted.Bounds;
    }

    foreach (int faceId in unseen) {
        FaceView view = faceViews [faceId];
        view.RemoveFromSuperview ();
        faceViews.Remove (faceId);
        if (faceId == lockedFaceID)
            clearLockedFace ();
    }

    if (lockedFaceID != null) {
        FaceView view = faceViews [lockedFaceID.GetValueOrDefault ()];
        // HACK: Cast resulting nfloat to float
        // float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor;
        float size = (float)(Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor);
        float zoomDelta = lockedFaceSize / size;
        float lockTime = (float)(CATransition.CurrentMediaTime () - this.lockTime);
        float zoomRate = (float)(Math.Log (zoomDelta) / lockTime);
        if (Math.Abs (zoomDelta) > 0.1)
            device.RampToVideoZoom (zoomRate > 0 ? MaxZoom : 1, zoomRate);
    }

    CATransaction.Commit ();
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
    {
        // Lock the base address
        pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);

        // Get the number of bytes per row for the pixel buffer
        var baseAddress = pixelBuffer.BaseAddress;
        int bytesPerRow = (int)pixelBuffer.BytesPerRow;
        int width = (int)pixelBuffer.Width;
        int height = (int)pixelBuffer.Height;

        byte[] managedArray = new byte[width * height];
        Marshal.Copy(baseAddress, managedArray, 0, width * height);

        pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);

        _formsCameraView.RaiseFrameAvailable(managedArray);
    }
}
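The copy above assumes one byte per pixel and no row padding (bytesPerRow == width), which only holds for some formats and resolutions. A hedged alternative that respects bytesPerRow copies row by row; CopyPlane is a placeholder name and the caller is still responsible for locking the buffer:

// Sketch: row-by-row copy that tolerates padded rows (bytesPerRow > width).
// Assumes an 8-bit single-plane buffer, as the snippet above does, and that
// the caller holds pixelBuffer.Lock(...) for the duration of the call.
byte[] CopyPlane(CVPixelBuffer pixelBuffer)
{
    int bytesPerRow = (int)pixelBuffer.BytesPerRow;
    int width = (int)pixelBuffer.Width;
    int height = (int)pixelBuffer.Height;

    var managed = new byte[width * height];
    IntPtr rowStart = pixelBuffer.BaseAddress;

    for (int row = 0; row < height; row++)
    {
        // Copy only the visible pixels of each row, skipping any padding bytes.
        Marshal.Copy(rowStart, managed, row * width, width);
        rowStart = IntPtr.Add(rowStart, bytesPerRow);
    }
    return managed;
}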
public void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject [] metadataObjects, AVCaptureConnection connection)
{
    // resetEvent is used to drop new notifications if old ones are still processing,
    // to avoid queueing up a bunch of stale data.
    if (resetEvent.WaitOne (0)) {
        DispatchQueue.MainQueue.DispatchAsync (() => {
            RemoveMetadataObjectOverlayLayers ();
            AddMetadataOverlayLayers (metadataObjects.Select (CreateMetadataOverlay));
            resetEvent.Set ();
        });
    }
}
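For the drop-while-busy behaviour described in the comment, the WaitOne(0) call has to consume the signal, so resetEvent is presumably an initially-signalled AutoResetEvent. A sketch of the assumed field:

// Sketch (requires System.Threading): initially-signalled auto-reset event.
// WaitOne(0) consumes the signal; Set() re-arms it once the overlays are rebuilt.
readonly AutoResetEvent resetEvent = new AutoResetEvent(true);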
public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    if (DidOutputMetadataObjectsAction != null)
        DidOutputMetadataObjectsAction (captureOutput, metadataObjects, connection);
}
public void DidOutputMetadataObjects (AVCaptureOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    Barcodes = metadataObjects.ToList ();
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
            int width = (int) pixelBuffer.Width;
            int height = (int) pixelBuffer.Height;

            if (container.ripple == null || width != textureWidth || height != textureHeight) {
                textureWidth = width;
                textureHeight = height;
                container.SetupRipple (textureWidth, textureHeight);
            }

            CleanupTextures ();

            // Y-plane
            GL.ActiveTexture (TextureUnit.Texture0);
            All re = (All) 0x1903; // GL_RED_EXT, RED component from ARB OpenGL extension
            CVReturn status;
            lumaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth, textureHeight, re, DataType.UnsignedByte, 0, out status);
            if (lumaTexture == null) {
                Console.WriteLine ("Error creating luma texture: {0}", status);
                return;
            }

            GL.BindTexture (lumaTexture.Target, lumaTexture.Name);
            GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int) All.ClampToEdge);
            GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int) All.ClampToEdge);

            // UV Plane
            GL.ActiveTexture (TextureUnit.Texture1);
            re = (All) 0x8227; // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
            chromaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth / 2, textureHeight / 2, re, DataType.UnsignedByte, 1, out status);
            if (chromaTexture == null) {
                Console.WriteLine ("Error creating chroma texture: {0}", status);
                return;
            }

            GL.BindTexture (chromaTexture.Target, chromaTexture.Name);
            GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int) All.ClampToEdge);
            GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int) All.ClampToEdge);
        }
    } finally {
        sampleBuffer.Dispose ();
    }
}
public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
{
    if (DidOutputMetadataObjectsAction != null)
    {
        DidOutputMetadataObjectsAction(captureOutput, faces, connection);
    }
}
bool SetupCaptureSession ()
{
    //Console.WriteLine ("SetupCaptureSession");
    // Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
    // is used to deliver both audio and video buffers, and our video processing consistently takes
    // too long, the delivery queue can back up, resulting in audio being dropped.
    //
    // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
    // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
    //
    // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

    // Create Capture session
    captureSession = new AVCaptureSession ();
    captureSession.BeginConfiguration ();

    // Create audio connection
    NSError error;
    var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
    if (audioDevice == null)
        return false; // e.g. simulator

    var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
    if (captureSession.CanAddInput (audioIn))
        captureSession.AddInput (audioIn);

    var audioOut = new AVCaptureAudioDataOutput ();
    var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

    // Add the Delegate to capture each sample that comes through
    audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);

    if (captureSession.CanAddOutput (audioOut))
        captureSession.AddOutput (audioOut);

    audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);

    // Create Video Session
    var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
    var videoIn = new AVCaptureDeviceInput (videoDevice, out error);

    if (captureSession.CanAddInput (videoIn))
        captureSession.AddInput (videoIn);

    // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
    // processing can take longer than real-time on some platforms (such as iPhone 3GS).
    // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
    // alwaysDiscardsLateVideoFrames property to NO.
    var videoOut = new AVCaptureVideoDataOutput {
        AlwaysDiscardsLateVideoFrames = true,
        // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
        // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
        WeakVideoSettings = new CVPixelBufferAttributes () {
            PixelFormatType = CVPixelFormatType.CV32BGRA
        }.Dictionary
    };

    // Create a DispatchQueue for the Video Processing
    var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
    videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);

    if (captureSession.CanAddOutput (videoOut))
        captureSession.AddOutput (videoOut);

    // Set the Video connection from the Video Output object
    videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
    videoOrientation = videoConnection.VideoOrientation;

    captureSession.CommitConfiguration ();

    return true;
}
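VideoDeviceWithPosition is referenced above but not shown. A minimal sketch that picks the first video device at the requested camera position (which is presumably all the RosyWriter-style sample needs) would be:

// Sketch: return the first video capture device at the requested position, or null.
static AVCaptureDevice VideoDeviceWithPosition (AVCaptureDevicePosition position)
{
    foreach (var device in AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video)) {
        if (device.Position == position)
            return device;
    }
    return null;
}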
public override void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    //Console.WriteLine("Dropped Sample Buffer");
}
public virtual void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // HACK: Change CMSampleBuffer.GetFormatDescription() to CMSampleBuffer.GetVideoFormatDescription()
    // HACK: Change CMFormatDescription to CMVideoFormatDescription
    // CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();
    CMVideoFormatDescription formatDescription = sampleBuffer.GetVideoFormatDescription ();

    if (connection == videoConnection) {
        // Get framerate
        CMTime timestamp = sampleBuffer.PresentationTimeStamp;
        CalculateFramerateAtTimestamp (timestamp);

        // Get frame dimensions (for onscreen display)
        if (VideoDimensions.IsEmpty)
            // HACK: Change GetVideoPresentationDimensions() to GetPresentationDimensions()
            // VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
            VideoDimensions = formatDescription.GetPresentationDimensions (true, false);

        // Get the buffer type
        if (VideoType == 0)
            VideoType = formatDescription.MediaSubType;

        // Synchronously process the pixel buffer to de-green it.
        using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
            ProcessPixelBuffer (pixelBuffer);

        previewBufferQueue.Enqueue (sampleBuffer);

        //var writeBuffer = sampleBuffer.Duplicate ();
        InvokeOnMainThread (() => {
            INativeObject j = previewBufferQueue.Dequeue ();

            var sbuf = j as CMSampleBuffer;
            if (sbuf == null) {
#if DEBUG
                // Record the current sampleBuffer.ClassHandle
                // Then run another iteration and on the next one, print the ClassHandle
                Console.WriteLine ("The type is {0}", j.ToString ());
#endif
                return;
            }

            using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()) {
                if (PixelBufferReadyForDisplay != null)
                    PixelBufferReadyForDisplay (pixBuf);
            }
        });
    }

    // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
    CompleteBufferUse (sampleBuffer);

    movieWritingQueue.DispatchAsync (() => {
        if (assetWriter != null) {
            bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

            if (connection == videoConnection) {
                // Initialize the video input if this is not done yet
                if (!readyToRecordVideo)
                    readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);

                // Write the video data to file
                if (readyToRecordVideo && readyToRecordAudio) {
                    WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
                }
            } else if (connection == audioConnection) {
                if (!readyToRecordAudio)
                    readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);

                if (readyToRecordAudio && readyToRecordVideo)
                    WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
            }

            bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

            if (!wasReadyToRecord && isReadyToRecord) {
                recordingWillBeStarted = false;
                IsRecording = true;

                if (RecordingDidStart != null)
                    RecordingDidStart ();
            }
        }
        CompleteBufferUse (sampleBuffer);
    });
}
public override void DidDropSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // sampleBuffer.Dispose();
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    lastRunTime = DateTimeOffset.Now.ToUnixTimeMilliseconds();
    if (lastRunTime - lastAnalysisTime > scanIntervalInMs && Configuration.IsScanning)
    {
        lastAnalysisTime = lastRunTime;
        try
        {
            UIImage image = GetImageFromSampleBuffer(sampleBuffer);
            var visionImage = new VisionImage(image) { Metadata = metadata };
            releaseSampleBuffer(sampleBuffer);
            DetectBarcodeActionAsync(visionImage);
        }
        catch
        {
            // Ignore frames that fail to convert or analyze.
        }
    }
    releaseSampleBuffer(sampleBuffer);
}
public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    if (OnCapture != null && metadataObjects != null)
    {
        OnCapture(metadataObjects);
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

        using (var image = ImageFromSampleBuffer(sampleBuffer))
        {
            if (_CurrentState.didKnock)
            {
                KnockCount++;

                if (KnockCount == 1)
                {
                    _CurrentState.CountDown = 5;
                    InvokeOnMainThread(delegate
                    {
                        _CurrentState.TopLabel.Text = "Knock Again to Post!!";
                        _CurrentState.BottomLabel.Text = "Knock to Post: 5 sec";
                    });
                }
                else if (KnockCount == 40)
                {
                    _CurrentState.CountDown = 4;
                    InvokeOnMainThread(delegate
                    {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 4 sec";
                    });
                }
                else if (KnockCount == 80)
                {
                    _CurrentState.CountDown = 3;
                    InvokeOnMainThread(delegate
                    {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 3 sec";
                    });
                }
                else if (KnockCount == 120)
                {
                    _CurrentState.CountDown = 2;
                    InvokeOnMainThread(delegate
                    {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 2 sec";
                    });
                }
                else if (KnockCount == 160)
                {
                    _CurrentState.CountDown = 1;
                    InvokeOnMainThread(delegate
                    {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 1 sec";
                    });
                }
                else if (KnockCount > 200)
                {
                    InvokeOnMainThread(delegate
                    {
                        _CurrentState.TopLabel.Text = "Follow @JoesDoor on Twitter";
                        _CurrentState.BottomLabel.Text = "Knock to take a photo";
                    });
                    KnockCount = 0;
                    _CurrentState.CountDown = 0;
                    _CurrentState.didKnock = false;
                }
            }
            else
            {
                InvokeOnMainThread(delegate
                {
                    using (var pool = new NSAutoreleasePool())
                    {
                        _CurrentState.DisplayImageView.Image = image;
                    }
                });
            }
        }

        sampleBuffer.Dispose();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection) { }
public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    foreach (var m in metadataObjects)
    {
        var avmmrcobj_readable = (AVMetadataMachineReadableCodeObject)m;
        if (avmmrcobj_readable.StringValue != str_previous_scanned)
        {
            str_previous_scanned = avmmrcobj_readable.StringValue;
            lab_Result_scanned.Text = str_previous_scanned;
        }
    }
}
public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    // resetEvent is used to drop new notifications if old ones are still processing, to avoid queuing up a bunch of stale data.
    // Note (2018-08-31, hm.ji): for continuous scanning, the wait timeout below must be 0.
    if (this.resetEvent.WaitOne(0))
    {
        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            this.RemoveMetadataObjectOverlayLayers();
            this.AddMetadataOverlayLayers(metadataObjects.Select(this.CreateMetadataOverlay));

            //OnScanCompleted?.Invoke("EXIT");
            //DismissViewController(true, null);

            //if (this.AllScanBarcode.Count == this.SaveCompletedBarcode.Count + this.ScanCompletedBarcode.Count)
            //{
            //    Task.Delay(500).Wait();
            //}
            //else
            //{
            //    // Delay between consecutive scans
            //    Task.Delay(1000).ContinueWith((t) => resetEvent.Set());
            //    Thread.Sleep(1000);
            //}
            //Task.Delay(1000).Wait();

            resetEvent.Set();
        });
    }
}
public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection avConnection)
{
    CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();
    var pixelBuffer = imageBuffer as CVPixelBuffer;

    var bufferSize = pixelBuffer.Height * pixelBuffer.BytesPerRow;
    if (bytes.Length != bufferSize)
    {
        bytes = new byte[bufferSize];
    }

    pixelBuffer.Lock(CVPixelBufferLock.None);
    Marshal.Copy(pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
    pixelBuffer.Unlock(CVPixelBufferLock.None);

    // Note: LoadPixelData<Rgb24> expects tightly packed 3-byte pixels, so this only matches
    // the copied data if the capture output is configured for a 24-bit RGB format with no row padding.
    var image = SixLabors.ImageSharp.Image
        .LoadPixelData<SixLabors.ImageSharp.PixelFormats.Rgb24>(
            SixLabors.ImageSharp.Configuration.Default,
            bytes,
            (int)pixelBuffer.Width,
            (int)pixelBuffer.Height);

    string asciiImage = ImageConverter.ImageToAsciiArt(image);
    connection.InvokeAsync("SendFrame", asciiImage);
}
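On iOS, AVCaptureVideoDataOutput typically delivers 32BGRA or biplanar YUV frames rather than packed 24-bit RGB, so if the session feeding the delegate above is configured for CV32BGRA, loading the buffer as Bgra32 is the safer interpretation. A hedged sketch of that variant:

// Sketch: interpret the copied buffer as 32-bit BGRA instead of 24-bit RGB.
// Assumes the output's pixel format is CVPixelFormatType.CV32BGRA and that
// bytesPerRow == width * 4 (otherwise copy row by row as sketched earlier).
var bgraImage = SixLabors.ImageSharp.Image
    .LoadPixelData<SixLabors.ImageSharp.PixelFormats.Bgra32>(
        SixLabors.ImageSharp.Configuration.Default,
        bytes,
        (int)pixelBuffer.Width,
        (int)pixelBuffer.Height);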
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        UIImage image = ImageFromSampleBuffer(sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        BeginInvokeOnMainThread(delegate
        {
            if (_imageView.Frame.Size != image.Size)
            {
                _imageView.Frame = new CGRect(CGPoint.Empty, image.Size);
            }
            _imageView.Image = image;
        });

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
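ImageFromSampleBuffer is likewise not shown in the listing. Besides the CGBitmapContext approach sketched earlier, a shorter Core Image based variant works for both BGRA and biplanar YUV buffers at some conversion cost; the sketch below assumes a long-lived ciContext field and is not the original helper.

// Sketch: Core Image based conversion of a sample buffer to a UIImage.
// ciContext is an assumed field, e.g. CIContext.FromOptions(null), created once.
UIImage ImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
    using (var ciImage = new CIImage(pixelBuffer))
    using (var cgImage = ciContext.CreateCGImage(ciImage, ciImage.Extent))
    {
        return UIImage.FromImage(cgImage);
    }
}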
/// <summary>
/// Captures the image with metadata.
/// </summary>
/// <returns>The image with metadata.</returns>
/// <param name="captureStillImageOutput">Capture still image output.</param>
/// <param name="connection">Connection.</param>
private async Task CaptureImageWithMetadata(AVCaptureStillImageOutput captureStillImageOutput, AVCaptureConnection connection)
{
    var sampleBuffer = await captureStillImageOutput.CaptureStillImageTaskAsync(connection);
    var imageData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
    var image = UIImage.LoadFromData(imageData);

    RotateImage(ref image);

    try
    {
        byte[] imgData = image.AsJPEG().ToArray();
        if (Photo != null)
        {
            Photo(this, imgData);
        }
    }
    catch (Exception error)
    {
        _log.WriteLineTime(_tag + "\n" +
            "CaptureImageWithMetadata() Failed to take photo \n " +
            "ErrorMessage: \n" + error.Message + "\n" +
            "Stacktrace: \n " + error.StackTrace);
    }
}
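RotateImage is referenced above but not included. A plausible sketch that normalizes the photo by redrawing it so its EXIF orientation is baked into the pixels is shown below; the ref signature simply mirrors the call above, and this is an assumption rather than the original helper.

// Sketch: bake the UIImage's Orientation into the pixel data so downstream
// consumers that ignore EXIF orientation still see an upright photo.
void RotateImage(ref UIImage image)
{
    if (image.Orientation == UIImageOrientation.Up)
        return;

    UIGraphics.BeginImageContextWithOptions(image.Size, false, image.CurrentScale);
    try
    {
        // Drawing honors the orientation, so the result is an "Up"-oriented copy.
        image.Draw(new CGRect(CGPoint.Empty, image.Size));
        image = UIGraphics.GetImageFromCurrentImageContext();
    }
    finally
    {
        UIGraphics.EndImageContext();
    }
}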
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    if ((DateTime.UtcNow - lastAnalysis).TotalMilliseconds < options.DelayBetweenAnalyzingFrames
        || working
        || CancelTokenSource.IsCancellationRequested)
    {
        if (sampleBuffer != null)
        {
            sampleBuffer.Dispose();
            sampleBuffer = null;
        }
        return;
    }

    working = true;
    //Console.WriteLine("SAMPLE");

    lastAnalysis = DateTime.UtcNow;

    try
    {
        using (var image = ImageFromSampleBuffer(sampleBuffer))
            HandleImage(image);

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose();
        sampleBuffer = null;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }

    working = false;
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, MonoTouch.CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
        {
            int width = pixelBuffer.Width;
            int height = pixelBuffer.Height;

            if (container.ripple == null || width != textureWidth || height != textureHeight)
            {
                textureWidth = width;
                textureHeight = height;
                container.SetupRipple(textureWidth, textureHeight);
            }

            CleanupTextures();

            // Y-plane
            GL.ActiveTexture(All.Texture0);
            All re = (All)0x1903; // GL_RED_EXT, RED component from ARB OpenGL extension
            CVReturn status;
            lumaTexture = container.videoTextureCache.TextureFromImage(pixelBuffer, true, re, textureWidth, textureHeight, re, DataType.UnsignedByte, 0, out status);
            if (lumaTexture == null)
            {
                Console.WriteLine("Error creating luma texture: {0}", status);
                return;
            }

            GL.BindTexture((All)lumaTexture.Target, lumaTexture.Name);
            GL.TexParameter(All.Texture2D, All.TextureWrapS, (int)All.ClampToEdge);
            GL.TexParameter(All.Texture2D, All.TextureWrapT, (int)All.ClampToEdge);

            // UV Plane
            GL.ActiveTexture(All.Texture1);
            re = (All)0x8227; // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
            chromaTexture = container.videoTextureCache.TextureFromImage(pixelBuffer, true, re, textureWidth / 2, textureHeight / 2, re, DataType.UnsignedByte, 1, out status);
            if (chromaTexture == null)
            {
                Console.WriteLine("Error creating chroma texture: {0}", status);
                return;
            }

            GL.BindTexture((All)chromaTexture.Target, chromaTexture.Name);
            GL.TexParameter(All.Texture2D, All.TextureWrapS, (int)All.ClampToEdge);
            GL.TexParameter(All.Texture2D, All.TextureWrapT, (int)All.ClampToEdge);
        }
    }
    finally
    {
        sampleBuffer.Dispose();
    }
}
public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    if (!barcodeScanner.BarcodeDecoder)
    {
        return;
    }

    foreach (var metadata in metadataObjects)
    {
        barcodeScanner.Barcode = new RebuyBarcode(
            ((AVMetadataMachineReadableCodeObject)metadata).StringValue,
            metadata.Type.ConvertToPcl());
        return;
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

    if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames
        || (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans)
        || working
        || CancelTokenSource.IsCancellationRequested)
    {
        if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames)
        {
            Console.WriteLine("Too soon between frames");
        }
        if (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans)
        {
            Console.WriteLine("Too soon since last scan");
        }

        if (sampleBuffer != null)
        {
            sampleBuffer.Dispose();
            sampleBuffer = null;
        }
        return;
    }

    wasScanned = false;
    working = true;
    lastAnalysis = DateTime.UtcNow;

    try
    {
        // Get the CoreVideo image
        using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
        {
            // Lock the base address
            pixelBuffer.Lock(CVPixelBufferLock.ReadOnly); // MAYBE NEEDS READ/WRITE

            if (Picture)
            {
                Picture = false;
                CapturedImage = null;

                var baseAddress = pixelBuffer.BaseAddress;
                nint bytesPerRow = pixelBuffer.BytesPerRow;
                nint width = pixelBuffer.Width;
                nint height = pixelBuffer.Height;
                var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

                // Create a CGImage on the RGB colorspace from the configured parameter above
                using (var cs = CGColorSpace.CreateDeviceRGB())
                using (var context = new CGBitmapContext(baseAddress, width, height, 8, bytesPerRow, cs, flags))
                using (var cgImage = context.ToImage())
                {
                    pixelBuffer.Unlock(CVPixelBufferLock.None);
                    CapturedImage = UIImage.FromImage(cgImage);
                    //SendPictureBack?.Invoke(this, CapturedImage);
                    HandleCapturedImage(CapturedImage);
                }
            }

            LuminanceSource luminanceSource;

            // Let's access the raw underlying data and create a luminance source from it
            unsafe
            {
                var rawData = (byte*)pixelBuffer.BaseAddress.ToPointer();
                var rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4); // This drops 8 bytes from the original length to give us the expected length

                luminanceSource = new CVPixelBufferBGRA32LuminanceSource(rawData, rawDatalen, (int)pixelBuffer.Width, (int)pixelBuffer.Height);
            }

            if (HandleImage(luminanceSource))
            {
                wasScanned = true;
            }

            pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
        }

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose();
        sampleBuffer = null;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
    finally
    {
        working = false;
    }
}
public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] faces, AVCaptureConnection connection)
{
    List<int> unseen = faceViews.Keys.ToList();
    List<int> seen = new List<int>();

    CATransaction.Begin();
    CATransaction.SetValueForKey(NSObject.FromObject(true), (NSString)(CATransaction.DisableActions.ToString()));

    foreach (var face in faces)
    {
        // HACK: int faceId = (face as AVMetadataFaceObject).FaceID;
        int faceId = (int)(face as AVMetadataFaceObject).FaceID;
        unseen.Remove(faceId);
        seen.Add(faceId);

        FaceView view;
        if (faceViews.ContainsKey(faceId))
        {
            view = faceViews[faceId];
        }
        else
        {
            view = new FaceView();
            view.Layer.CornerRadius = 10;
            view.Layer.BorderWidth = 3;
            view.Layer.BorderColor = UIColor.Green.CGColor;
            previewView.AddSubview(view);
            faceViews.Add(faceId, view);
            view.Id = faceId;
            view.Callback = TouchCallBack;
            if (lockedFaceID != null)
            {
                view.Alpha = 0;
            }
        }

        AVMetadataFaceObject adjusted = (AVMetadataFaceObject)(previewView.Layer as AVCaptureVideoPreviewLayer).GetTransformedMetadataObject(face);
        view.Frame = adjusted.Bounds;
    }

    foreach (int faceId in unseen)
    {
        FaceView view = faceViews[faceId];
        view.RemoveFromSuperview();
        faceViews.Remove(faceId);
        if (faceId == lockedFaceID)
        {
            clearLockedFace();
        }
    }

    if (lockedFaceID != null)
    {
        FaceView view = faceViews[lockedFaceID.GetValueOrDefault()];
        // HACK: Cast resulting nfloat to float
        // float size = (float)Math.Max (view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor;
        float size = (float)(Math.Max(view.Frame.Size.Width, view.Frame.Size.Height) / device.VideoZoomFactor);
        float zoomDelta = lockedFaceSize / size;
        float lockTime = (float)(CATransition.CurrentMediaTime() - this.lockTime);
        float zoomRate = (float)(Math.Log(zoomDelta) / lockTime);
        if (Math.Abs(zoomDelta) > 0.1)
        {
            device.RampToVideoZoom(zoomRate > 0 ? MaxZoom : 1, zoomRate);
        }
    }

    CATransaction.Commit();
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        if (_camera.BluetoothOperator.PairStatus == PairStatus.Connected)
        {
            if (Interlocked.Exchange(ref _readyToCapturePreviewFrameInterlocked, 0) == 1)
            {
                var image = GetImageFromSampleBuffer(sampleBuffer);
                var bytes = image.AsJPEG(0).ToArray();
                _camera.BluetoothOperator.SendLatestPreviewFrame(bytes);
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Error sampling buffer: {0}", e.Message);
    }
    finally
    {
        sampleBuffer.Dispose();
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        var image = ImageFromSampleBuffer(sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        AppDelegate.ImageView.BeginInvokeOnMainThread(delegate
        {
            AppDelegate.ImageView.Image = image;
        });

        sampleBuffer.Dispose();
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    if (BufferReceived != null)
    {
        BufferReceived(this, new BufferReceivedEventArgs(sampleBuffer));
    }
    sampleBuffer.Dispose();
}
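BufferReceivedEventArgs is not defined in the listing. A minimal sketch is below; only the class name comes from the snippet above, and because the handler disposes the buffer right after raising the event, subscribers must consume it synchronously.

// Sketch: event args wrapping the sample buffer delivered to BufferReceived.
// The buffer is only valid for the duration of the event callback, since the
// delegate above disposes it immediately after the event is raised.
public class BufferReceivedEventArgs : EventArgs
{
    public BufferReceivedEventArgs(CMSampleBuffer buffer)
    {
        Buffer = buffer;
    }

    public CMSampleBuffer Buffer { get; private set; }
}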
private void updatePreviewLayer(AVCaptureConnection layer, AVCaptureVideoOrientation orientation)
{
    layer.VideoOrientation = orientation;
    previewLayer.Frame = Bounds;
}
public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
{
    string text = string.Empty;
    for (int i = 0; i < metadataObjects.Length; i++)
    {
        AVMetadataObject aVMetadataObject = metadataObjects[i];
        AVMetadataMachineReadableCodeObject aVMetadataMachineReadableCodeObject = aVMetadataObject as AVMetadataMachineReadableCodeObject;
        if (aVMetadataMachineReadableCodeObject != null)
        {
            text = aVMetadataMachineReadableCodeObject.StringValue;
            text = this._view.ScannedBarcode(text);
            if (!string.IsNullOrEmpty(text))
            {
                int num = this.Buffer.Add(text);
                if (num > 0)
                {
                    this._view.ScanOccurred(text);
                }
            }
        }
        else
        {
            iApp.Log.Info("Invalid AVMetadataObject type: " + aVMetadataObject.Type.ToString(), new object[0]);
        }
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

    if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames
        || (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans)
        || working
        || CancelTokenSource.IsCancellationRequested)
    {
        if (msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenAnalyzingFrames)
        {
            Console.WriteLine("Too soon between frames");
        }
        if (wasScanned && msSinceLastPreview < scannerHost.ScanningOptions.DelayBetweenContinuousScans)
        {
            Console.WriteLine("Too soon since last scan");
        }

        if (sampleBuffer != null)
        {
            sampleBuffer.Dispose();
            sampleBuffer = null;
        }
        return;
    }

    wasScanned = false;
    working = true;
    lastAnalysis = DateTime.UtcNow;

    try
    {
        // Get the CoreVideo image
        using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
        {
            // Lock the base address
            pixelBuffer.Lock(CVPixelBufferLock.ReadOnly); // MAYBE NEEDS READ/WRITE

            LuminanceSource luminanceSource;

            // Let's access the raw underlying data and create a luminance source from it
            unsafe
            {
                var rawData = (byte*)pixelBuffer.BaseAddress.ToPointer();
                var rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4); // This drops 8 bytes from the original length to give us the expected length

                luminanceSource = new CVPixelBufferBGRA32LuminanceSource(rawData, rawDatalen, (int)pixelBuffer.Width, (int)pixelBuffer.Height);
            }

            if (HandleImage(luminanceSource))
            {
                wasScanned = true;
            }

            pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
        }

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose();
        sampleBuffer = null;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
    finally
    {
        working = false;
    }
}
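The CVPixelBufferBGRA32LuminanceSource used by the two scanner delegates above only makes sense if the video data output is configured to deliver 32BGRA frames on a background queue. A configuration sketch follows; the queue name, outputRecorder, and captureSession are placeholders rather than the original fields.

// Sketch: configure the AVCaptureVideoDataOutput so the delegates above receive
// 32BGRA frames off the main thread. Names here are placeholders.
var videoDataOutput = new AVCaptureVideoDataOutput
{
    AlwaysDiscardsLateVideoFrames = true,
    WeakVideoSettings = new CVPixelBufferAttributes
    {
        PixelFormatType = CVPixelFormatType.CV32BGRA
    }.Dictionary
};

var queue = new DispatchQueue("camera-frames");
videoDataOutput.SetSampleBufferDelegateQueue(outputRecorder, queue);

if (captureSession.CanAddOutput(videoDataOutput))
    captureSession.AddOutput(videoDataOutput);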
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        UIImage image = ImageFromSampleBuffer (sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        BeginInvokeOnMainThread (delegate {
            if (_imageView.Frame.Size != image.Size)
                _imageView.Frame = new CGRect (CGPoint.Empty, image.Size);
            _imageView.Image = image;
        });

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose ();
    } catch (Exception e) {
        Console.WriteLine (e);
    }
}