public void DidFinishProcessingPhoto (AVCapturePhotoOutput captureOutput, CMSampleBuffer photoSampleBuffer, CMSampleBuffer previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings resolvedSettings, AVCaptureBracketedStillImageSettings bracketSettings, NSError error)
{
    if (photoSampleBuffer != null)
        photoData = AVCapturePhotoOutput.GetJpegPhotoDataRepresentation (photoSampleBuffer, previewPhotoSampleBuffer);
    else
        Console.WriteLine ($"Error capturing photo: {error.LocalizedDescription}");
}
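For context, this delegate method only fires after a capture has been requested on the output. A minimal sketch of the triggering call, assuming a running session and an `AVCapturePhotoOutput` field named `photoOutput` (the field name is hypothetical), with the containing class implementing the photo-capture delegate interface:

var settings = AVCapturePhotoSettings.Create ();
settings.FlashMode = AVCaptureFlashMode.Auto;
// 'this' must implement IAVCapturePhotoCaptureDelegate so that
// DidFinishProcessingPhoto above receives the result.
photoOutput.CapturePhoto (settings, this);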
public void AddSampleBuffer (CMSampleBuffer sampleBuffer)
{
    var stopwatch = new Stopwatch ();
    stopwatch.Start ();

    using (var image = CreateImage (sampleBuffer)) {
        var imageRect = new Rectangle (0, 0, image.Width, image.Height);

        // Build the clip mask: every 'stride'-th vertical stripe belongs to this contributor
        var maskRects = new List<RectangleF> ();
        var maskRect = new Rectangle (stripeSize.Width * stripeIndex, 0, stripeSize.Width, stripeSize.Height);
        while (maskRect.X < imageSize.Width) {
            maskRects.Add (maskRect);
            maskRect.X += stripeSize.Width * stride;
        }

        // Draw the image, clipped to this contributor's stripes
        renderContext.SaveState ();
        renderContext.ClipToRects (maskRects.ToArray ());
        renderContext.DrawImage (imageRect, image);
        renderContext.RestoreState ();
    }

    stopwatch.Stop ();
    Console.WriteLine ("Render time for contributor {0}: {1} msec", stripeIndex, stopwatch.Elapsed.TotalMilliseconds);
    stripeIndex = (stripeIndex + 1) % stride;
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

        using (var image = ImageFromSampleBuffer (sampleBuffer)) {
            if (_CurrentState.didKnock) {
                // Count frames; at roughly 40 frames per second, each step below is about one second apart
                KnockCount++;
                if (KnockCount == 1) {
                    _CurrentState.CountDown = 5;
                    InvokeOnMainThread (delegate {
                        _CurrentState.TopLabel.Text = "Knock Again to Post!!";
                        _CurrentState.BottomLabel.Text = "Knock to Post: 5 sec";
                    });
                } else if (KnockCount == 40) {
                    _CurrentState.CountDown = 4;
                    InvokeOnMainThread (delegate {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 4 sec";
                    });
                } else if (KnockCount == 80) {
                    _CurrentState.CountDown = 3;
                    InvokeOnMainThread (delegate {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 3 sec";
                    });
                } else if (KnockCount == 120) {
                    _CurrentState.CountDown = 2;
                    InvokeOnMainThread (delegate {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 2 sec";
                    });
                } else if (KnockCount == 160) {
                    _CurrentState.CountDown = 1;
                    InvokeOnMainThread (delegate {
                        _CurrentState.BottomLabel.Text = "Knock to Post: 1 sec";
                    });
                } else if (KnockCount > 200) {
                    InvokeOnMainThread (delegate {
                        _CurrentState.TopLabel.Text = "Follow @JoesDoor on Twitter";
                        _CurrentState.BottomLabel.Text = "Knock to take a photo";
                    });
                    KnockCount = 0;
                    _CurrentState.CountDown = 0;
                    _CurrentState.didKnock = false;
                }
            } else {
                InvokeOnMainThread (delegate {
                    using (var pool = new NSAutoreleasePool ()) {
                        // Note: 'image' can be disposed by the enclosing 'using' before this
                        // asynchronous delegate runs; a robust version would transfer ownership.
                        _CurrentState.DisplayImageView.Image = image;
                    }
                });
            }
        }

        // Release the buffer; AVFoundation has a fixed pool and stops delivering frames otherwise
        sampleBuffer.Dispose ();
    } catch (Exception e) {
        Console.WriteLine (e);
    }
}
private UIImage getImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
{
    // Get the CoreVideo image
    using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
        // Lock the base address
        pixelBuffer.Lock (0);

        // Get the pixel buffer geometry
        var baseAddress = pixelBuffer.BaseAddress;
        int bytesPerRow = (int)pixelBuffer.BytesPerRow;
        int width = (int)pixelBuffer.Width;
        int height = (int)pixelBuffer.Height;
        var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

        // Create a CGImage on the RGB colorspace from the configured parameters above
        using (var cs = CGColorSpace.CreateDeviceRGB ())
        using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags))
        using (var cgImage = context.ToImage ()) {
            pixelBuffer.Unlock (0);
            return UIImage.FromImage (cgImage);
        }
    }
}
/// <summary>
/// Gets a single image frame from the sample buffer.
/// </summary>
/// <returns>The image from the sample buffer.</returns>
/// <param name="sampleBuffer">Sample buffer.</param>
private UIImage GetImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
{
    // Get a pixel buffer from the sample buffer
    using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
        // Lock the base address
        pixelBuffer.Lock ((CVPixelBufferLock)0);

        // Prepare to decode the buffer
        var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

        // Decode the buffer - create a new colorspace
        using (var cs = CGColorSpace.CreateDeviceRGB ()) {
            // Create a new context from the buffer
            using (var context = new CGBitmapContext (pixelBuffer.BaseAddress, pixelBuffer.Width, pixelBuffer.Height, 8, pixelBuffer.BytesPerRow, cs, (CGImageAlphaInfo)flags)) {
                // Get the image from the context
                using (var cgImage = context.ToImage ()) {
                    // Unlock and return the image
                    pixelBuffer.Unlock ((CVPixelBufferLock)0);
                    return UIImage.FromImage (cgImage);
                }
            }
        }
    }
}
/// <summary>
/// Called when the capture output has a new video frame available.
/// </summary>
/// <param name="captureOutput">The capture output on which the frame was captured.</param>
/// <param name="sampleBuffer">Sample buffer holding the video frame.</param>
/// <param name="connection">The connection on which the video frame was received.</param>
/// <remarks>Unless you need to keep the buffer for longer, you must call
/// Dispose() on the sampleBuffer before returning. The system
/// has a limited pool of video frames, and once it runs out of
/// those buffers, the system will stop calling this method
/// until the buffers are released.</remarks>
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // Trap all errors
    try {
        // Grab an image from the buffer
        var image = GetImageFromSampleBuffer (sampleBuffer);

        // Display the image
        if (DisplayView != null) {
            DisplayView.BeginInvokeOnMainThread (() => {
                // Set the image
                DisplayView.Image = image;

                // Rotate the image to the correct display orientation
                DisplayView.Transform = CGAffineTransform.MakeRotation ((float)Math.PI / 2);
            });
        }

        // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
        // of buffers and will stop delivering frames if it runs out.
        sampleBuffer.Dispose ();
    } catch (Exception e) {
        // Report error
        Console.WriteLine ("Error sampling buffer: {0}", e.Message);
    }
}
protected override bool Append (CMSampleBuffer sampleBuffer)
{
    CMTime presentationTime = sampleBuffer.PresentationTimeStamp;

    using (CVPixelBuffer writerBuffer = adaptor.PixelBufferPool.CreatePixelBuffer ()) {
        // Grab the pixel buffer from the sample buffer, if possible
        using (CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer ()) {
            var pixelBuffer = (CVPixelBuffer)imageBuffer;
            if (pixelBuffer != null)
                transformer.AdjustPixelBuffer (pixelBuffer, writerBuffer);
        }

        return adaptor.AppendPixelBufferWithPresentationTime (writerBuffer, presentationTime);
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        // render the image into the debug preview pane
        UIImage image = getImageFromSampleBuffer (sampleBuffer);

        // event the capture up
        OnImageCaptured (image);

        // make sure AVFoundation does not run out of buffers
        sampleBuffer.Dispose ();
    } catch (Exception ex) {
        string exceptionText = ErrorHandling.GetExceptionDetailedText (ex);
        string errorMessage = $"Failed to process image capture: {exceptionText}";
        OnCaptureError (errorMessage);
    }
}
/// <summary>
/// Gets called by the video session if a new image is available.
/// </summary>
/// <param name="captureOutput">Capture output.</param>
/// <param name="sampleBuffer">Sample buffer.</param>
/// <param name="connection">Connection.</param>
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        // Convert the raw image data into a CGImage.
        using (CGImage sourceImage = GetImageFromSampleBuffer (sampleBuffer)) {
            this.OnImageCaptured (sourceImage);
        }

        // Make sure AVFoundation does not run out of buffers
        sampleBuffer.Dispose ();
    } catch (Exception ex) {
        string errorMessage = string.Format ("Failed to process image capture: {0}", ex);
        this.OnCaptureError (errorMessage, ex);
    }
}
UIImage ImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
{
    // Get the CoreVideo image
    using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
        // Lock the base address
        pixelBuffer.Lock (0);

        // Get the pixel buffer geometry
        var baseAddress = pixelBuffer.BaseAddress;
        int bytesPerRow = pixelBuffer.BytesPerRow;
        int width = pixelBuffer.Width;
        int height = pixelBuffer.Height;
        var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

        // Create a CGImage on the RGB colorspace from the configured parameters above
        using (var cs = CGColorSpace.CreateDeviceRGB ())
        using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags))
        using (var cgImage = context.ToImage ()) {
            pixelBuffer.Unlock (0);
            return UIImage.FromImage (cgImage);
        }
    }
}
async void TakePhotoButtonTapped (object sender, EventArgs e)
{
    Debug.WriteLine ("DHB:CameraService:TakePhotoButtonTapped");
    try {
        AVCaptureConnection videoConnection = stillImageOutput.ConnectionFromMediaType (AVMediaType.Video);
        CMSampleBuffer sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync (videoConnection);
        NSData jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData (sampleBuffer);
        byte[] jpegAsByteArray = jpegImageAsNsData.ToArray ();
        //GlobalSingletonHelpers.readExifOrientation(jpegAsByteArray);
        //Debug.WriteLine("DHB:CameraServices_iOS:TakePhotoButton:TakePhotoButtonTapped orientation:" + UIDevice.CurrentDevice.Orientation);
        GlobalStatusSingleton.mostRecentImgBytes = jpegAsByteArray;
        exitFromPhoto = true;
        if (FinishedPickingMedia != null) {
            FinishedPickingMedia (this, e);
        }
    } catch (Exception err) {
        Debug.WriteLine ("DHB:Exception " + err.ToString ());
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    if (ready) {
        ready = false;
        pixelBuffer = (CVPixelBuffer)sampleBuffer.GetImageBuffer ();
        pixelBuffer.Lock (CVPixelBufferLock.None);
        width = pixelBuffer.Width;
        height = pixelBuffer.Height;
        bytesPerRow = pixelBuffer.BytesPerRow;
        context = new CGBitmapContext (pixelBuffer.BaseAddress, width, height, 8, bytesPerRow, colorSpace, CGImageAlphaInfo.PremultipliedFirst);
        cgImage = context.ToImage ();
        uiImage = new UIImage (cgImage);
        pixelBuffer.Unlock (CVPixelBufferLock.None);
        pixelBuffer.Dispose ();
        queue.DispatchAsync (ReadTask);
    }
    sampleBuffer.Dispose ();
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        // Process any immediate request first, and don't count it against the continual frame count
        if (needNextFrame) {
            using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
                nextFrameHandler?.Invoke (pixelBuffer);
            }
            needNextFrame = false;
            nextFrameHandler = null;
        } else if (framesNeedHandling) {
            if (framesSinceLastHandling % framesPerHandling == 0) {
                using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
                    frameHandler?.Invoke (pixelBuffer);
                }
                framesSinceLastHandling = 1;
            } else {
                framesSinceLastHandling++;
            }
        }
    } catch (Exception ex) {
        System.Diagnostics.Debug.WriteLine ("**Error: Invoking Handler - " + ex.Message);
    } finally {
        sampleBuffer.Dispose ();
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        using (var uiImage = GetUIImage (sampleBuffer)) {
            if (uiImage == null) {
                return;
            }
            FrameCaptured (this, new EventArgsT<UIImage> (uiImage));
        }
    } finally {
        sampleBuffer.Dispose ();
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        // Grab the frame image here and process it as needed
        //var image = GetImageFromSampleBuffer (sampleBuffer);

        // Exchange data with the PCL project
        Camera.Hoge = (object)(this._FrameCount++.ToString ());

        // Push the modified image back to the preview.
        // Without this, the app crashes with "Received memory warning."
        // or the screen stops updating.
        GC.Collect (); // works around "Received memory warning."
    } catch (Exception e) {
        Console.WriteLine ("Error sampling buffer: {0}", e.Message);
    }
}
// ReSharper disable once UnusedMember.Local
private void PhotoCaptureComplete (AVCapturePhotoOutput captureOutput, CMSampleBuffer finishedPhotoBuffer, CMSampleBuffer previewPhotoBuffer, AVCaptureResolvedPhotoSettings resolvedSettings, AVCaptureBracketedStillImageSettings bracketSettings, NSError error)
{
    try {
        if (error != null) {
            _cameraModule.ErrorMessage = error.ToString ();
        } else if (finishedPhotoBuffer != null) {
            if (!(_cameraModule.BluetoothOperator.PairStatus == PairStatus.Connected &&
                  !_cameraModule.BluetoothOperator.IsPrimary)) {
                LockPictureSpecificSettingsIfApplicable ();
            }

            using (var image = AVCapturePhotoOutput.GetJpegPhotoDataRepresentation (finishedPhotoBuffer, previewPhotoBuffer))
            using (var imgDataProvider = new CGDataProvider (image))
            using (var cgImage = CGImage.FromJPEG (imgDataProvider, null, false, CGColorRenderingIntent.Default))
            using (var uiImage = UIImage.FromImage (cgImage, 1, GetOrientationForCorrection ())) {
                var imageBytes = uiImage.AsJPEG ().ToArray ();
                if (_cameraModule.BluetoothOperator.PairStatus == PairStatus.Connected &&
                    !_cameraModule.BluetoothOperator.IsPrimary) {
                    _cameraModule.BluetoothOperator.SendCapture (imageBytes);
                } else {
                    _cameraModule.CapturedImage = imageBytes;
                }
            }
        }
    } catch (Exception e) {
        _cameraModule.ErrorMessage = e.ToString ();
    }
}
public void SendVideoBuffer (CMSampleBuffer sampleBuffer)
{
    var videoFrame = sampleBuffer.GetImageBuffer ();

    // Map the CGImagePropertyOrientation raw value attached by ReplayKit to a rotation in degrees
    var rotation = 0;
    var orientationAttachment = sampleBuffer.GetAttachment<NSNumber> ((CFString)ReplayKit.RPBroadcastSampleHandler.VideoSampleOrientationKey.ToString (), out CMAttachmentMode mode);
    switch (orientationAttachment.UInt32Value) {
    case 1: // up
    case 2: // upMirrored
        rotation = 0;
        break;
    case 3: // down
    case 4: // downMirrored
        rotation = 180;
        break;
    case 8: // left
    case 5: // leftMirrored
        rotation = 90;
        break;
    case 6: // right
    case 7: // rightMirrored
        rotation = 270;
        break;
    }

    var time = new CMTime ((long)CAAnimation.CurrentMediaTime (), 1000);
    var frame = new AgoraVideoFrame {
        Format = 12,
        Time = time,
        TextureBuf = videoFrame.Handle,
        Rotation = rotation
    };

    _sharedAgoraEngine.PushExternalVideoFrame (frame);
}
public virtual void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    CVPixelBuffer pixelBuffer = null;
    VNImageRequestHandler imageRequestHandler = null;
    try {
        pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer;
        if (pixelBuffer == null) {
            return;
        }

        // TODO: See if this causes issues disposing directly after
        bufferOutputhandler.Invoke (pixelBuffer);
    } catch (Exception x) {
        Console.WriteLine (x.Message);
    } finally {
        if (sampleBuffer != null) {
            sampleBuffer.Dispose ();
        }
        if (pixelBuffer != null) {
            pixelBuffer.Dispose ();
        }
        if (imageRequestHandler != null) {
            imageRequestHandler.Dispose ();
        }
    }
}
void WriteSampleBuffer (CMSampleBuffer sampleBuffer, NSString mediaType)
{
    if (assetWriter.Status == AVAssetWriterStatus.Unknown) {
        if (assetWriter.StartWriting ()) {
            assetWriter.StartSessionAtSourceTime (sampleBuffer.OutputPresentationTimeStamp);
        } else {
            ShowError (assetWriter.Error);
        }
    }

    if (assetWriter.Status == AVAssetWriterStatus.Writing) {
        if (mediaType == AVMediaType.Video) {
            if (assetWriterVideoIn.ReadyForMoreMediaData) {
                if (!assetWriterVideoIn.AppendSampleBuffer (sampleBuffer)) {
                    ShowError (assetWriter.Error);
                }
            }
        } else if (mediaType == AVMediaType.Audio) {
            if (assetWriterAudioIn.ReadyForMoreMediaData) {
                if (!assetWriterAudioIn.AppendSampleBuffer (sampleBuffer)) {
                    ShowError (assetWriter.Error);
                }
            }
        }
    }
}
/// <summary>
/// Converts raw image data from a CMSampleBuffer into a CGImage.
/// </summary>
/// <returns>The image from the sample buffer.</returns>
/// <param name="sampleBuffer">Sample buffer.</param>
static CGImage GetImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
{
    // Get the CoreVideo image
    using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
        pixelBuffer.Lock (CVPixelBufferLock.None);

        var baseAddress = pixelBuffer.BaseAddress;
        int bytesPerRow = (int)pixelBuffer.BytesPerRow;
        int width = (int)pixelBuffer.Width;
        int height = (int)pixelBuffer.Height;
        var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

        // Create a CGImage on the RGB colorspace from the configured parameters above
        using (var cs = CGColorSpace.CreateDeviceRGB ())
        using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags)) {
            var cgImage = context.ToImage ();
            pixelBuffer.Unlock (CVPixelBufferLock.None);
            return cgImage;
        }
    }
}
void HandleAVCaptureCompletionHandlercompletionHandler (CMSampleBuffer imageDataSampleBuffer, NSError error)
{
    try {
        using (var pool = new NSAutoreleasePool ()) {
            imageData = AVCaptureStillImageOutput.JpegStillToNSData (imageDataSampleBuffer);
            //imageDataSampleBuffer.Dispose();
            //parentView.DismissModalViewControllerAnimated(true);
            //parentView.HandlePickedImage(imageData);
            //session.StopRunning();
            var image = UIImage.LoadFromData (imageData);
            InvokeOnMainThread (delegate {
                var imageView = new UIImageView (new RectangleF (0, 0, this.View.Frame.Size.Width, this.View.Frame.Size.Height));
                imageView.Image = image;
                this.View.AddSubview (imageView);
            });
        }
    } catch (Exception exc) {
        Console.WriteLine (exc);
    }
}
public void CallForEachSample ()
{
    using (var pixelBuffer = new CVPixelBuffer (20, 10, CVPixelFormatType.CV24RGB)) {
        CMFormatDescriptionError fde;
        using (var desc = CMVideoFormatDescription.CreateForImageBuffer (pixelBuffer, out fde)) {
            var sampleTiming = new CMSampleTimingInfo ();
            CMSampleBufferError sbe;
            using (var sb = CMSampleBuffer.CreateForImageBuffer (pixelBuffer, true, desc, sampleTiming, out sbe)) {
                int i = 0;
                var result = sb.CallForEachSample (delegate (CMSampleBuffer buffer, int index) {
                    i++;
                    Assert.AreSame (buffer, sb, "same-1");
                    return CMSampleBufferError.CannotSubdivide;
                });
                Assert.That (result, Is.EqualTo (CMSampleBufferError.CannotSubdivide), "custom error");
                Assert.That (i, Is.EqualTo (1), "1");

                Assert.Throws<ArgumentNullException> (delegate {
                    sb.CallForEachSample (null);
                }, "null");
            }
        }
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        // render the image into the debug preview pane
        UIImage image = getImageFromSampleBuffer (sampleBuffer);

        // event the capture up
        onImageCaptured (image);

        // make sure AVFoundation does not run out of buffers
        sampleBuffer.Dispose ();
    } catch (Exception ex) {
        string errorMessage = string.Format ("Failed to process image capture: {0}", ErrorHandling.GetExceptionDetailedText (ex));
        onCaptureError (errorMessage);
    }
}
private void OnImageCaptured (CMSampleBuffer buffer, NSError error, Action<byte[]> imageCaptured, Action<string> captureFailed)
{
    if (error != null) {
        captureFailed (error.LocalizedDescription);
    } else {
        NSData data = AVCaptureStillImageOutput.JpegStillToNSData (buffer);
        byte[] image = new byte[data.Length];
        Marshal.Copy (data.Bytes, image, 0, Convert.ToInt32 (data.Length));
        imageCaptured (image);
    }
}
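As an aside, `NSData.ToArray ()` (used in the `TakePhotoButtonTapped` example above) performs the same copy as the `Marshal.Copy` sequence here. A minimal equivalent sketch:

NSData data = AVCaptureStillImageOutput.JpegStillToNSData (buffer);
byte[] image = data.ToArray (); // copies the JPEG bytes into a managed array in one call
imageCaptured (image);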
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var image = ImageFromSampleBuffer (sampleBuffer);

        // Do something with the image; we just stuff it in our main view.
        ImageView.BeginInvokeOnMainThread (() => {
            TryDispose (ImageView.Image);
            ImageView.Image = image;
            ImageView.Transform = CGAffineTransform.MakeRotation (NMath.PI / 2);
        });
    } catch (Exception e) {
        Console.WriteLine (e);
    } finally {
        sampleBuffer.Dispose ();
    }
}
void DidFinishProcessingRawPhoto (AVCapturePhotoOutput captureOutput, CMSampleBuffer rawSampleBuffer, CMSampleBuffer previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings resolvedSettings, AVCaptureBracketedStillImageSettings bracketSettings, NSError error)
{
    if (rawSampleBuffer == null) {
        Console.WriteLine ($"Error occurred while capturing photo: {error}");
        return;
    }

    var filePath = Path.Combine (Path.GetTempPath (), $"{resolvedSettings.UniqueID}.dng");
    NSData imageData = AVCapturePhotoOutput.GetDngPhotoDataRepresentation (rawSampleBuffer, previewPhotoSampleBuffer);
    imageData.Save (filePath, true);

    PHPhotoLibrary.RequestAuthorization (status => {
        if (status == PHAuthorizationStatus.Authorized) {
            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
                // In iOS 9 and later, it's possible to move the file into the photo library without duplicating the file data.
                // This avoids using double the disk space during save, which can make a difference on devices with limited free disk space.
                var options = new PHAssetResourceCreationOptions {
                    ShouldMoveFile = true // move (not copy) the DNG file
                };
                PHAssetCreationRequest.CreationRequestForAsset ().AddResource (PHAssetResourceType.Photo, filePath, options);
            }, (success, err) => {
                if (!success)
                    Console.WriteLine ($"Error occurred while saving raw photo to photo library: {err}");
                else
                    Console.WriteLine ("Raw photo was saved to photo library");

                NSError rErr;
                if (NSFileManager.DefaultManager.FileExists (filePath))
                    NSFileManager.DefaultManager.Remove (filePath, out rErr);
            });
        } else {
            Console.WriteLine ("Not authorized to save photo");
        }
    });
}
// This runs on the movieWritingQueue already
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        if (processor.assetWriter != null) {
            var formatDescription = sampleBuffer.GetFormatDescription ();
            bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

            // Initialize the audio input if this is not done yet
            if (!processor.readyToRecordAudio) {
                processor.readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);
            }

            // Write audio data to file
            if (processor.readyToRecordAudio && processor.readyToRecordVideo) {
                processor.WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
            }

            bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
            if (!wasReadyToRecord && isReadyToRecord) {
                processor.recordingWillBeStarted = false;
                processor.IsRecording = true;
                if (processor.RecordingDidStart != null) {
                    processor.RecordingDidStart ();
                }
            }
        }
    } finally {
        sampleBuffer.Dispose ();
    }
}
public static void DidDropSampleBuffer (IAVCaptureAudioDataOutputSampleBufferDelegate This, AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
}
// This is used to solve the issue with the movieWriter queue and the DisplayPixelBuffer
// thread not releasing CMSampleBuffers when both are still using the same buffer: each
// user calls this method once, and the buffer is disposed only on the second call.
void CompleteBufferUse (CMSampleBuffer buf)
{
    lock (inuse) {
        if (inuse.Contains (buf)) {
            // Second user is done: release the buffer
            inuse.Remove (buf);
            buf.Dispose ();
        } else {
            // First user is done: remember the buffer until the second user finishes
            inuse.Add (buf);
        }
    }
}
private static double ProgressOfSampleBufferInTimeRange (CMSampleBuffer sampleBuffer, CMTimeRange timeRange)
{
    CMTime progressTime = sampleBuffer.PresentationTimeStamp;
    progressTime = progressTime - timeRange.Start;

    CMTime sampleDuration = sampleBuffer.Duration;
    if (sampleDuration.IsNumeric)
        progressTime = progressTime + sampleDuration;

    return progressTime.Seconds / timeRange.Duration.Seconds;
}
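To make the arithmetic concrete, a small worked sketch with hypothetical values (the timescale of 600 is an arbitrary choice here): for a sample with presentation time 5 s and duration 1 s inside the range [2 s, 10 s], progress works out to (5 − 2 + 1) / 8 = 0.5.

var timeRange = new CMTimeRange {
    Start = CMTime.FromSeconds (2, 600),    // range is [2 s, 10 s]
    Duration = CMTime.FromSeconds (8, 600)
};
// For a buffer whose PresentationTimeStamp is 5 s and Duration is 1 s:
//   progressTime = (5 - 2) + 1 = 4 s
//   progress     = 4 / 8       = 0.5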
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var image = ImageFromSampleBuffer (sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
            AppDelegate.ImageView.Image = image;
        });

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose ();
    } catch (Exception e) {
        Console.WriteLine (e);
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // NOTE: frames skipped here are not disposed; the later variants of this method
    // (see below) dispose the buffer before returning, which is safer.
    if ((DateTime.UtcNow - lastAnalysis).TotalMilliseconds < options.DelayBetweenAnalyzingFrames ||
        working || CancelTokenSource.IsCancellationRequested)
        return;

    working = true;
    //Console.WriteLine("SAMPLE");
    lastAnalysis = DateTime.UtcNow;

    try {
        using (var image = ImageFromSampleBuffer (sampleBuffer))
            HandleImage (image);

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose ();
    } catch (Exception e) {
        Console.WriteLine (e);
    }

    working = false;
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        lastSampleTime = sampleBuffer.PresentationTimeStamp;
        var image = ImageFromSampleBuffer (sampleBuffer);

        if (frame == 0) {
            writer.StartWriting ();
            writer.StartSessionAtSourceTime (lastSampleTime);
            frame = 1;
        }

        String infoString = "";
        if (inputWriter.ReadyForMoreMediaData) {
            if (!inputWriter.AppendSampleBuffer (sampleBuffer)) {
                infoString = "Failed to append sample buffer";
            } else {
                infoString = String.Format ("{0} frames captured", frame++);
            }
        } else {
            infoString = "Writer not ready";
        }

        ImageView.BeginInvokeOnMainThread (() => ImageView.Image = image);
        InfoLabel.BeginInvokeOnMainThread (() => InfoLabel.Text = infoString);
    } catch (Exception e) {
        Failure.Alert (e.Message);
    } finally {
        sampleBuffer.Dispose ();
    }
}
public void DidDropSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    //Console.WriteLine("DROPPED");
}
void DidFinishProcessingPhoto (AVCapturePhotoOutput captureOutput, CMSampleBuffer photoSampleBuffer, CMSampleBuffer previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings resolvedSettings, AVCaptureBracketedStillImageSettings bracketSettings, NSError error)
{
    if (photoSampleBuffer == null) {
        Console.WriteLine ($"Error occurred while capturing photo: {error}");
        return;
    }

    NSData imageData = AVCapturePhotoOutput.GetJpegPhotoDataRepresentation (photoSampleBuffer, previewPhotoSampleBuffer);

    PHPhotoLibrary.RequestAuthorization (status => {
        if (status == PHAuthorizationStatus.Authorized) {
            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
                PHAssetCreationRequest.CreationRequestForAsset ().AddResource (PHAssetResourceType.Photo, imageData, null);
            }, (success, err) => {
                if (!success) {
                    Console.WriteLine ($"Error occurred while saving photo to photo library: {err}");
                } else {
                    Console.WriteLine ("Photo was saved to photo library");
                }
            });
        } else {
            Console.WriteLine ("Not authorized to save photo");
        }
    });
}
private CGImage CreateImage (CMSampleBuffer sampleBuffer)
{
    CGImage image = null;

    CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();
    var subType = formatDescription.MediaSubType;
    CMBlockBuffer blockBuffer = sampleBuffer.GetDataBuffer ();

    if (blockBuffer != null) {
        // Compressed path: the sample carries JPEG data in a block buffer
        if (subType != (int)CMVideoCodecType.JPEG)
            throw new Exception ("Block buffer must be JPEG encoded.");

        var jpegData = new NSMutableData ();
        jpegData.Length = blockBuffer.DataLength;
        blockBuffer.CopyDataBytes (0, blockBuffer.DataLength, jpegData.Bytes);

        using (var imageSource = CGImageSource.FromData (jpegData)) {
            var decodeOptions = new CGImageOptions {
                ShouldAllowFloat = false,
                ShouldCache = false
            };
            image = imageSource.CreateImage (0, decodeOptions);
        }
    } else {
        // Uncompressed path: the sample carries a BGRA image buffer
        if (subType != (int)CVPixelFormatType.CV32BGRA)
            throw new Exception ("Image buffer must be BGRA encoded.");

        CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer ();
        using (var colorSpace = CGColorSpace.CreateDeviceRGB ())
        using (var bitmapContext = new CGBitmapContext (imageBuffer.Handle, (int)imageBuffer.DisplaySize.Width, (int)imageBuffer.DisplaySize.Height, 8, 0, colorSpace, CGImageAlphaInfo.NoneSkipFirst)) {
            image = bitmapContext.ToImage ();
        }
    }

    return image;
}
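For the BGRA branch above to be taken, the capture output has to be configured to deliver CV32BGRA pixel buffers. A minimal configuration sketch, assuming the standard Xamarin.iOS binding (`WeakVideoSettings` on `AVCaptureVideoDataOutput`); the variable name is hypothetical:

var videoOutput = new AVCaptureVideoDataOutput {
    // Request uncompressed 32-bit BGRA frames from the session
    WeakVideoSettings = new CVPixelBufferAttributes {
        PixelFormatType = CVPixelFormatType.CV32BGRA
    }.Dictionary
};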
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        if (!gotResult) {
            LuminanceSource luminance;
            //connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;

            using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
                if (bytes == null)
                    bytes = new byte [pixelBuffer.Height * pixelBuffer.BytesPerRow];

                pixelBuffer.Lock (0);
                Marshal.Copy (pixelBuffer.BaseAddress, bytes, 0, bytes.Length);
                luminance = new RGBLuminanceSource (bytes, pixelBuffer.Width, pixelBuffer.Height);
                pixelBuffer.Unlock (0);
            }

            var binarized = new BinaryBitmap (new HybridBinarizer (luminance));
            var result = reader.decodeWithState (binarized);
            //parent.session.StopRunning ();
            gotResult = true;

            if (parent.Scan != null)
                parent.Scan (result);
        }
    } catch (ReaderException) {
        // ignore this exception; it happens every time there is a failed scan
    } catch (Exception) {
        // TODO: this one is unexpected.. log or otherwise handle it
        throw;
    } finally {
        try {
            // lamest thing, but seems that this throws :(
            sampleBuffer.Dispose ();
        } catch {
        }
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var frame = ImageFromSampleBuffer (sampleBuffer);
        Camera.OnFrameCaptured (frame);
        sampleBuffer.Dispose ();
    } catch (Exception ex) {
        Debug.WriteLine (ex);
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var image = ImageFromSampleBuffer (sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        AppDelegate.ImageView.BeginInvokeOnMainThread (delegate {
            AppDelegate.ImageView.Image = image;
        });

        sampleBuffer.Dispose ();
    } catch (Exception e) {
        Console.WriteLine (e);
    }
}
static UIImage ImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
        pixelBuffer.Lock (CVOptionFlags.None);

        var baseAddress = pixelBuffer.BaseAddress;
        int bytesPerRow = pixelBuffer.BytesPerRow;
        int width = pixelBuffer.Width;
        int height = pixelBuffer.Height;
        var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

        using (var cs = CGColorSpace.CreateDeviceRGB ())
        using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags))
        using (var cgImage = context.ToImage ()) {
            pixelBuffer.Unlock (CVOptionFlags.None);
            return UIImage.FromImage (cgImage);
        }
    }
}
public void DidReadAndWriteSampleBuffer (ReadWriteSampleBufferChannel sampleBufferChannel, CMSampleBuffer sampleBuffer, CVPixelBuffer sampleBufferForWrite)
{
    // Calculate progress (scale of 0.0 to 1.0)
    double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange (sampleBuffer, _timeRange);
    _progressProc ((float)progress * 100);

    // Grab the pixel buffer from the sample buffer, if possible
    CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer ();
    var pixelBuffer = imageBuffer as CVPixelBuffer;
    if (pixelBuffer != null)
        Delegate.AdjustPixelBuffer (pixelBuffer, sampleBufferForWrite);
}
public virtual void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();

    if (connection == videoConnection) {
        // Get framerate
        CMTime timestamp = sampleBuffer.PresentationTimeStamp;
        CalculateFramerateAtTimestamp (timestamp);

        // Get frame dimensions (for onscreen display)
        if (VideoDimensions.IsEmpty)
            VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);

        // Get the buffer type
        if (VideoType == 0)
            VideoType = formatDescription.MediaSubType;

        // Synchronously process the pixel buffer to de-green it.
        using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
            ProcessPixelBuffer (pixelBuffer);

        previewBufferQueue.Enqueue (sampleBuffer);
        //var writeBuffer = sampleBuffer.Duplicate ();
        InvokeOnMainThread (() => {
            var j = previewBufferQueue.Dequeue ();
            var sbuf = j as CMSampleBuffer;
            if (sbuf == null) {
#if DEBUG
                // Record the current sampleBuffer.ClassHandle
                // Then run another iteration and on the next one, print the ClassHandle
                Console.WriteLine ("The type is {0}", new NSString (CFCopyDescription (j.Handle)));
#endif
                return;
            }

            using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()) {
                if (PixelBufferReadyForDisplay != null)
                    PixelBufferReadyForDisplay (pixBuf);
            }
        });
    }

    // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
    CompleteBufferUse (sampleBuffer);

    movieWritingQueue.DispatchAsync (() => {
        if (assetWriter != null) {
            bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

            if (connection == videoConnection) {
                // Initialize the video input if this is not done yet
                if (!readyToRecordVideo)
                    readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);

                // Write the video data to file
                if (readyToRecordVideo && readyToRecordAudio)
                    WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
            } else if (connection == audioConnection) {
                if (!readyToRecordAudio)
                    readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);

                if (readyToRecordAudio && readyToRecordVideo)
                    WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
            }

            bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);
            if (!wasReadyToRecord && isReadyToRecord) {
                recordingWillBeStarted = false;
                IsRecording = true;
                if (RecordingDidStart != null)
                    RecordingDidStart ();
            }
        }
        CompleteBufferUse (sampleBuffer);
    });
}
protected override bool Append (CMSampleBuffer sampleBuffer)
{
    // append audio data without modification
    return WriterInput.AppendSampleBuffer (sampleBuffer);
}
public virtual void DidDropSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
}
public CVPixelBuffer CroppedSampleBuffer (CMSampleBuffer sampleBuffer)
{
    var imageBuffer = sampleBuffer.GetImageBuffer ();
    if (imageBuffer == null) {
        throw new ArgumentException ("Cannot convert to CVImageBuffer");
    }

    // Only doing these calculations once for efficiency.
    // If the incoming images could change orientation or size during a session, this would need to be reset when that happens.
    if (!alreadySet) {
        alreadySet = true;
        var imageSize = imageBuffer.EncodedSize;
        /*
         * Incoming image size is set in VideoCapture.BeginSession as AVCaptureSession.Preset1920x1080,
         * which, buffer-wise, is always captured landscape-style, but info.plist specifies that this
         * app runs only in portrait. Therefore, the buffer is always sideways, i.e., `imageSize == [Width: 1920, Height: 1080]`
         *
         * Since our UI blurs out the top and bottom of the image, what we're interested in is the middle
         * 3/5 of the long side, and the entirety of the 1080 (short side), rotated 90 degrees anti-clockwise.
         *
         * To get good alignment, this also requires some manual tweaking (LayoutMargins?), which probably changes
         * between hardware.
         */
        var rotatedSize = new CGSize (imageSize.Height, imageSize.Width);
        var shorterSide = rotatedSize.Width < rotatedSize.Height ? rotatedSize.Width : rotatedSize.Height;

        rotateTransform = new CIAffineTransform {
            Transform = new CGAffineTransform (0, -1, 1, 0, 0, shorterSide)
        };

        cropTransform = new CIAffineTransform {
            Transform = CGAffineTransform.MakeTranslation (0, (int)(1920.0 / 5) + 60) // Translate down past the cropped area + manual tweak
        };

        edgeDetector = new CIEdges ();
    }

    // Convert to CIImage because it is easier to manipulate
    var ciImage = CIImage.FromImageBuffer (imageBuffer);
    rotateTransform.Image = ciImage;
    cropTransform.Image = rotateTransform.OutputImage;
    edgeDetector.Image = cropTransform.OutputImage;
    var cropped = edgeDetector.OutputImage;

    // Note that the above pipeline could be easily appended with other image manipulations.
    // For example, to change the image contrast, detect edges, etc. It would be most efficient to handle all of
    // the image manipulation in a single Core Image pipeline because it can be hardware optimized.

    // Only need to create this buffer one time and then we can reuse it for every frame
    if (resultBuffer == null || resultBuffer.Handle == IntPtr.Zero) {
        var targetSize = new CGSize (1080, 1152); // 1080, 3/5 * 1920
        byte[] data = new byte[(int)targetSize.Height * 4 * (int)targetSize.Width];
        resultBuffer = CVPixelBuffer.Create ((nint)targetSize.Width, (nint)targetSize.Height, CVPixelFormatType.CV32BGRA, data, 4 * (nint)targetSize.Width, null);

        if (resultBuffer == null) {
            throw new Exception ("Can't allocate pixel buffer.");
        }
    }

    context.Render (cropped, resultBuffer);

    // For debugging
    //var image = ImageBufferToUIImage(resultBuffer);
    //Console.WriteLine("Image size: " + image.Size); // set breakpoint to see image being provided to CoreML

    return resultBuffer;
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        UIImage image = ImageFromSampleBuffer (sampleBuffer);

        // Do something with the image, we just stuff it in our main view.
        BeginInvokeOnMainThread (delegate {
            if (_imageView.Frame.Size != image.Size) {
                _imageView.Frame = new CGRect (CGPoint.Empty, image.Size);
            }
            _imageView.Image = image;
        });

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose ();
    } catch (Exception e) {
        Console.WriteLine (e);
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;
    if (msSinceLastPreview < options.DelayBetweenAnalyzingFrames ||
        (wasScanned && msSinceLastPreview < options.DelayBetweenContinuousScans) ||
        working || CancelTokenSource.IsCancellationRequested) {
        if (sampleBuffer != null) {
            sampleBuffer.Dispose ();
            sampleBuffer = null;
        }
        return;
    }

    wasScanned = false;
    working = true;
    lastAnalysis = DateTime.UtcNow;

    try {
        // Get the CoreVideo image
        using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
            // Lock the base address
            pixelBuffer.Lock (CVPixelBufferLock.ReadOnly); // MAYBE NEEDS READ/WRITE

            CVPixelBufferARGB32LuminanceSource luminanceSource;

            // Let's access the raw underlying data and create a luminance source from it
            unsafe {
                var rawData = (byte*)pixelBuffer.BaseAddress.ToPointer ();
                var rawDatalen = (int)(pixelBuffer.Height * pixelBuffer.Width * 4); // This drops 8 bytes from the original length to give us the expected length
                luminanceSource = new CVPixelBufferARGB32LuminanceSource (rawData, rawDatalen, (int)pixelBuffer.Width, (int)pixelBuffer.Height);
            }

            HandleImage (luminanceSource);

            pixelBuffer.Unlock (CVPixelBufferLock.ReadOnly);
        }

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose ();
        sampleBuffer = null;
    } catch (Exception e) {
        Console.WriteLine (e);
    }

    working = false;
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();

    if (connection == processor.videoConnection) {
        // Get framerate
        CMTime timestamp = sampleBuffer.PresentationTimeStamp;
        CalculateFramerateAtTimestamp (timestamp);

        // Get frame dimensions (for onscreen display)
        if (processor.VideoDimensions.Width == 0 && processor.VideoDimensions.Height == 0) {
            processor.VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
        }

        // Get the buffer type
        if (processor.VideoType == 0) {
            processor.VideoType = formatDescription.MediaSubType;
        }
        // TODO: processor.VideoType = (CMVideoCodecType)Enum.ToObject (typeof(CMVideoCodecType), formatDescription.MediaSubType);

        // Synchronously process the pixel buffer to de-green it.
        using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
            ProcessPixelBuffer (pixelBuffer);

        processor.previewBufferQueue.Enqueue (sampleBuffer);
        //var writeBuffer = sampleBuffer.Duplicate ();
        InvokeOnMainThread (() => {
            var j = processor.previewBufferQueue.Dequeue ();
            var sbuf = j as CMSampleBuffer;
            if (sbuf == null) {
                // Record the current sampleBuffer.ClassHandle
                // Then run another iteration and on the next one, print the ClassHandle
                Console.WriteLine ("The type is {0}", new NSString (CFCopyDescription (j.Handle)));
                return;
            }

            using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()) {
                if (processor.PixelBufferReadyForDisplay != null) {
                    processor.PixelBufferReadyForDisplay (pixBuf);
                }
            }

            if (processor.assetWriter == null) {
                sbuf.Dispose ();
            } else {
                processor.CompleteBufferUse (sbuf);
            }
        });
    }

    processor.movieWritingQueue.DispatchAsync (() => {
        if (processor.assetWriter != null) {
            bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

            // Initialize the video input if this is not done yet
            if (!processor.readyToRecordVideo) {
                processor.readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);
            }

            // Write the video data to file
            if (processor.readyToRecordVideo && processor.readyToRecordAudio) {
                processor.WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
            }

            bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);
            if (!wasReadyToRecord && isReadyToRecord) {
                processor.recordingWillBeStarted = false;
                processor.IsRecording = true;
                if (processor.RecordingDidStart != null) {
                    processor.RecordingDidStart ();
                }
            }

            processor.CompleteBufferUse (sampleBuffer);
        }
    });
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var image = ImageFromSampleBuffer (sampleBuffer);
        sampleBuffer.Dispose ();
        captureOutput.Dispose ();
        connection.Dispose ();

        if (barChecking == false) {
            barChecking = true;
            codeImage = image;
            Thread barCodeThread = new Thread (new ThreadStart (CheckBarCode));
            barCodeThread.Start ();
        }

        AppDelegate.main.decryptVC.imageCaptureView.BeginInvokeOnMainThread (delegate {
            AppDelegate.main.decryptVC.imageCaptureView.Image = image;
        });
    } catch (Exception e) {
        Console.WriteLine (e);
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // TODO: Implement - see: http://go-mono.com/docs/index.aspx?link=T%3aMonoTouch.Foundation.ModelAttribute
    if (Configuration.IsScanning) {
        try {
            UIImage image = GetImageFromSampleBuffer (sampleBuffer);
            var visionImage = new VisionImage (image);
            visionImage.Metadata = metadata;
            DetectBarcodeActionAsync (visionImage);
        } catch {
        } finally {
            sampleBuffer.Dispose ();
        }
    }
}
public override void DidOutputSampleBuffer (AVCaptureOutput output, CMSampleBuffer buffer, AVCaptureConnection con)
{
    // Implement - see: http://go-mono.com/docs/index.aspx?link=T%3aMonoTouch.Foundation.ModelAttribute
}
public virtual void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // HACK: Change CMSampleBuffer.GetFormatDescription() to CMSampleBuffer.GetVideoFormatDescription()
    // HACK: Change CMFormatDescription to CMVideoFormatDescription
    // CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription ();
    CMVideoFormatDescription formatDescription = sampleBuffer.GetVideoFormatDescription ();

    if (connection == videoConnection) {
        // Get framerate
        CMTime timestamp = sampleBuffer.PresentationTimeStamp;
        CalculateFramerateAtTimestamp (timestamp);

        // Get frame dimensions (for onscreen display)
        if (VideoDimensions.IsEmpty) {
            // HACK: Change GetVideoPresentationDimensions() to GetPresentationDimensions()
            // VideoDimensions = formatDescription.GetVideoPresentationDimensions (true, false);
            VideoDimensions = formatDescription.GetPresentationDimensions (true, false);
        }

        // Get the buffer type
        if (VideoType == 0) {
            VideoType = formatDescription.MediaSubType;
        }

        // Synchronously process the pixel buffer to de-green it.
        using (var pixelBuffer = sampleBuffer.GetImageBuffer ())
            ProcessPixelBuffer (pixelBuffer);

        previewBufferQueue.Enqueue (sampleBuffer);
        //var writeBuffer = sampleBuffer.Duplicate ();
        InvokeOnMainThread (() => {
            INativeObject j = previewBufferQueue.Dequeue ();
            var sbuf = j as CMSampleBuffer;
            if (sbuf == null) {
#if DEBUG
                // Record the current sampleBuffer.ClassHandle
                // Then run another iteration and on the next one, print the ClassHandle
                Console.WriteLine ("The type is {0}", j.ToString ());
#endif
                return;
            }

            using (CVImageBuffer pixBuf = sbuf.GetImageBuffer ()) {
                if (PixelBufferReadyForDisplay != null) {
                    PixelBufferReadyForDisplay (pixBuf);
                }
            }
        });
    }

    // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
    CompleteBufferUse (sampleBuffer);

    movieWritingQueue.DispatchAsync (() => {
        if (assetWriter != null) {
            bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

            if (connection == videoConnection) {
                // Initialize the video input if this is not done yet
                if (!readyToRecordVideo) {
                    readyToRecordVideo = SetupAssetWriterVideoInput (formatDescription);
                }

                // Write the video data to file
                if (readyToRecordVideo && readyToRecordAudio) {
                    WriteSampleBuffer (sampleBuffer, AVMediaType.Video);
                }
            } else if (connection == audioConnection) {
                if (!readyToRecordAudio) {
                    readyToRecordAudio = SetupAssetWriterAudioInput (formatDescription);
                }

                if (readyToRecordAudio && readyToRecordVideo) {
                    WriteSampleBuffer (sampleBuffer, AVMediaType.Audio);
                }
            }

            bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);
            if (!wasReadyToRecord && isReadyToRecord) {
                recordingWillBeStarted = false;
                IsRecording = true;
                if (RecordingDidStart != null) {
                    RecordingDidStart ();
                }
            }
        }
        CompleteBufferUse (sampleBuffer);
    });
}
public void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try {
        var currentDate = DateTime.Now;
        //Console.WriteLine("DidOutputSampleBuffer: " + currentDate + " " + lastAnalysis + " " + currentDate.Subtract(lastAnalysis).Milliseconds);

        // control the pace of the machine vision to protect battery life
        if (currentDate - lastAnalysis >= pace) {
            lastAnalysis = currentDate;
        } else {
            //Console.WriteLine("-- skip --");
            return; // don't run the classifier more often than we need
        }

        // keep track of performance and log the frame rate
        if (trackPerformance) {
            frameCount = frameCount + 1;
            if (frameCount % framesPerSample == 0) {
                var diff = currentDate.Subtract (startDate);
                if (diff.Seconds > 0) {
                    if (pace > TimeSpan.Zero) {
                        Console.WriteLine ("WARNING: Frame rate of image classification is being limited by \"pace\" setting. Set to 0.0 for fastest possible rate.");
                    }
                }
                Console.WriteLine ($"{(double)diff.Seconds / framesPerSample}s per frame (average)");
            }
            startDate = currentDate;
        }

        // Crop and resize the image data.
        // Note, this uses a Core Image pipeline that could be appended with other pre-processing.
        // If we don't want to do anything custom, we can remove this step and let the Vision framework handle
        // crop and resize as long as we are careful to pass the orientation properly.
        using (var croppedBuffer = CroppedSampleBuffer (sampleBuffer, targetImageSize)) {
            if (croppedBuffer == null) {
                return;
            }

            try {
                VNImageOptions options = new VNImageOptions ();
                classifierRequestHandler = new VNImageRequestHandler (croppedBuffer, options);
                NSError err;
                classifierRequestHandler.Perform (ClassificationRequest, out err);
                if (err != null) {
                    Console.WriteLine (err);
                }
            } catch (Exception error) {
                Console.WriteLine (error);
            }
        }
    } finally {
        sampleBuffer.Dispose ();
    }
}
public CVPixelBuffer CroppedSampleBuffer (CMSampleBuffer sampleBuffer, CGSize targetSize)
{
    var imageBuffer = sampleBuffer.GetImageBuffer ();
    if (imageBuffer == null) {
        throw new ArgumentException ("Cannot convert to CVImageBuffer");
    }

    // Only doing these calculations once for efficiency.
    // If the incoming images could change orientation or size during a session, this would need to be reset when that happens.
    if (!alreadySet) {
        alreadySet = true;
        var imageSize = imageBuffer.EncodedSize;
        var rotatedSize = new CGSize (imageSize.Height, imageSize.Width);

        if (targetSize.Width > rotatedSize.Width || targetSize.Height > rotatedSize.Height) {
            throw new NotSupportedException ("Captured image is smaller than image size for model.");
        }

        var shorterSide = rotatedSize.Width < rotatedSize.Height ? rotatedSize.Width : rotatedSize.Height;

        rotateTransform = new CIAffineTransform {
            Transform = new CGAffineTransform (0, -1, 1, 0, 0, shorterSide)
            //Transform = CGAffineTransform.MakeIdentity()
        };

        var scale = targetSize.Width / shorterSide;
        scaleTransform = new CIAffineTransform {
            Transform = CGAffineTransform.MakeScale (scale, scale),
        };

        var xDiff = rotatedSize.Width * scale - targetSize.Width;
        var yDiff = rotatedSize.Height * scale - targetSize.Height;
        cropTransform = new CIAffineTransform {
            //Transform = CGAffineTransform.MakeTranslation(xDiff / 2.0f, yDiff / 2.0f),
            Transform = CGAffineTransform.MakeIdentity ()
        };
    }

    // Convert to CIImage because it is easier to manipulate
    var ciImage = CIImage.FromImageBuffer (imageBuffer);
    rotateTransform.Image = ciImage;
    scaleTransform.Image = rotateTransform.OutputImage;
    cropTransform.Image = scaleTransform.OutputImage;
    var cropped = cropTransform.OutputImage;

    // Note that the above pipeline could be easily appended with other image manipulations.
    // For example, to change the image contrast. It would be most efficient to handle all of
    // the image manipulation in a single Core Image pipeline because it can be hardware optimized.

    // Only need to create this buffer one time and then we can reuse it for every frame
    if (resultBuffer == null || resultBuffer.Handle == IntPtr.Zero) {
        byte[] data = new byte[(int)targetSize.Height * 4 * (int)targetSize.Width];
        resultBuffer = CVPixelBuffer.Create ((nint)targetSize.Width, (nint)targetSize.Height, CVPixelFormatType.CV32BGRA, data, 4 * (nint)targetSize.Width, null); // HACK

        if (resultBuffer == null) {
            throw new Exception ("Can't allocate pixel buffer.");
        }
    }

    context.Render (cropped, resultBuffer);

    // For debugging
    //var image = ImageBufferToUIImage(resultBuffer);
    //Console.WriteLine("Image size: " + image.Size); // set breakpoint to see image being provided to CoreML

    return resultBuffer;
}
public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;
    if (msSinceLastPreview < options.DelayBetweenAnalyzingFrames ||
        (wasScanned && msSinceLastPreview < options.DelayBetweenContinuousScans) ||
        working || CancelTokenSource.IsCancellationRequested) {
        if (sampleBuffer != null) {
            sampleBuffer.Dispose ();
            sampleBuffer = null;
        }
        return;
    }

    wasScanned = false;
    working = true;
    lastAnalysis = DateTime.UtcNow;

    try {
        using (var image = ImageFromSampleBuffer (sampleBuffer)) {
            if (HandleImage (image)) {
                wasScanned = true;
            }
        }

        //
        // Although this looks innocent "Oh, he is just optimizing this case away"
        // this is incredibly important to call on this callback, because the AVFoundation
        // has a fixed number of buffers and if it runs out of free buffers, it will stop
        // delivering frames.
        //
        sampleBuffer.Dispose ();
        sampleBuffer = null;
    } catch (Exception e) {
        Console.WriteLine (e);
    }

    working = false;
}
protected abstract bool Append(CMSampleBuffer sampleBuffer);
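This abstract `Append` pairs with the two overrides shown earlier (the pixel-buffer-adaptor version and the pass-through audio version): a base class typically drives it from a reader loop. A minimal sketch of such a loop, assuming a hypothetical `readerOutput` field of type `AVAssetReaderOutput`:

void PumpSamples ()
{
    CMSampleBuffer buffer;
    // CopyNextSampleBuffer returns the next sample, or an invalid buffer once the track is exhausted
    while ((buffer = readerOutput.CopyNextSampleBuffer ()) != null && buffer.IsValid) {
        using (buffer) {
            if (!Append (buffer))
                break; // the writer refused the sample; stop and inspect the asset writer's Error
        }
    }
}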