Example #1
        public override void ProcessSampleBuffer(CoreMedia.CMSampleBuffer sampleBuffer, RPSampleBufferType sampleBufferType)
        {
            switch (sampleBufferType)
            {
            case RPSampleBufferType.Video:
                _bufferCopy = sampleBuffer;
                _lastSendTs = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
                break;

            case RPSampleBufferType.AudioApp:
                // Handle audio sample buffer for app audio
                break;

            case RPSampleBufferType.AudioMic:
                // Handle audio sample buffer for microphone audio
                break;
            }
        }
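ProcessSampleBuffer above is the RPBroadcastSampleHandler callback that ReplayKit invokes for each captured buffer; the example only caches the latest video frame and a millisecond timestamp. The sketch below shows one way the cached frame might be consumed later; it assumes the _bufferCopy field from the example, and EncodeAndSend is a hypothetical helper, not a ReplayKit API.
        // Sketch only: extract the pixels from the cached video buffer so a
        // downstream encoder can use them. EncodeAndSend is hypothetical.
        void ForwardCachedFrame()
        {
            var buffer = _bufferCopy;
            if (buffer == null)
                return;

            // A video CMSampleBuffer carries its pixels as a CVImageBuffer/CVPixelBuffer.
            if (buffer.GetImageBuffer() is CoreVideo.CVPixelBuffer pixelBuffer)
                EncodeAndSend(pixelBuffer);
        }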
Example #2
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CoreMedia.CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // Trap all errors
            try
            {
                if (frameCount >= frameInterval)
                {
                    // Grab an image from the buffer
                    var image = GetImageFromSampleBuffer(sampleBuffer);


                    // Display the image
                    if (View != null)
                    {
                        View.BeginInvokeOnMainThread(async () => {
                            if (OnFrameRecieved != null)
                            {
                                await OnFrameRecieved(image);
                            }


                            //// Set the image
                            //if (DisplayView.Image != null) DisplayView.Image.Dispose();
                            //DisplayView.Image = image;

                            //// Rotate image to the correct display orientation
                            //DisplayView.Transform = CGAffineTransform.MakeRotation((float)Math.PI / 2);
                        });
                    }

                    frameCount = 0;
                }
                frameCount++;

                // IMPORTANT: You must release the buffer because AVFoundation has a fixed number
                // of buffers and will stop delivering frames if it runs out.
                sampleBuffer.Dispose();
            }
            catch (Exception e)
            {
                // Report error
                Console.WriteLine("Error sampling buffer: {0}", e.Message);
            }
        }
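DidOutputSampleBuffer is only invoked once the class containing it is installed as the sample-buffer delegate of an AVCaptureVideoDataOutput. A minimal wiring sketch, assuming a hypothetical OutputRecorder class (an AVCaptureVideoDataOutputSampleBufferDelegate subclass) hosts the override above; binding names are the classic Xamarin.iOS AVFoundation ones, and session configuration, permissions, and error handling are elided.
        // Sketch only: attach the delegate so AVFoundation starts delivering frames.
        // Requires: using AVFoundation; using CoreFoundation;
        void SetupCapture()
        {
            var session = new AVCaptureSession();
            var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            session.AddInput(AVCaptureDeviceInput.FromDevice(device));

            var output = new AVCaptureVideoDataOutput();
            var queue = new DispatchQueue("sample-buffer-queue");
            // OutputRecorder is the hypothetical class that contains DidOutputSampleBuffer above.
            output.SetSampleBufferDelegateQueue(new OutputRecorder(), queue);
            session.AddOutput(output);

            session.StartRunning();
        }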
Example #3
        public unsafe static CMSampleBuffer? CreateWithNewTiming(CMSampleBuffer original, CMSampleTimingInfo[]? timing, out OSStatus status)
        {
            if (original is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(original));
            }

            nint count = timing is null ? 0 : timing.Length;
            IntPtr handle;

            fixed (CMSampleTimingInfo* t = timing)
            {
                status = CMSampleBufferCreateCopyWithNewTiming(IntPtr.Zero, original.Handle, count, t, out handle);
                if (status != (OSStatus)0)
                {
                    return null;
                }
            }

            return new CMSampleBuffer(handle, true);
        }
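A hedged usage sketch for the binding above: build a CMSampleTimingInfo array describing the new timing and let CreateWithNewTiming produce a retimed copy. ShiftTiming and its offset parameter are illustrative names, not part of the binding.
        // Illustrative helper (not part of the binding): copy `source` with its
        // presentation time shifted by `offset`. Requires: using CoreMedia;
        static CMSampleBuffer? ShiftTiming(CMSampleBuffer source, CMTime offset)
        {
            var timing = new [] {
                new CMSampleTimingInfo {
                    Duration = source.Duration,
                    PresentationTimeStamp = CMTime.Add(source.PresentationTimeStamp, offset),
                    DecodeTimeStamp = CMTime.Invalid // let Core Media infer decode order
                }
            };

            var copy = CMSampleBuffer.CreateWithNewTiming(source, timing, out var status);
            if (copy == null)
                Console.WriteLine("CMSampleBufferCreateCopyWithNewTiming failed: {0}", status);
            return copy;
        }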
Example #4
        public CMSampleBufferError TrackDataReadiness(CMSampleBuffer bufferToTrack)
        {
            var handleToTrack = bufferToTrack == null ? IntPtr.Zero : bufferToTrack.handle;

            return CMSampleBufferTrackDataReadiness(handle, handleToTrack);
        }
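The wrapper above hands both handles to CMSampleBufferTrackDataReadiness, so the receiver's data readiness follows the other buffer's. A short hypothetical usage, with pending and reference standing in for real CMSampleBuffer instances:
        // Hypothetical usage: `pending` becomes ready once `reference` becomes ready.
        CMSampleBufferError result = pending.TrackDataReadiness(reference);
        if (result != CMSampleBufferError.None)
            Console.WriteLine("TrackDataReadiness failed: {0}", result);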
Example #5
        public static CMSampleBuffer CreateWithNewTiming(CMSampleBuffer original, CMSampleTimingInfo[] timing)
        {
            OSStatus status;

            return CreateWithNewTiming(original, timing, out status);
        }
Example #6
        public CMSampleBufferError TrackDataReadiness(CMSampleBuffer bufferToTrack)
        {
            return CMSampleBufferTrackDataReadiness(Handle, bufferToTrack.GetHandle());
        }