// Audio capture callback; invoked on the movieWritingQueue.
// Lazily configures the writer's audio input from the first buffer, then
// appends audio samples once both writer inputs are ready. The buffer is
// always disposed here, whether or not it was written.
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
	try {
		// Nothing to do until a recording session has created the writer.
		if (processor.assetWriter == null)
			return;

		var formatDescription = sampleBuffer.GetFormatDescription();
		bool couldRecordBefore = processor.readyToRecordAudio && processor.readyToRecordVideo;

		// Initialize the audio input from this buffer's format if not done yet.
		if (!processor.readyToRecordAudio)
			processor.readyToRecordAudio = SetupAssetWriterAudioInput(formatDescription);

		// Append audio only once both the audio and video inputs are configured.
		if (processor.readyToRecordAudio && processor.readyToRecordVideo)
			processor.WriteSampleBuffer(sampleBuffer, AVMediaType.Audio);

		bool canRecordNow = processor.readyToRecordAudio && processor.readyToRecordVideo;

		// On the not-ready -> ready transition, mark recording as started
		// and notify any listener.
		if (!couldRecordBefore && canRecordNow) {
			processor.recordingWillBeStarted = false;
			processor.IsRecording = true;
			if (processor.RecordingDidStart != null) {
				processor.RecordingDidStart();
			}
		}
	} finally {
		sampleBuffer.Dispose();
	}
}
// Video capture callback, invoked on the capture output's dispatch queue.
// Fans each frame out to two consumers:
//   1. the preview path — processed synchronously, then handed to the main
//      thread via previewBufferQueue for on-screen display;
//   2. the recording path — dispatched to movieWritingQueue for the asset writer.
// Buffer ownership is tracked through processor.CompleteBufferUse (project-defined
// refcount semantics — NOTE(review): confirm how Enqueue/CompleteBufferUse retain
// and release the sample buffer across the three queues involved).
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
	// NOTE(review): formatDescription is never disposed in this method; it is
	// captured by the movieWritingQueue lambda below — confirm who releases it.
	CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();
	if (connection == processor.videoConnection) {
		// Get framerate from the buffer's presentation timestamp.
		CMTime timestamp = sampleBuffer.PresentationTimeStamp;
		CalculateFramerateAtTimestamp(timestamp);

		// Get frame dimensions (for onscreen display); cached on first frame only.
		if (processor.VideoDimensions.Width == 0 && processor.VideoDimensions.Height == 0) {
			processor.VideoDimensions = formatDescription.GetVideoPresentationDimensions(true, false);
		}

		// Get the buffer type; cached on first frame only (0 == not yet set).
		if (processor.VideoType == 0) {
			processor.VideoType = formatDescription.MediaSubType;
		}
		// TODO: processor.VideoType = (CMVideoCodecType)Enum.ToObject (typeof(CMVideoCodecType), formatDescription.MediaSubType);

		// Synchronously process the pixel buffer to de-green it
		// (mutates the frame in place before either consumer sees it).
		using (var pixelBuffer = sampleBuffer.GetImageBuffer())
			ProcessPixelBuffer(pixelBuffer);

		// Hand the buffer to the preview path; the main-thread lambda below
		// dequeues the oldest pending buffer rather than closing over this one.
		processor.previewBufferQueue.Enqueue(sampleBuffer);
		//var writeBuffer = sampleBuffer.Duplicate ();
		InvokeOnMainThread(() => {
			var j = processor.previewBufferQueue.Dequeue();
			var sbuf = j as CMSampleBuffer;
			if (sbuf == null) {
				// Diagnostic path: the dequeued object was not a CMSampleBuffer.
				// Record the current sampleBuffer.ClassHandle
				// Then run another iteration and on the next one, print the ClassHandle
				// NOTE(review): the NSString wrapping CFCopyDescription's handle is
				// not disposed; CFCopyDescription follows the CF Create rule — confirm
				// this diagnostic branch does not leak.
				Console.WriteLine("The type is {0}", new NSString(CFCopyDescription(j.Handle)));
				return;
			}
			// Surface the frame's image buffer to the display listener, if any.
			using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()){
				if (processor.PixelBufferReadyForDisplay != null) {
					processor.PixelBufferReadyForDisplay(pixBuf);
				}
			}
			// Not recording: preview path owns the buffer outright, dispose it.
			// Recording: release this path's claim via the shared refcount.
			if (processor.assetWriter == null) {
				sbuf.Dispose();
			} else {
				processor.CompleteBufferUse(sbuf);
			}
		});
	}

	// Recording path: serialize writer access on the movieWritingQueue.
	processor.movieWritingQueue.DispatchAsync(() => {
		// NOTE(review): when assetWriter is null, CompleteBufferUse is never
		// called on sampleBuffer in this lambda — confirm this does not leak
		// the buffer's recording-path reference.
		if (processor.assetWriter != null) {
			bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

			// Initialize the video input from this buffer's format if not done yet.
			if (!processor.readyToRecordVideo) {
				processor.readyToRecordVideo =
SetupAssetWriterVideoInput(formatDescription);
			}

			// Write the video data to file only once both inputs are configured.
			if (processor.readyToRecordVideo && processor.readyToRecordAudio) {
				processor.WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
			}

			bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

			// On the not-ready -> ready transition, mark recording as started
			// and notify any listener.
			if (!wasReadyToRecord && isReadyToRecord) {
				processor.recordingWillBeStarted = false;
				processor.IsRecording = true;
				if (processor.RecordingDidStart != null) {
					processor.RecordingDidStart();
				}
			}
			// Release the recording path's claim on the buffer.
			processor.CompleteBufferUse(sampleBuffer);
		}
	});
}